This commit is contained in:
ton
2024-10-07 10:13:40 +07:00
parent aa1631742f
commit 3a7d696db6
9729 changed files with 1832837 additions and 161742 deletions

View File

@@ -0,0 +1,10 @@
The following files include code from open-source projects
(either as direct copies or modified versions):
- `setuptools.schema.json`, `distutils.schema.json`:
- project: `validate-pyproject` - licensed under MPL-2.0
(https://github.com/abravalheri/validate-pyproject):
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.

View File

@@ -1,6 +1,7 @@
"""For backward compatibility, expose main functions from
``setuptools.config.setupcfg``
"""
from functools import wraps
from typing import Callable, TypeVar, cast

View File

@@ -7,43 +7,41 @@ need to be processed before being applied.
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
"""
from __future__ import annotations
import logging
import os
from collections.abc import Mapping
from email.headerregistry import Address
from functools import partial, reduce
from inspect import cleandoc
from itertools import chain
from types import MappingProxyType
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Set,
Tuple,
Type,
Union,
cast,
)
from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, TypeVar, Union
from ..warnings import SetuptoolsWarning, SetuptoolsDeprecationWarning
from .._path import StrPath
from ..errors import RemovedConfigError
from ..extension import Extension
from ..warnings import SetuptoolsWarning
if TYPE_CHECKING:
from setuptools._importlib import metadata # noqa
from setuptools.dist import Distribution # noqa
from typing_extensions import TypeAlias
from setuptools._importlib import metadata
from setuptools.dist import Distribution
from distutils.dist import _OptionsList
EMPTY: Mapping = MappingProxyType({}) # Immutable dict-like
_Path = Union[os.PathLike, str]
_DictOrStr = Union[dict, str]
_CorrespFn = Callable[["Distribution", Any, _Path], None]
_Correspondence = Union[str, _CorrespFn]
_ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
_CorrespFn: TypeAlias = Callable[["Distribution", Any, StrPath], None]
_Correspondence: TypeAlias = Union[str, _CorrespFn]
_T = TypeVar("_T")
_logger = logging.getLogger(__name__)
def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution":
def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
"""Apply configuration dict read with :func:`read_configuration`"""
if not config:
@@ -65,7 +63,7 @@ def apply(dist: "Distribution", config: dict, filename: _Path) -> "Distribution"
return dist
def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path):
def _apply_project_table(dist: Distribution, config: dict, root_dir: StrPath):
project_table = config.get("project", {}).copy()
if not project_table:
return # short-circuit
@@ -82,7 +80,7 @@ def _apply_project_table(dist: "Distribution", config: dict, root_dir: _Path):
_set_config(dist, corresp, value)
def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path):
def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
tool_table = config.get("tool", {}).get("setuptools", {})
if not tool_table:
return # short-circuit
@@ -90,12 +88,13 @@ def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path):
for field, value in tool_table.items():
norm_key = json_compatible_key(field)
if norm_key in TOOL_TABLE_DEPRECATIONS:
suggestion, kwargs = TOOL_TABLE_DEPRECATIONS[norm_key]
msg = f"The parameter `{norm_key}` is deprecated, {suggestion}"
SetuptoolsDeprecationWarning.emit(
"Deprecated config", msg, **kwargs # type: ignore
)
if norm_key in TOOL_TABLE_REMOVALS:
suggestion = cleandoc(TOOL_TABLE_REMOVALS[norm_key])
msg = f"""
The parameter `tool.setuptools.{field}` was long deprecated
and has been removed from `pyproject.toml`.
"""
raise RemovedConfigError("\n".join([cleandoc(msg), suggestion]))
norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
_set_config(dist, norm_key, value)
@@ -103,15 +102,15 @@ def _apply_tool_table(dist: "Distribution", config: dict, filename: _Path):
_copy_command_options(config, dist, filename)
def _handle_missing_dynamic(dist: "Distribution", project_table: dict):
def _handle_missing_dynamic(dist: Distribution, project_table: dict):
"""Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
# TODO: Set fields back to `None` once the feature stabilizes
dynamic = set(project_table.get("dynamic", []))
for field, getter in _PREVIOUSLY_DEFINED.items():
if not (field in project_table or field in dynamic):
value = getter(dist)
if value:
_WouldIgnoreField.emit(field=field, value=value)
_MissingDynamic.emit(field=field, value=value)
project_table[field] = _RESET_PREVIOUSLY_DEFINED.get(field)
def json_compatible_key(key: str) -> str:
@@ -119,14 +118,15 @@ def json_compatible_key(key: str) -> str:
return key.lower().replace("-", "_")
def _set_config(dist: "Distribution", field: str, value: Any):
def _set_config(dist: Distribution, field: str, value: Any):
val = _PREPROCESS.get(field, _noop)(dist, value)
setter = getattr(dist.metadata, f"set_{field}", None)
if setter:
setter(value)
setter(val)
elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
setattr(dist.metadata, field, value)
setattr(dist.metadata, field, val)
else:
setattr(dist, field, value)
setattr(dist, field, val)
_CONTENT_TYPES = {
@@ -136,7 +136,7 @@ _CONTENT_TYPES = {
}
def _guess_content_type(file: str) -> Optional[str]:
def _guess_content_type(file: str) -> str | None:
_, ext = os.path.splitext(file.lower())
if not ext:
return None
@@ -149,15 +149,16 @@ def _guess_content_type(file: str) -> Optional[str]:
raise ValueError(f"Undefined content type for {file}, {msg}")
def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path):
def _long_description(dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath):
from setuptools.config import expand
file: str | tuple[()]
if isinstance(val, str):
file: Union[str, list] = val
file = val
text = expand.read_files(file, root_dir)
ctype = _guess_content_type(val)
ctype = _guess_content_type(file)
else:
file = val.get("file") or []
file = val.get("file") or ()
text = val.get("text") or expand.read_files(file, root_dir)
ctype = val["content-type"]
@@ -167,10 +168,10 @@ def _long_description(dist: "Distribution", val: _DictOrStr, root_dir: _Path):
_set_config(dist, "long_description_content_type", ctype)
if file:
dist._referenced_files.add(cast(str, file))
dist._referenced_files.add(file)
def _license(dist: "Distribution", val: dict, root_dir: _Path):
def _license(dist: Distribution, val: dict, root_dir: StrPath):
from setuptools.config import expand
if "file" in val:
@@ -180,7 +181,7 @@ def _license(dist: "Distribution", val: dict, root_dir: _Path):
_set_config(dist, "license", val["text"])
def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str):
def _people(dist: Distribution, val: list[dict], _root_dir: StrPath, kind: str):
field = []
email_field = []
for person in val:
@@ -198,45 +199,60 @@ def _people(dist: "Distribution", val: List[dict], _root_dir: _Path, kind: str):
_set_config(dist, f"{kind}_email", ", ".join(email_field))
def _project_urls(dist: Distribution, val: dict, _root_dir):
    """Store the ``[project.urls]`` table as the distribution's ``project_urls``."""
    _set_config(dist, "project_urls", val)
def _python_requires(dist: Distribution, val: str, _root_dir):
    """Apply ``project.requires-python`` as a parsed ``SpecifierSet``.

    The import is kept local so ``packaging`` is only loaded when the
    field is actually present.
    """
    from packaging.specifiers import SpecifierSet

    _set_config(dist, "python_requires", SpecifierSet(val))
def _dependencies(dist: "Distribution", val: list, _root_dir):
def _dependencies(dist: Distribution, val: list, _root_dir):
if getattr(dist, "install_requires", []):
msg = "`install_requires` overwritten in `pyproject.toml` (dependencies)"
SetuptoolsWarning.emit(msg)
dist.install_requires = val
def _optional_dependencies(dist: "Distribution", val: dict, _root_dir):
def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
existing = getattr(dist, "extras_require", None) or {}
dist.extras_require = {**existing, **val}
def _ext_modules(dist: Distribution, val: list[dict]) -> list[Extension]:
    """Build ``Extension`` objects from ``[[tool.setuptools.ext-modules]]`` tables.

    Dashed TOML keys are normalised to the underscored keyword arguments
    expected by ``Extension``. Pre-existing extension modules on *dist*
    are preserved and the new ones appended after them.
    """
    combined = list(dist.ext_modules or [])
    for table in val:
        kwargs = {key.replace("-", "_"): value for key, value in table.items()}
        combined.append(Extension(**kwargs))
    return combined
def _noop(_dist: Distribution, val: _T) -> _T:
return val
def _unify_entry_points(project_table: dict):
project = project_table
entry_points = project.pop("entry-points", project.pop("entry_points", {}))
renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
for key, value in list(project.items()): # eager to allow modifications
norm_key = json_compatible_key(key)
if norm_key in renaming and value:
if norm_key in renaming:
# Don't skip even if value is empty (reason: reset missing `dynamic`)
entry_points[renaming[norm_key]] = project.pop(key)
if entry_points:
project["entry-points"] = {
name: [f"{k} = {v}" for k, v in group.items()]
for name, group in entry_points.items()
if group # now we can skip empty groups
}
# Sometimes this will set `project["entry-points"] = {}`, and that is
# intentional (for resetting configurations that are missing `dynamic`).
def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path):
def _copy_command_options(pyproject: dict, dist: Distribution, filename: StrPath):
tool_table = pyproject.get("tool", {})
cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
valid_options = _valid_command_options(cmdclass)
@@ -255,10 +271,11 @@ def _copy_command_options(pyproject: dict, dist: "Distribution", filename: _Path
_logger.warning(f"Command option {cmd}.{key} is not defined")
def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
from .._importlib import metadata
def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
from setuptools.dist import Distribution
from .._importlib import metadata
valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}
unloaded_entry_points = metadata.entry_points(group='distutils.commands')
@@ -272,7 +289,12 @@ def _valid_command_options(cmdclass: Mapping = EMPTY) -> Dict[str, Set[str]]:
return valid_options
def _load_ep(ep: "metadata.EntryPoint") -> Optional[Tuple[str, Type]]:
def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
if ep.value.startswith("wheel.bdist_wheel"):
# Ignore deprecated entrypoint from wheel and avoid warning pypa/wheel#631
# TODO: remove check when `bdist_wheel` has been fully removed from pypa/wheel
return None
# Ignore all the errors
try:
return (ep.name, ep.load())
@@ -286,22 +308,22 @@ def _normalise_cmd_option_key(name: str) -> str:
return json_compatible_key(name).strip("_=")
def _normalise_cmd_options(desc: _OptionsList) -> set[str]:
    """Return the normalised option keys for a distutils options list.

    Each entry of *desc* is a ``(long_name, short_name, help)`` triple;
    only the long name is relevant here.
    """
    return {_normalise_cmd_option_key(long_name) for long_name, *_ in desc}
def _get_previous_entrypoints(dist: "Distribution") -> Dict[str, list]:
def _get_previous_entrypoints(dist: Distribution) -> dict[str, list]:
ignore = ("console_scripts", "gui_scripts")
value = getattr(dist, "entry_points", None) or {}
return {k: v for k, v in value.items() if k not in ignore}
def _get_previous_scripts(dist: "Distribution") -> Optional[list]:
def _get_previous_scripts(dist: Distribution) -> list | None:
value = getattr(dist, "entry_points", None) or {}
return value.get("console_scripts")
def _get_previous_gui_scripts(dist: "Distribution") -> Optional[list]:
def _get_previous_gui_scripts(dist: Distribution) -> list | None:
value = getattr(dist, "entry_points", None) or {}
return value.get("gui_scripts")
@@ -341,7 +363,7 @@ def _some_attrgetter(*items):
return _acessor
PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
PYPROJECT_CORRESPONDENCE: dict[str, _Correspondence] = {
"readme": _long_description,
"license": _license,
"authors": partial(_people, kind="author"),
@@ -353,11 +375,11 @@ PYPROJECT_CORRESPONDENCE: Dict[str, _Correspondence] = {
}
TOOL_TABLE_RENAMES = {"script_files": "scripts"}
TOOL_TABLE_DEPRECATIONS = {
"namespace_packages": (
"consider using implicit namespaces instead (PEP 420).",
{"due_date": (2023, 10, 30)}, # warning introduced in May 2022
)
TOOL_TABLE_REMOVALS = {
"namespace_packages": """
Please migrate to implicit native namespaces instead.
See https://packaging.python.org/en/latest/guides/packaging-namespace-packages/.
""",
}
SETUPTOOLS_PATCHES = {
@@ -368,6 +390,10 @@ SETUPTOOLS_PATCHES = {
"license_files",
}
_PREPROCESS = {
"ext_modules": _ext_modules,
}
_PREVIOUSLY_DEFINED = {
"name": _attrgetter("metadata.name"),
"version": _attrgetter("metadata.version"),
@@ -388,14 +414,27 @@ _PREVIOUSLY_DEFINED = {
}
class _WouldIgnoreField(SetuptoolsDeprecationWarning):
_SUMMARY = "`{field}` defined outside of `pyproject.toml` would be ignored."
_RESET_PREVIOUSLY_DEFINED: dict = {
# Fix improper setting: given in `setup.py`, but not listed in `dynamic`
# dict: pyproject name => value to which reset
"license": {},
"authors": [],
"maintainers": [],
"keywords": [],
"classifiers": [],
"urls": {},
"entry-points": {},
"scripts": {},
"gui-scripts": {},
"dependencies": [],
"optional-dependencies": {},
}
class _MissingDynamic(SetuptoolsWarning):
_SUMMARY = "`{field}` defined outside of `pyproject.toml` is ignored."
_DETAILS = """
##########################################################################
# configuration would be ignored/result in error due to `pyproject.toml` #
##########################################################################
The following seems to be defined outside of `pyproject.toml`:
`{field} = {value!r}`
@@ -403,14 +442,16 @@ class _WouldIgnoreField(SetuptoolsDeprecationWarning):
According to the spec (see the link below), however, setuptools CANNOT
consider this value unless `{field}` is listed as `dynamic`.
https://packaging.python.org/en/latest/specifications/declaring-project-metadata/
https://packaging.python.org/en/latest/specifications/pyproject-toml/#declaring-project-metadata-the-project-table
For the time being, `setuptools` will still consider the given value (as a
**transitional** measure), but please note that future releases of setuptools will
follow strictly the standard.
To prevent this warning, you can list `{field}` under `dynamic` or alternatively
To prevent this problem, you can list `{field}` under `dynamic` or alternatively
remove the `[project]` table from your file and rely entirely on other means of
configuration.
"""
_DUE_DATE = (2023, 10, 30) # Initially introduced in 27 May 2022
# TODO: Consider removing this check in the future?
# There is a trade-off here between improving "debug-ability" and the cost
# of running/testing/maintaining these unnecessary checks...
@classmethod
def details(cls, field: str, value: Any) -> str:
return cls._DETAILS.format(field=field, value=value)

View File

@@ -0,0 +1,438 @@
The code contained in this directory was automatically generated using the
following command:
python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose -t distutils=setuptools/config/distutils.schema.json -t setuptools=setuptools/config/setuptools.schema.json
Please avoid changing it manually.
You can report issues or suggest changes directly to `validate-pyproject`
(or to the relevant plugin repository)
- https://github.com/abravalheri/validate-pyproject/issues
***
The following files include code from open-source projects
(either as direct copies or modified versions):
- `fastjsonschema_exceptions.py`:
- project: `fastjsonschema` - licensed under BSD-3-Clause
(https://github.com/horejsek/python-fastjsonschema)
- `extra_validations.py` and `format.py`, `error_reporting.py`:
- project: `validate-pyproject` - licensed under MPL-2.0
(https://github.com/abravalheri/validate-pyproject)
Additionally the following files are automatically generated by tools provided
by the same projects:
- `__init__.py`
- `fastjsonschema_validations.py`
The relevant copyright notes and licenses are included below.
***
`fastjsonschema`
================
Copyright (c) 2018, Michal Horejsek
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***
`validate-pyproject`
====================
Mozilla Public License, version 2.0
1. Definitions
1.1. "Contributor"
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. "Incompatible With Secondary Licenses"
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the terms of
a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in a
separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible, whether
at the time of the initial grant or subsequently, any and all of the
rights conveyed by this License.
1.10. "Modifications"
means any of the following:
a. any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the License,
by the making, using, selling, offering for sale, having made, import,
or transfer of either its Contributions or its Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, "control" means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights to
grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter the
recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty, or
limitations of liability) contained within the Source Code Form of the
Covered Software, except that You may alter any license notices to the
extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute,
judicial order, or regulation then You must: (a) comply with the terms of
this License to the maximum extent possible; and (b) describe the
limitations and the code they affect. Such description must be placed in a
text file included with all distributions of the Covered Software under
this License. Except to the extent prohibited by statute or regulation,
such description must be sufficiently detailed for a recipient of ordinary
skill to be able to understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing
basis, if such Contributor fails to notify You of the non-compliance by
some reasonable means prior to 60 days after You have come back into
compliance. Moreover, Your grants from a particular Contributor are
reinstated on an ongoing basis if such Contributor notifies You of the
non-compliance by some reasonable means, this is the first time You have
received notice of non-compliance with this License from such
Contributor, and You become compliant prior to 30 days after Your receipt
of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an "as is" basis,
without warranty of any kind, either expressed, implied, or statutory,
including, without limitation, warranties that the Covered Software is free
of defects, merchantable, fit for a particular purpose or non-infringing.
The entire risk as to the quality and performance of the Covered Software
is with You. Should any Covered Software prove defective in any respect,
You (not any Contributor) assume the cost of any necessary servicing,
repair, or correction. This disclaimer of warranty constitutes an essential
part of this License. No use of any Covered Software is authorized under
this License except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from
such party's negligence to the extent applicable law prohibits such
limitation. Some jurisdictions do not allow the exclusion or limitation of
incidental or consequential damages, so this exclusion and limitation may
not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts
of a jurisdiction where the defendant maintains its principal place of
business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions. Nothing
in this Section shall prevent a party's ability to bring cross-claims or
counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides that
the language of a contract shall be construed against the drafter shall not
be used to construe this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses If You choose to distribute Source Code Form that is
Incompatible With Secondary Licenses under the terms of this version of
the License, the notice described in Exhibit B of this License must be
attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
https://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file,
then You may include the notice in a location (such as a LICENSE file in a
relevant directory) where a recipient would be likely to look for such a
notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
This Source Code Form is "Incompatible
With Secondary Licenses", as defined by
the Mozilla Public License, v. 2.0.

View File

@@ -30,5 +30,5 @@ def validate(data: Any) -> bool:
"""
with detailed_errors():
_validate(data, custom_formats=FORMAT_FUNCTIONS)
reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
reduce(lambda acc, fn: fn(acc), EXTRA_VALIDATIONS, data)
return True

View File

@@ -3,12 +3,21 @@ import json
import logging
import os
import re
import typing
from contextlib import contextmanager
from textwrap import indent, wrap
from typing import Any, Dict, Iterator, List, Optional, Sequence, Union, cast
from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Union
from .fastjsonschema_exceptions import JsonSchemaValueException
if typing.TYPE_CHECKING:
import sys
if sys.version_info < (3, 11):
from typing_extensions import Self
else:
from typing import Self
_logger = logging.getLogger(__name__)
_MESSAGE_REPLACEMENTS = {
@@ -24,7 +33,7 @@ _SKIP_DETAILS = (
"must not be there",
)
_NEED_DETAILS = {"anyOf", "oneOf", "anyOf", "contains", "propertyNames", "not", "items"}
_NEED_DETAILS = {"anyOf", "oneOf", "allOf", "contains", "propertyNames", "not", "items"}
_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
_IDENTIFIER = re.compile(r"^[\w_]+$", re.I)
@@ -36,6 +45,11 @@ _TOML_JARGON = {
"property names": "keys",
}
_FORMATS_HELP = """
For more details about `format` see
https://validate-pyproject.readthedocs.io/en/latest/api/validate_pyproject.formats.html
"""
class ValidationError(JsonSchemaValueException):
"""Report violations of a given JSON schema.
@@ -59,7 +73,7 @@ class ValidationError(JsonSchemaValueException):
_original_message = ""
@classmethod
def _from_jsonschema(cls, ex: JsonSchemaValueException):
def _from_jsonschema(cls, ex: JsonSchemaValueException) -> "Self":
formatter = _ErrorFormatting(ex)
obj = cls(str(formatter), ex.value, formatter.name, ex.definition, ex.rule)
debug_code = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
@@ -72,7 +86,7 @@ class ValidationError(JsonSchemaValueException):
@contextmanager
def detailed_errors():
def detailed_errors() -> Generator[None, None, None]:
try:
yield
except JsonSchemaValueException as ex:
@@ -83,7 +97,7 @@ class _ErrorFormatting:
def __init__(self, ex: JsonSchemaValueException):
self.ex = ex
self.name = f"`{self._simplify_name(ex.name)}`"
self._original_message = self.ex.message.replace(ex.name, self.name)
self._original_message: str = self.ex.message.replace(ex.name, self.name)
self._summary = ""
self._details = ""
@@ -107,11 +121,12 @@ class _ErrorFormatting:
return self._details
def _simplify_name(self, name):
@staticmethod
def _simplify_name(name: str) -> str:
x = len("data.")
return name[x:] if name.startswith("data.") else name
def _expand_summary(self):
def _expand_summary(self) -> str:
msg = self._original_message
for bad, repl in _MESSAGE_REPLACEMENTS.items():
@@ -129,8 +144,9 @@ class _ErrorFormatting:
def _expand_details(self) -> str:
optional = []
desc_lines = self.ex.definition.pop("$$description", [])
desc = self.ex.definition.pop("description", None) or " ".join(desc_lines)
definition = self.ex.definition or {}
desc_lines = definition.pop("$$description", [])
desc = definition.pop("description", None) or " ".join(desc_lines)
if desc:
description = "\n".join(
wrap(
@@ -142,18 +158,20 @@ class _ErrorFormatting:
)
)
optional.append(f"DESCRIPTION:\n{description}")
schema = json.dumps(self.ex.definition, indent=4)
schema = json.dumps(definition, indent=4)
value = json.dumps(self.ex.value, indent=4)
defaults = [
f"GIVEN VALUE:\n{indent(value, ' ')}",
f"OFFENDING RULE: {self.ex.rule!r}",
f"DEFINITION:\n{indent(schema, ' ')}",
]
return "\n\n".join(optional + defaults)
msg = "\n\n".join(optional + defaults)
epilog = f"\n{_FORMATS_HELP}" if "format" in msg.lower() else ""
return msg + epilog
class _SummaryWriter:
_IGNORE = {"description", "default", "title", "examples"}
_IGNORE = frozenset(("description", "default", "title", "examples"))
def __init__(self, jargon: Optional[Dict[str, str]] = None):
self.jargon: Dict[str, str] = jargon or {}
@@ -242,7 +260,9 @@ class _SummaryWriter:
key = path[-1]
return any(key.startswith(k) for k in "$_") or key in self._IGNORE
def _filter_unecessary(self, schema: dict, path: Sequence[str]):
def _filter_unecessary(
self, schema: Dict[str, Any], path: Sequence[str]
) -> Dict[str, Any]:
return {
key: value
for key, value in schema.items()
@@ -271,7 +291,7 @@ class _SummaryWriter:
self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
)
def _is_property(self, path: Sequence[str]):
def _is_property(self, path: Sequence[str]) -> bool:
"""Check if the given path can correspond to an arbitrarily named property"""
counter = 0
for key in path[-2::-1]:
@@ -296,9 +316,7 @@ class _SummaryWriter:
def _value(self, value: Any, path: Sequence[str]) -> str:
if path[-1] == "type" and not self._is_property(path):
type_ = self._jargon(value)
return (
f"[{', '.join(type_)}]" if isinstance(value, list) else cast(str, type_)
)
return f"[{', '.join(type_)}]" if isinstance(type_, list) else type_
return repr(value)
def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:

View File

@@ -3,6 +3,7 @@ difficult to express as a JSON Schema (or that are not supported by the current
JSON Schema library).
"""
from inspect import cleandoc
from typing import Mapping, TypeVar
from .error_reporting import ValidationError
@@ -11,11 +12,16 @@ T = TypeVar("T", bound=Mapping)
class RedefiningStaticFieldAsDynamic(ValidationError):
"""According to PEP 621:
_DESC = """According to PEP 621:
Build back-ends MUST raise an error if the metadata specifies a field
statically as well as being listed in dynamic.
"""
__doc__ = _DESC
_URL = (
"https://packaging.python.org/en/latest/specifications/"
"pyproject-toml/#dynamic"
)
def validate_project_dynamic(pyproject: T) -> T:
@@ -24,11 +30,21 @@ def validate_project_dynamic(pyproject: T) -> T:
for field in dynamic:
if field in project_table:
msg = f"You cannot provide a value for `project.{field}` and "
msg += "list it under `project.dynamic` at the same time"
name = f"data.project.{field}"
value = {field: project_table[field], "...": " # ...", "dynamic": dynamic}
raise RedefiningStaticFieldAsDynamic(msg, value, name, rule="PEP 621")
raise RedefiningStaticFieldAsDynamic(
message=f"You cannot provide a value for `project.{field}` and "
"list it under `project.dynamic` at the same time",
value={
field: project_table[field],
"...": " # ...",
"dynamic": dynamic,
},
name=f"data.project.{field}",
definition={
"description": cleandoc(RedefiningStaticFieldAsDynamic._DESC),
"see": RedefiningStaticFieldAsDynamic._URL,
},
rule="PEP 621",
)
return pyproject

File diff suppressed because one or more lines are too long

View File

@@ -1,3 +1,13 @@
"""
The functions in this module are used to validate schemas with the
`format JSON Schema keyword
<https://json-schema.org/understanding-json-schema/reference/string#format>`_.
The correspondence is given by replacing the ``_`` character in the name of the
function with a ``-`` to obtain the format name and vice versa.
"""
import builtins
import logging
import os
import re
@@ -20,7 +30,7 @@ VERSION_PATTERN = r"""
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
(?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
@@ -48,6 +58,9 @@ VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
def pep440(version: str) -> bool:
"""See :ref:`PyPA's version specification <pypa:version-specifiers>`
(initially introduced in :pep:`440`).
"""
return VERSION_REGEX.match(version) is not None
@@ -59,6 +72,9 @@ PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
def pep508_identifier(name: str) -> bool:
"""See :ref:`PyPA's name specification <pypa:name-format>`
(initially introduced in :pep:`508#names`).
"""
return PEP508_IDENTIFIER_REGEX.match(name) is not None
@@ -67,9 +83,14 @@ try:
from packaging import requirements as _req
except ImportError: # pragma: no cover
# let's try setuptools vendored version
from setuptools._vendor.packaging import requirements as _req # type: ignore
from setuptools._vendor.packaging import ( # type: ignore[no-redef]
requirements as _req,
)
def pep508(value: str) -> bool:
"""See :ref:`PyPA's dependency specifiers <pypa:dependency-specifiers>`
(initially introduced in :pep:`508`).
"""
try:
_req.Requirement(value)
return True
@@ -88,7 +109,10 @@ except ImportError: # pragma: no cover
def pep508_versionspec(value: str) -> bool:
"""Expression that can be used to specify/lock versions (including ranges)"""
"""Expression that can be used to specify/lock versions (including ranges)
See ``versionspec`` in :ref:`PyPA's dependency specifiers
<pypa:dependency-specifiers>` (initially introduced in :pep:`508`).
"""
if any(c in value for c in (";", "]", "@")):
# In PEP 508:
# conditional markers, extras and URL specs are not included in the
@@ -104,6 +128,11 @@ def pep508_versionspec(value: str) -> bool:
def pep517_backend_reference(value: str) -> bool:
"""See PyPA's specification for defining build-backend references
introduced in :pep:`517#source-trees`.
This is similar to an entry-point reference (e.g., ``package.module:object``).
"""
module, _, obj = value.partition(":")
identifiers = (i.strip() for i in _chain(module.split("."), obj.split(".")))
return all(python_identifier(i) for i in identifiers if i)
@@ -120,10 +149,10 @@ def _download_classifiers() -> str:
url = "https://pypi.org/pypi?:action=list_classifiers"
context = ssl.create_default_context()
with urlopen(url, context=context) as response:
with urlopen(url, context=context) as response: # noqa: S310 (audit URLs)
headers = Message()
headers["content_type"] = response.getheader("content-type", "text/plain")
return response.read().decode(headers.get_param("charset", "utf-8"))
return response.read().decode(headers.get_param("charset", "utf-8")) # type: ignore[no-any-return]
class _TroveClassifier:
@@ -136,14 +165,14 @@ class _TroveClassifier:
downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]
def __init__(self):
def __init__(self) -> None:
self.downloaded = None
self._skip_download = False
# None => not cached yet
# False => cache not available
self.__name__ = "trove_classifier" # Emulate a public function
def _disable_download(self):
def _disable_download(self) -> None:
# This is a private API. Only setuptools has the consent of using it.
self._skip_download = True
@@ -180,6 +209,7 @@ try:
from trove_classifiers import classifiers as _trove_classifiers
def trove_classifier(value: str) -> bool:
"""See https://pypi.org/classifiers/"""
return value in _trove_classifiers or value.lower().startswith("private ::")
except ImportError: # pragma: no cover
@@ -191,6 +221,10 @@ except ImportError: # pragma: no cover
def pep561_stub_name(value: str) -> bool:
"""Name of a directory containing type stubs.
It must follow the name scheme ``<package>-stubs`` as defined in
:pep:`561#stub-only-packages`.
"""
top, *children = value.split(".")
if not top.endswith("-stubs"):
return False
@@ -202,6 +236,10 @@ def pep561_stub_name(value: str) -> bool:
def url(value: str) -> bool:
"""Valid URL (validation uses :obj:`urllib.parse`).
For maximum compatibility please make sure to include a ``scheme`` prefix
in your URL (e.g. ``http://``).
"""
from urllib.parse import urlparse
try:
@@ -230,24 +268,59 @@ ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
def python_identifier(value: str) -> bool:
"""Can be used as identifier in Python.
(Validation uses :obj:`str.isidentifier`).
"""
return value.isidentifier()
def python_qualified_identifier(value: str) -> bool:
"""
Python "dotted identifier", i.e. a sequence of :obj:`python_identifier`
concatenated with ``"."`` (e.g.: ``package.module.submodule``).
"""
if value.startswith(".") or value.endswith("."):
return False
return all(python_identifier(m) for m in value.split("."))
def python_module_name(value: str) -> bool:
"""Module name that can be used in an ``import``-statement in Python.
See :obj:`python_qualified_identifier`.
"""
return python_qualified_identifier(value)
def python_module_name_relaxed(value: str) -> bool:
"""Similar to :obj:`python_module_name`, but relaxed to also accept
dash characters (``-``) and cover special cases like ``pip-run``.
It is recommended, however, that beginners avoid dash characters,
as they require advanced knowledge about Python internals.
The following are disallowed:
* names starting/ending in dashes,
* names ending in ``-stubs`` (potentially collide with :obj:`pep561_stub_name`).
"""
if value.startswith("-") or value.endswith("-"):
return False
if value.endswith("-stubs"):
return False # Avoid collision with PEP 561
return python_module_name(value.replace("-", "_"))
def python_entrypoint_group(value: str) -> bool:
"""See ``Data model > group`` in the :ref:`PyPA's entry-points specification
<pypa:entry-points>`.
"""
return ENTRYPOINT_GROUP_REGEX.match(value) is not None
def python_entrypoint_name(value: str) -> bool:
"""See ``Data model > name`` in the :ref:`PyPA's entry-points specification
<pypa:entry-points>`.
"""
if not ENTRYPOINT_REGEX.match(value):
return False
if not RECOMMEDED_ENTRYPOINT_REGEX.match(value):
@@ -258,6 +331,13 @@ def python_entrypoint_name(value: str) -> bool:
def python_entrypoint_reference(value: str) -> bool:
"""Reference to a Python object using in the format::
importable.module:object.attr
See ``Data model >object reference`` in the :ref:`PyPA's entry-points specification
<pypa:entry-points>`.
"""
module, _, rest = value.partition(":")
if "[" in rest:
obj, _, extras_ = rest.partition("[")
@@ -273,3 +353,23 @@ def python_entrypoint_reference(value: str) -> bool:
module_parts = module.split(".")
identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
return all(python_identifier(i.strip()) for i in identifiers)
def uint8(value: builtins.int) -> bool:
r"""Unsigned 8-bit integer (:math:`0 \leq x < 2^8`)"""
return 0 <= value < 2**8
def uint16(value: builtins.int) -> bool:
r"""Unsigned 16-bit integer (:math:`0 \leq x < 2^{16}`)"""
return 0 <= value < 2**16
def uint(value: builtins.int) -> bool:
r"""Unsigned 64-bit integer (:math:`0 \leq x < 2^{64}`)"""
return 0 <= value < 2**64
def int(value: builtins.int) -> bool:
r"""Signed 64-bit integer (:math:`-2^{63} \leq x < 2^{63}`)"""
return -(2**63) <= value < 2**63

View File

@@ -0,0 +1,26 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html",
"title": "``tool.distutils`` table",
"$$description": [
"**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``",
"subtables to configure arguments for ``distutils`` commands.",
"Originally, ``distutils`` allowed developers to configure arguments for",
"``setup.py`` commands via `distutils configuration files",
"<https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html>`_.",
"See also `the old Python docs <https://docs.python.org/3.11/install/>_`."
],
"type": "object",
"properties": {
"global": {
"type": "object",
"description": "Global options applied to all ``distutils`` commands"
}
},
"patternProperties": {
".+": {"type": "object"}
},
"$comment": "TODO: Is there a practical way of making this schema more specific?"
}

View File

@@ -17,45 +17,33 @@ functions among several configuration file formats.
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
"""
from __future__ import annotations
import ast
import importlib
import io
import os
import pathlib
import sys
from glob import iglob
from configparser import ConfigParser
from importlib.machinery import ModuleSpec
from glob import iglob
from importlib.machinery import ModuleSpec, all_suffixes
from itertools import chain
from typing import (
TYPE_CHECKING,
Callable,
Dict,
Iterable,
Iterator,
List,
Mapping,
Optional,
Tuple,
TypeVar,
Union,
cast,
)
from pathlib import Path
from types import ModuleType
from types import ModuleType, TracebackType
from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, TypeVar
from .._path import StrPath, same_path as _same_path
from ..discovery import find_package_path
from ..warnings import SetuptoolsWarning
from distutils.errors import DistutilsOptionError
from .._path import same_path as _same_path
from ..warnings import SetuptoolsWarning
if TYPE_CHECKING:
from setuptools.dist import Distribution # noqa
from setuptools.discovery import ConfigDiscovery # noqa
from distutils.dist import DistributionMetadata # noqa
from typing_extensions import Self
from setuptools.dist import Distribution
chain_iter = chain.from_iterable
_Path = Union[str, os.PathLike]
_K = TypeVar("_K")
_V = TypeVar("_V", covariant=True)
@@ -64,18 +52,18 @@ class StaticModule:
"""Proxy to a module object that avoids executing arbitrary code."""
def __init__(self, name: str, spec: ModuleSpec):
module = ast.parse(pathlib.Path(spec.origin).read_bytes())
module = ast.parse(pathlib.Path(spec.origin).read_bytes()) # type: ignore[arg-type] # Let it raise an error on None
vars(self).update(locals())
del self.self
def _find_assignments(self) -> Iterator[Tuple[ast.AST, ast.AST]]:
def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
for statement in self.module.body:
if isinstance(statement, ast.Assign):
yield from ((target, statement.value) for target in statement.targets)
elif isinstance(statement, ast.AnnAssign) and statement.value:
yield (statement.target, statement.value)
def __getattr__(self, attr):
def __getattr__(self, attr: str):
"""Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
try:
return next(
@@ -88,8 +76,8 @@ class StaticModule:
def glob_relative(
patterns: Iterable[str], root_dir: Optional[_Path] = None
) -> List[str]:
patterns: Iterable[str], root_dir: StrPath | None = None
) -> list[str]:
"""Expand the list of glob patterns, but preserving relative paths.
:param list[str] patterns: List of glob patterns
@@ -120,14 +108,16 @@ def glob_relative(
return expanded_values
def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) -> str:
def read_files(
filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
) -> str:
"""Return the content of the files concatenated using ``\n`` as str
This function is sandboxed and won't reach anything outside ``root_dir``
(By default ``root_dir`` is the current directory).
"""
from setuptools.extern.more_itertools import always_iterable
from more_itertools import always_iterable
root_dir = os.path.abspath(root_dir or os.getcwd())
_filepaths = (os.path.join(root_dir, path) for path in always_iterable(filepaths))
@@ -138,7 +128,7 @@ def read_files(filepaths: Union[str, bytes, Iterable[_Path]], root_dir=None) ->
)
def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
for path in filepaths:
if os.path.isfile(path):
yield path
@@ -146,12 +136,12 @@ def _filter_existing_files(filepaths: Iterable[_Path]) -> Iterator[_Path]:
SetuptoolsWarning.emit(f"File {path!r} cannot be found")
def _read_file(filepath: Union[bytes, _Path]) -> str:
with io.open(filepath, encoding='utf-8') as f:
def _read_file(filepath: bytes | StrPath) -> str:
with open(filepath, encoding='utf-8') as f:
return f.read()
def _assert_local(filepath: _Path, root_dir: str):
def _assert_local(filepath: StrPath, root_dir: str):
if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
raise DistutilsOptionError(msg)
@@ -161,9 +151,9 @@ def _assert_local(filepath: _Path, root_dir: str):
def read_attr(
attr_desc: str,
package_dir: Optional[Mapping[str, str]] = None,
root_dir: Optional[_Path] = None,
):
package_dir: Mapping[str, str] | None = None,
root_dir: StrPath | None = None,
) -> Any:
"""Reads the value of an attribute from a module.
This function will try to read the attributed statically first
@@ -186,7 +176,7 @@ def read_attr(
attr_name = attrs_path.pop()
module_name = '.'.join(attrs_path)
module_name = module_name or '__init__'
_parent_path, path, module_name = _find_module(module_name, package_dir, root_dir)
path = _find_module(module_name, package_dir, root_dir)
spec = _find_spec(module_name, path)
try:
@@ -197,7 +187,7 @@ def read_attr(
return getattr(module, attr_name)
def _find_spec(module_name: str, module_path: Optional[_Path]) -> ModuleSpec:
def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
spec = importlib.util.spec_from_file_location(module_name, module_path)
spec = spec or importlib.util.find_spec(module_name)
@@ -218,43 +208,32 @@ def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
def _find_module(
module_name: str, package_dir: Optional[Mapping[str, str]], root_dir: _Path
) -> Tuple[_Path, Optional[str], str]:
"""Given a module (that could normally be imported by ``module_name``
after the build is complete), find the path to the parent directory where
it is contained and the canonical name that could be used to import it
considering the ``package_dir`` in the build configuration and ``root_dir``
"""
parent_path = root_dir
module_parts = module_name.split('.')
if package_dir:
if module_parts[0] in package_dir:
# A custom path was specified for the module we want to import
custom_path = package_dir[module_parts[0]]
parts = custom_path.rsplit('/', 1)
if len(parts) > 1:
parent_path = os.path.join(root_dir, parts[0])
parent_module = parts[1]
else:
parent_module = custom_path
module_name = ".".join([parent_module, *module_parts[1:]])
elif '' in package_dir:
# A custom parent directory was specified for all root modules
parent_path = os.path.join(root_dir, package_dir[''])
module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
) -> str | None:
"""Find the path to the module named ``module_name``,
considering the ``package_dir`` in the build configuration and ``root_dir``.
path_start = os.path.join(parent_path, *module_name.split("."))
candidates = chain(
(f"{path_start}.py", os.path.join(path_start, "__init__.py")),
iglob(f"{path_start}.*"),
>>> tmp = getfixture('tmpdir')
>>> _ = tmp.ensure("a/b/c.py")
>>> _ = tmp.ensure("a/b/d/__init__.py")
>>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
>>> r(_find_module("a.b.c", None, tmp))
'tmp/a/b/c.py'
>>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
'tmp/a/b/d/__init__.py'
"""
path_start = find_package_path(module_name, package_dir or {}, root_dir)
candidates = chain.from_iterable(
(f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))
for ext in all_suffixes()
)
module_path = next((x for x in candidates if os.path.isfile(x)), None)
return parent_path, module_path, module_name
return next((x for x in candidates if os.path.isfile(x)), None)
def resolve_class(
qualified_class_name: str,
package_dir: Optional[Mapping[str, str]] = None,
root_dir: Optional[_Path] = None,
package_dir: Mapping[str, str] | None = None,
root_dir: StrPath | None = None,
) -> Callable:
"""Given a qualified class name, return the associated class object"""
root_dir = root_dir or os.getcwd()
@@ -262,16 +241,16 @@ def resolve_class(
class_name = qualified_class_name[idx + 1 :]
pkg_name = qualified_class_name[:idx]
_parent_path, path, module_name = _find_module(pkg_name, package_dir, root_dir)
module = _load_spec(_find_spec(module_name, path), module_name)
path = _find_module(pkg_name, package_dir, root_dir)
module = _load_spec(_find_spec(pkg_name, path), pkg_name)
return getattr(module, class_name)
def cmdclass(
values: Dict[str, str],
package_dir: Optional[Mapping[str, str]] = None,
root_dir: Optional[_Path] = None,
) -> Dict[str, Callable]:
values: dict[str, str],
package_dir: Mapping[str, str] | None = None,
root_dir: StrPath | None = None,
) -> dict[str, Callable]:
"""Given a dictionary mapping command names to strings for qualified class
names, apply :func:`resolve_class` to the dict values.
"""
@@ -281,10 +260,10 @@ def cmdclass(
def find_packages(
*,
namespaces=True,
fill_package_dir: Optional[Dict[str, str]] = None,
root_dir: Optional[_Path] = None,
fill_package_dir: dict[str, str] | None = None,
root_dir: StrPath | None = None,
**kwargs,
) -> List[str]:
) -> list[str]:
"""Works similarly to :func:`setuptools.find_packages`, but with all
arguments given as keyword arguments. Moreover, ``where`` can be given
as a list (the results will be simply concatenated).
@@ -302,8 +281,9 @@ def find_packages(
:rtype: list
"""
from more_itertools import always_iterable, unique_everseen
from setuptools.discovery import construct_package_dir
from setuptools.extern.more_itertools import unique_everseen, always_iterable
if namespaces:
from setuptools.discovery import PEP420PackageFinder as PackageFinder
@@ -312,7 +292,7 @@ def find_packages(
root_dir = root_dir or os.curdir
where = kwargs.pop('where', ['.'])
packages: List[str] = []
packages: list[str] = []
fill_package_dir = {} if fill_package_dir is None else fill_package_dir
search = list(unique_everseen(always_iterable(where)))
@@ -331,27 +311,22 @@ def find_packages(
return packages
def _nest_path(parent: _Path, path: _Path) -> str:
def _nest_path(parent: StrPath, path: StrPath) -> str:
path = parent if path in {".", ""} else os.path.join(parent, path)
return os.path.normpath(path)
def version(value: Union[Callable, Iterable[Union[str, int]], str]) -> str:
def version(value: Callable | Iterable[str | int] | str) -> str:
"""When getting the version directly from an attribute,
it should be normalised to string.
"""
if callable(value):
value = value()
_value = value() if callable(value) else value
value = cast(Iterable[Union[str, int]], value)
if not isinstance(value, str):
if hasattr(value, '__iter__'):
value = '.'.join(map(str, value))
else:
value = '%s' % value
return value
if isinstance(_value, str):
return _value
if hasattr(_value, '__iter__'):
return '.'.join(map(str, _value))
return '%s' % _value
def canonic_package_data(package_data: dict) -> dict:
@@ -361,8 +336,8 @@ def canonic_package_data(package_data: dict) -> dict:
def canonic_data_files(
data_files: Union[list, dict], root_dir: Optional[_Path] = None
) -> List[Tuple[str, List[str]]]:
data_files: list | dict, root_dir: StrPath | None = None
) -> list[tuple[str, list[str]]]:
"""For compatibility with ``setup.py``, ``data_files`` should be a list
of pairs instead of a dict.
@@ -377,7 +352,7 @@ def canonic_data_files(
]
def entry_points(text: str, text_source="entry-points") -> Dict[str, dict]:
def entry_points(text: str, text_source="entry-points") -> dict[str, dict]:
"""Given the contents of entry-points file,
process it into a 2-level dictionary (``dict[str, dict[str, str]]``).
The first level keys are entry-point groups, the second level keys are
@@ -402,7 +377,7 @@ class EnsurePackagesDiscovered:
and those might not have been processed yet.
"""
def __init__(self, distribution: "Distribution"):
def __init__(self, distribution: Distribution):
self._dist = distribution
self._called = False
@@ -412,10 +387,15 @@ class EnsurePackagesDiscovered:
self._called = True
self._dist.set_defaults(name=False) # Skip name, we can still be parsing
def __enter__(self):
def __enter__(self) -> Self:
return self
def __exit__(self, _exc_type, _exc_value, _traceback):
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
if self._called:
self._dist.set_defaults.analyse_name() # Now we can set a default name
@@ -446,7 +426,7 @@ class LazyMappingProxy(Mapping[_K, _V]):
def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
self._obtain = obtain_mapping_value
self._value: Optional[Mapping[_K, _V]] = None
self._value: Mapping[_K, _V] | None = None
def _target(self) -> Mapping[_K, _V]:
if self._value is None:

View File

@@ -8,33 +8,38 @@ To read project metadata, consider using
For simple scenarios, you can also try parsing the file directly
with the help of ``tomllib`` or ``tomli``.
"""
from __future__ import annotations
import logging
import os
from contextlib import contextmanager
from functools import partial
from typing import TYPE_CHECKING, Callable, Dict, Mapping, Optional, Set, Union
from types import TracebackType
from typing import TYPE_CHECKING, Any, Callable, Mapping
from ..errors import FileError, OptionError
from .._path import StrPath
from ..errors import FileError, InvalidConfigError
from ..warnings import SetuptoolsWarning
from . import expand as _expand
from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _WouldIgnoreField
from ._apply_pyprojecttoml import apply as _apply
from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic, apply as _apply
if TYPE_CHECKING:
from setuptools.dist import Distribution # noqa
from typing_extensions import Self
from setuptools.dist import Distribution
_Path = Union[str, os.PathLike]
_logger = logging.getLogger(__name__)
def load_file(filepath: _Path) -> dict:
from setuptools.extern import tomli # type: ignore
def load_file(filepath: StrPath) -> dict:
from ..compat.py310 import tomllib
with open(filepath, "rb") as file:
return tomli.load(file)
return tomllib.load(file)
def validate(config: dict, filepath: _Path) -> bool:
def validate(config: dict, filepath: StrPath) -> bool:
from . import _validate_pyproject as validator
trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
@@ -56,10 +61,10 @@ def validate(config: dict, filepath: _Path) -> bool:
def apply_configuration(
dist: "Distribution",
filepath: _Path,
dist: Distribution,
filepath: StrPath,
ignore_option_errors=False,
) -> "Distribution":
) -> Distribution:
"""Apply the configuration from a ``pyproject.toml`` file into an existing
distribution object.
"""
@@ -68,11 +73,11 @@ def apply_configuration(
def read_configuration(
filepath: _Path,
filepath: StrPath,
expand=True,
ignore_option_errors=False,
dist: Optional["Distribution"] = None,
):
dist: Distribution | None = None,
) -> dict[str, Any]:
"""Read given configuration file and returns options from it as a dict.
:param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
@@ -106,6 +111,10 @@ def read_configuration(
if not asdict or not (project_table or setuptools_table):
return {} # User is not using pyproject to configure setuptools
if "setuptools" in asdict.get("tools", {}):
# let the user know they probably have a typo in their metadata
_ToolsTypoInMetadata.emit()
if "distutils" in tool_table:
_ExperimentalConfiguration.emit(subject="[tool.distutils]")
@@ -113,7 +122,7 @@ def read_configuration(
# the default would be an improvement.
# `ini2toml` backfills include_package_data=False when nothing is explicitly given,
# therefore setting a default here is backwards compatible.
if dist and getattr(dist, "include_package_data", None) is not None:
if dist and dist.include_package_data is not None:
setuptools_table.setdefault("include-package-data", dist.include_package_data)
else:
setuptools_table.setdefault("include-package-data", True)
@@ -121,6 +130,9 @@ def read_configuration(
asdict["tool"] = tool_table
tool_table["setuptools"] = setuptools_table
if "ext-modules" in setuptools_table:
_ExperimentalConfiguration.emit(subject="[tool.setuptools.ext-modules]")
with _ignore_errors(ignore_option_errors):
# Don't complain about unrelated errors (e.g. tools not using the "tool" table)
subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
@@ -135,9 +147,9 @@ def read_configuration(
def expand_configuration(
config: dict,
root_dir: Optional[_Path] = None,
root_dir: StrPath | None = None,
ignore_option_errors: bool = False,
dist: Optional["Distribution"] = None,
dist: Distribution | None = None,
) -> dict:
"""Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
find their final values.
@@ -160,9 +172,9 @@ class _ConfigExpander:
def __init__(
self,
config: dict,
root_dir: Optional[_Path] = None,
root_dir: StrPath | None = None,
ignore_option_errors: bool = False,
dist: Optional["Distribution"] = None,
dist: Distribution | None = None,
):
self.config = config
self.root_dir = root_dir or os.getcwd()
@@ -172,9 +184,9 @@ class _ConfigExpander:
self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
self.ignore_option_errors = ignore_option_errors
self._dist = dist
self._referenced_files: Set[str] = set()
self._referenced_files: set[str] = set()
def _ensure_dist(self) -> "Distribution":
def _ensure_dist(self) -> Distribution:
from setuptools.dist import Distribution
attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
@@ -227,7 +239,7 @@ class _ConfigExpander:
cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
def _expand_all_dynamic(self, dist: "Distribution", package_dir: Mapping[str, str]):
def _expand_all_dynamic(self, dist: Distribution, package_dir: Mapping[str, str]):
special = ( # need special handling
"version",
"readme",
@@ -257,7 +269,7 @@ class _ConfigExpander:
updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
self.project_cfg.update(updates)
def _ensure_previously_set(self, dist: "Distribution", field: str):
def _ensure_previously_set(self, dist: Distribution, field: str):
previous = _PREVIOUSLY_DEFINED[field](dist)
if previous is None and not self.ignore_option_errors:
msg = (
@@ -265,12 +277,12 @@ class _ConfigExpander:
"Some dynamic fields need to be specified via `tool.setuptools.dynamic`"
"\nothers must be specified via the equivalent attribute in `setup.py`."
)
raise OptionError(msg)
raise InvalidConfigError(msg)
def _expand_directive(
self, specifier: str, directive, package_dir: Mapping[str, str]
):
from setuptools.extern.more_itertools import always_iterable # type: ignore
from more_itertools import always_iterable
with _ignore_errors(self.ignore_option_errors):
root_dir = self.root_dir
@@ -282,7 +294,7 @@ class _ConfigExpander:
raise ValueError(f"invalid `{specifier}`: {directive!r}")
return None
def _obtain(self, dist: "Distribution", field: str, package_dir: Mapping[str, str]):
def _obtain(self, dist: Distribution, field: str, package_dir: Mapping[str, str]):
if field in self.dynamic_cfg:
return self._expand_directive(
f"tool.setuptools.dynamic.{field}",
@@ -292,29 +304,33 @@ class _ConfigExpander:
self._ensure_previously_set(dist, field)
return None
def _obtain_version(self, dist: "Distribution", package_dir: Mapping[str, str]):
def _obtain_version(self, dist: Distribution, package_dir: Mapping[str, str]):
# Since plugins can set version, let's silently skip if it cannot be obtained
if "version" in self.dynamic and "version" in self.dynamic_cfg:
return _expand.version(self._obtain(dist, "version", package_dir))
return _expand.version(
# We already do an early check for the presence of "version"
self._obtain(dist, "version", package_dir) # pyright: ignore[reportArgumentType]
)
return None
def _obtain_readme(self, dist: "Distribution") -> Optional[Dict[str, str]]:
def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
if "readme" not in self.dynamic:
return None
dynamic_cfg = self.dynamic_cfg
if "readme" in dynamic_cfg:
return {
# We already do an early check for the presence of "readme"
"text": self._obtain(dist, "readme", {}),
"content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
}
} # pyright: ignore[reportReturnType]
self._ensure_previously_set(dist, "readme")
return None
def _obtain_entry_points(
self, dist: "Distribution", package_dir: Mapping[str, str]
) -> Optional[Dict[str, dict]]:
self, dist: Distribution, package_dir: Mapping[str, str]
) -> dict[str, dict] | None:
fields = ("entry-points", "scripts", "gui-scripts")
if not any(field in self.dynamic for field in fields):
return None
@@ -330,9 +346,7 @@ class _ConfigExpander:
if group in groups:
value = groups.pop(group)
if field not in self.dynamic:
_WouldIgnoreField.emit(field=field, value=value)
# TODO: Don't set field when support for pyproject.toml stabilizes
# instead raise an error as specified in PEP 621
raise InvalidConfigError(_MissingDynamic.details(field, value))
expanded[field] = value
_set_scripts("scripts", "console_scripts")
@@ -340,21 +354,21 @@ class _ConfigExpander:
return expanded
def _obtain_classifiers(self, dist: "Distribution"):
def _obtain_classifiers(self, dist: Distribution):
if "classifiers" in self.dynamic:
value = self._obtain(dist, "classifiers", {})
if value:
return value.splitlines()
return None
def _obtain_dependencies(self, dist: "Distribution"):
def _obtain_dependencies(self, dist: Distribution):
if "dependencies" in self.dynamic:
value = self._obtain(dist, "dependencies", {})
if value:
return _parse_requirements_list(value)
return None
def _obtain_optional_dependencies(self, dist: "Distribution"):
def _obtain_optional_dependencies(self, dist: Distribution):
if "optional-dependencies" not in self.dynamic:
return None
if "optional-dependencies" in self.dynamic_cfg:
@@ -396,18 +410,18 @@ def _ignore_errors(ignore_option_errors: bool):
class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
def __init__(
self, distribution: "Distribution", project_cfg: dict, setuptools_cfg: dict
self, distribution: Distribution, project_cfg: dict, setuptools_cfg: dict
):
super().__init__(distribution)
self._project_cfg = project_cfg
self._setuptools_cfg = setuptools_cfg
def __enter__(self):
def __enter__(self) -> Self:
"""When entering the context, the values of ``packages``, ``py_modules`` and
``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
"""
dist, cfg = self._dist, self._setuptools_cfg
package_dir: Dict[str, str] = cfg.setdefault("package-dir", {})
package_dir: dict[str, str] = cfg.setdefault("package-dir", {})
package_dir.update(dist.package_dir or {})
dist.package_dir = package_dir # needs to be the same object
@@ -424,7 +438,12 @@ class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
return super().__enter__()
def __exit__(self, exc_type, exc_value, traceback):
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_value: BaseException | None,
traceback: TracebackType | None,
) -> None:
"""When exiting the context, if values of ``packages``, ``py_modules`` and
``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
"""
@@ -439,3 +458,9 @@ class _ExperimentalConfiguration(SetuptoolsWarning):
"`{subject}` in `pyproject.toml` is still *experimental* "
"and likely to change in future releases."
)
class _ToolsTypoInMetadata(SetuptoolsWarning):
_SUMMARY = (
"Ignoring [tools.setuptools] in pyproject.toml, did you mean [tool.setuptools]?"
)

View File

@@ -8,41 +8,44 @@ To read project metadata, consider using
For simple scenarios, you can also try parsing the file directly
with the help of ``configparser``.
"""
from __future__ import annotations
import contextlib
import functools
import os
from collections import defaultdict
from functools import partial
from functools import wraps
from functools import partial, wraps
from typing import (
TYPE_CHECKING,
Callable,
Any,
Callable,
Dict,
Generic,
Iterable,
Iterator,
List,
Optional,
Set,
Tuple,
TypeVar,
Union,
cast,
)
from packaging.markers import default_environment as marker_env
from packaging.requirements import InvalidRequirement, Requirement
from packaging.specifiers import SpecifierSet
from packaging.version import InvalidVersion, Version
from .._path import StrPath
from ..errors import FileError, OptionError
from ..extern.packaging.markers import default_environment as marker_env
from ..extern.packaging.requirements import InvalidRequirement, Requirement
from ..extern.packaging.specifiers import SpecifierSet
from ..extern.packaging.version import InvalidVersion, Version
from ..warnings import SetuptoolsDeprecationWarning
from . import expand
if TYPE_CHECKING:
from distutils.dist import DistributionMetadata # noqa
from setuptools.dist import Distribution
from setuptools.dist import Distribution # noqa
from distutils.dist import DistributionMetadata
_Path = Union[str, os.PathLike]
SingleCommandOptions = Dict["str", Tuple["str", Any]]
"""Dict that associate the name of the options of a particular command to a
tuple. The first element of the tuple indicates the origin of the option value
@@ -54,7 +57,7 @@ Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
def read_configuration(
filepath: _Path, find_others=False, ignore_option_errors=False
filepath: StrPath, find_others=False, ignore_option_errors=False
) -> dict:
"""Read given configuration file and returns options from it as a dict.
@@ -79,7 +82,7 @@ def read_configuration(
return configuration_to_dict(handlers)
def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution":
def apply_configuration(dist: Distribution, filepath: StrPath) -> Distribution:
"""Apply the configuration from a ``setup.cfg`` file into an existing
distribution object.
"""
@@ -89,11 +92,11 @@ def apply_configuration(dist: "Distribution", filepath: _Path) -> "Distribution"
def _apply(
dist: "Distribution",
filepath: _Path,
other_files: Iterable[_Path] = (),
dist: Distribution,
filepath: StrPath,
other_files: Iterable[StrPath] = (),
ignore_option_errors: bool = False,
) -> Tuple["ConfigHandler", ...]:
) -> tuple[ConfigHandler, ...]:
"""Read configuration from ``filepath`` and applies to the ``dist`` object."""
from setuptools.dist import _Distribution
@@ -107,7 +110,8 @@ def _apply(
filenames = [*other_files, filepath]
try:
_Distribution.parse_config_files(dist, filenames=filenames)
# TODO: Temporary cast until mypy 1.12 is released with upstream fixes from typeshed
_Distribution.parse_config_files(dist, filenames=cast(List[str], filenames))
handlers = parse_configuration(
dist, dist.command_options, ignore_option_errors=ignore_option_errors
)
@@ -130,7 +134,7 @@ def _get_option(target_obj: Target, key: str):
return getter()
def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
def configuration_to_dict(handlers: tuple[ConfigHandler, ...]) -> dict:
"""Returns configuration data gathered by given handlers as a dict.
:param list[ConfigHandler] handlers: Handlers list,
@@ -149,10 +153,10 @@ def configuration_to_dict(handlers: Tuple["ConfigHandler", ...]) -> dict:
def parse_configuration(
distribution: "Distribution",
distribution: Distribution,
command_options: AllCommandOptions,
ignore_option_errors=False,
) -> Tuple["ConfigMetadataHandler", "ConfigOptionsHandler"]:
) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
"""Performs additional parsing of configuration options
for a distribution.
@@ -235,7 +239,7 @@ class ConfigHandler(Generic[Target]):
"""
aliases: Dict[str, str] = {}
aliases: dict[str, str] = {}
"""Options aliases.
For compatibility with various packages. E.g.: d2to1 and pbr.
Note: `-` in keys is replaced with `_` by config parser.
@@ -252,15 +256,17 @@ class ConfigHandler(Generic[Target]):
self.ignore_option_errors = ignore_option_errors
self.target_obj = target_obj
self.sections = dict(self._section_options(options))
self.set_options: List[str] = []
self.set_options: list[str] = []
self.ensure_discovered = ensure_discovered
self._referenced_files: Set[str] = set()
self._referenced_files: set[str] = set()
"""After parsing configurations, this property will enumerate
all files referenced by the "file:" directive. Private API for setuptools only.
"""
@classmethod
def _section_options(cls, options: AllCommandOptions):
def _section_options(
cls, options: AllCommandOptions
) -> Iterator[tuple[str, SingleCommandOptions]]:
for full_name, value in options.items():
pre, sep, name = full_name.partition(cls.section_prefix)
if pre:
@@ -274,7 +280,7 @@ class ConfigHandler(Generic[Target]):
'%s must provide .parsers property' % self.__class__.__name__
)
def __setitem__(self, option_name, value):
def __setitem__(self, option_name, value) -> None:
target_obj = self.target_obj
# Translate alias into real name.
@@ -282,8 +288,8 @@ class ConfigHandler(Generic[Target]):
try:
current_value = getattr(target_obj, option_name)
except AttributeError:
raise KeyError(option_name)
except AttributeError as e:
raise KeyError(option_name) from e
if current_value:
# Already inhabited. Skipping.
@@ -370,7 +376,7 @@ class ConfigHandler(Generic[Target]):
return parser
def _parse_file(self, value, root_dir: _Path):
def _parse_file(self, value, root_dir: StrPath):
"""Represents value as a string, allowing including text
from nearest files using `file:` directive.
@@ -396,7 +402,7 @@ class ConfigHandler(Generic[Target]):
self._referenced_files.update(filepaths)
return expand.read_files(filepaths, root_dir)
def _parse_attr(self, value, package_dir, root_dir: _Path):
def _parse_attr(self, value, package_dir, root_dir: StrPath):
"""Represents value as a module attribute.
Examples:
@@ -474,7 +480,7 @@ class ConfigHandler(Generic[Target]):
# Keep silent for a new option may appear anytime.
self[name] = value
def parse(self):
def parse(self) -> None:
"""Parses configuration file items from one
or more related sections.
@@ -484,7 +490,7 @@ class ConfigHandler(Generic[Target]):
if section_name: # [section.option] variant
method_postfix = '_%s' % section_name
section_parser_method: Optional[Callable] = getattr(
section_parser_method: Callable | None = getattr(
self,
# Dots in section names are translated into dunderscores.
('parse_section%s' % method_postfix).replace('.', '__'),
@@ -533,12 +539,12 @@ class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
def __init__(
self,
target_obj: "DistributionMetadata",
target_obj: DistributionMetadata,
options: AllCommandOptions,
ignore_option_errors: bool,
ensure_discovered: expand.EnsurePackagesDiscovered,
package_dir: Optional[dict] = None,
root_dir: _Path = os.curdir,
package_dir: dict | None = None,
root_dir: StrPath = os.curdir,
):
super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
self.package_dir = package_dir
@@ -556,23 +562,9 @@ class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
'platforms': parse_list,
'keywords': parse_list,
'provides': parse_list,
'requires': self._deprecated_config_handler(
parse_list,
"The requires parameter is deprecated, please use "
"install_requires for runtime dependencies.",
due_date=(2023, 10, 30),
# Warning introduced in 27 Oct 2018
),
'obsoletes': parse_list,
'classifiers': self._get_parser_compound(parse_file, parse_list),
'license': exclude_files_parser('license'),
'license_file': self._deprecated_config_handler(
exclude_files_parser('license_file'),
"The license_file parameter is deprecated, "
"use license_files instead.",
due_date=(2023, 10, 30),
# Warning introduced in 23 May 2021
),
'license_files': parse_list,
'description': parse_file,
'long_description': parse_file,
@@ -595,11 +587,11 @@ class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
# accidentally include newlines and other unintended content
try:
Version(version)
except InvalidVersion:
except InvalidVersion as e:
raise OptionError(
f'Version loaded from {value} does not '
f'comply with PEP 440: {version}'
)
) from e
return version
@@ -611,14 +603,14 @@ class ConfigOptionsHandler(ConfigHandler["Distribution"]):
def __init__(
self,
target_obj: "Distribution",
target_obj: Distribution,
options: AllCommandOptions,
ignore_option_errors: bool,
ensure_discovered: expand.EnsurePackagesDiscovered,
):
super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
self.root_dir = target_obj.src_root
self.package_dir: Dict[str, str] = {} # To be filled by `find_packages`
self.package_dir: dict[str, str] = {} # To be filled by `find_packages`
@classmethod
def _parse_list_semicolon(cls, value):
@@ -660,7 +652,6 @@ class ConfigOptionsHandler(ConfigHandler["Distribution"]):
self._parse_requirements_list, "install_requires"
),
'setup_requires': self._parse_list_semicolon,
'tests_require': self._parse_list_semicolon,
'packages': self._parse_packages,
'entry_points': self._parse_file_in_root,
'py_modules': parse_list,
@@ -708,9 +699,9 @@ class ConfigOptionsHandler(ConfigHandler["Distribution"]):
valid_keys = ['where', 'include', 'exclude']
find_kwargs = dict(
[(k, v) for k, v in section_data.items() if k in valid_keys and v]
)
find_kwargs = dict([
(k, v) for k, v in section_data.items() if k in valid_keys and v
])
where = find_kwargs.get('where')
if where is not None:

View File

@@ -0,0 +1,433 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html",
"title": "``tool.setuptools`` table",
"$$description": [
"``setuptools``-specific configurations that can be set by users that require",
"customization.",
"These configurations are completely optional and probably can be skipped when",
"creating simple packages. They are equivalent to some of the `Keywords",
"<https://setuptools.pypa.io/en/latest/references/keywords.html>`_",
"used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.",
"It considers only ``setuptools`` `parameters",
"<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#setuptools-specific-configuration>`_",
"that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``",
"and ``setup_requires`` (incompatible with modern workflows/standards)."
],
"type": "object",
"additionalProperties": false,
"properties": {
"platforms": {
"type": "array",
"items": {"type": "string"}
},
"provides": {
"$$description": [
"Package and virtual package names contained within this package",
"**(not supported by pip)**"
],
"type": "array",
"items": {"type": "string", "format": "pep508-identifier"}
},
"obsoletes": {
"$$description": [
"Packages which this package renders obsolete",
"**(not supported by pip)**"
],
"type": "array",
"items": {"type": "string", "format": "pep508-identifier"}
},
"zip-safe": {
"$$description": [
"Whether the project can be safely installed and run from a zip file.",
"**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
"``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
],
"type": "boolean"
},
"script-files": {
"$$description": [
"Legacy way of defining scripts (entry-points are preferred).",
"Equivalent to the ``script`` keyword in ``setup.py``",
"(it was renamed to avoid confusion with entry-point based ``project.scripts``",
"defined in :pep:`621`).",
"**DISCOURAGED**: generic script wrappers are tricky and may not work properly.",
"Whenever possible, please use ``project.scripts`` instead."
],
"type": "array",
"items": {"type": "string"},
"$comment": "TODO: is this field deprecated/should be removed?"
},
"eager-resources": {
"$$description": [
"Resources that should be extracted together, if any of them is needed,",
"or if any C extensions included in the project are imported.",
"**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
"``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
],
"type": "array",
"items": {"type": "string"}
},
"packages": {
"$$description": [
"Packages that should be included in the distribution.",
"It can be given either as a list of package identifiers",
"or as a ``dict``-like structure with a single key ``find``",
"which corresponds to a dynamic call to",
"``setuptools.config.expand.find_packages`` function.",
"The ``find`` key is associated with a nested ``dict``-like structure that can",
"contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,",
"mimicking the keyword arguments of the associated function."
],
"oneOf": [
{
"title": "Array of Python package identifiers",
"type": "array",
"items": {"$ref": "#/definitions/package-name"}
},
{"$ref": "#/definitions/find-directive"}
]
},
"package-dir": {
"$$description": [
":class:`dict`-like structure mapping from package names to directories where their",
"code can be found.",
"The empty string (as key) means that all packages are contained inside",
"the given directory will be included in the distribution."
],
"type": "object",
"additionalProperties": false,
"propertyNames": {
"anyOf": [{"const": ""}, {"$ref": "#/definitions/package-name"}]
},
"patternProperties": {
"^.*$": {"type": "string" }
}
},
"package-data": {
"$$description": [
"Mapping from package names to lists of glob patterns.",
"Usually this option is not needed when using ``include-package-data = true``",
"For more information on how to include data files, check ``setuptools`` `docs",
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
],
"type": "object",
"additionalProperties": false,
"propertyNames": {
"anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
},
"patternProperties": {
"^.*$": {"type": "array", "items": {"type": "string"}}
}
},
"include-package-data": {
"$$description": [
"Automatically include any data files inside the package directories",
"that are specified by ``MANIFEST.in``",
"For more information on how to include data files, check ``setuptools`` `docs",
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
],
"type": "boolean"
},
"exclude-package-data": {
"$$description": [
"Mapping from package names to lists of glob patterns that should be excluded",
"For more information on how to include data files, check ``setuptools`` `docs",
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
],
"type": "object",
"additionalProperties": false,
"propertyNames": {
"anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
},
"patternProperties": {
"^.*$": {"type": "array", "items": {"type": "string"}}
}
},
"namespace-packages": {
"type": "array",
"items": {"type": "string", "format": "python-module-name-relaxed"},
"$comment": "https://setuptools.pypa.io/en/latest/userguide/package_discovery.html",
"description": "**DEPRECATED**: use implicit namespaces instead (:pep:`420`)."
},
"py-modules": {
"description": "Modules that setuptools will manipulate",
"type": "array",
"items": {"type": "string", "format": "python-module-name-relaxed"},
"$comment": "TODO: clarify the relationship with ``packages``"
},
"ext-modules": {
"description": "Extension modules to be compiled by setuptools",
"type": "array",
"items": {"$ref": "#/definitions/ext-module"}
},
"data-files": {
"$$description": [
"``dict``-like structure where each key represents a directory and",
"the value is a list of glob patterns that should be installed in them.",
"**DISCOURAGED**: please notice this might not work as expected with wheels.",
"Whenever possible, consider using data files inside the package directories",
"(or create a new namespace package that only contains data files).",
"See `data files support",
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
],
"type": "object",
"patternProperties": {
"^.*$": {"type": "array", "items": {"type": "string"}}
}
},
"cmdclass": {
"$$description": [
"Mapping of distutils-style command names to ``setuptools.Command`` subclasses",
"which in turn should be represented by strings with a qualified class name",
"(i.e., \"dotted\" form with module), e.g.::\n\n",
" cmdclass = {mycmd = \"pkg.subpkg.module.CommandClass\"}\n\n",
"The command class should be a directly defined at the top-level of the",
"containing module (no class nesting)."
],
"type": "object",
"patternProperties": {
"^.*$": {"type": "string", "format": "python-qualified-identifier"}
}
},
"license-files": {
"type": "array",
"items": {"type": "string"},
"$$description": [
"**PROVISIONAL**: list of glob patterns for all license files being distributed.",
"(likely to become standard with :pep:`639`).",
"By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"
],
"$comment": "TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?"
},
"dynamic": {
"type": "object",
"description": "Instructions for loading :pep:`621`-related metadata dynamically",
"additionalProperties": false,
"properties": {
"version": {
"$$description": [
"A version dynamically loaded via either the ``attr:`` or ``file:``",
"directives. Please make sure the given file or attribute respects :pep:`440`.",
"Also ensure to set ``project.dynamic`` accordingly."
],
"oneOf": [
{"$ref": "#/definitions/attr-directive"},
{"$ref": "#/definitions/file-directive"}
]
},
"classifiers": {"$ref": "#/definitions/file-directive"},
"description": {"$ref": "#/definitions/file-directive"},
"entry-points": {"$ref": "#/definitions/file-directive"},
"dependencies": {"$ref": "#/definitions/file-directive-for-dependencies"},
"optional-dependencies": {
"type": "object",
"propertyNames": {"type": "string", "format": "pep508-identifier"},
"additionalProperties": false,
"patternProperties": {
".+": {"$ref": "#/definitions/file-directive-for-dependencies"}
}
},
"readme": {
"type": "object",
"anyOf": [
{"$ref": "#/definitions/file-directive"},
{
"type": "object",
"properties": {
"content-type": {"type": "string"},
"file": { "$ref": "#/definitions/file-directive/properties/file" }
},
"additionalProperties": false}
],
"required": ["file"]
}
}
}
},
"definitions": {
"package-name": {
"$id": "#/definitions/package-name",
"title": "Valid package name",
"description": "Valid package name (importable or :pep:`561`).",
"type": "string",
"anyOf": [
{"type": "string", "format": "python-module-name-relaxed"},
{"type": "string", "format": "pep561-stub-name"}
]
},
"ext-module": {
"$id": "#/definitions/ext-module",
"title": "Extension module",
"description": "Parameters to construct a :class:`setuptools.Extension` object",
"type": "object",
"required": ["name", "sources"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"format": "python-module-name-relaxed"
},
"sources": {
"type": "array",
"items": {"type": "string"}
},
"include-dirs":{
"type": "array",
"items": {"type": "string"}
},
"define-macros": {
"type": "array",
"items": {
"type": "array",
"items": [
{"description": "macro name", "type": "string"},
{"description": "macro value", "oneOf": [{"type": "string"}, {"type": "null"}]}
],
"additionalItems": false
}
},
"undef-macros": {
"type": "array",
"items": {"type": "string"}
},
"library-dirs": {
"type": "array",
"items": {"type": "string"}
},
"libraries": {
"type": "array",
"items": {"type": "string"}
},
"runtime-library-dirs": {
"type": "array",
"items": {"type": "string"}
},
"extra-objects": {
"type": "array",
"items": {"type": "string"}
},
"extra-compile-args": {
"type": "array",
"items": {"type": "string"}
},
"extra-link-args": {
"type": "array",
"items": {"type": "string"}
},
"export-symbols": {
"type": "array",
"items": {"type": "string"}
},
"swig-opts": {
"type": "array",
"items": {"type": "string"}
},
"depends": {
"type": "array",
"items": {"type": "string"}
},
"language": {"type": "string"},
"optional": {"type": "boolean"},
"py-limited-api": {"type": "boolean"}
}
},
"file-directive": {
"$id": "#/definitions/file-directive",
"title": "'file:' directive",
"description":
"Value is read from a file (or list of files and then concatenated)",
"type": "object",
"additionalProperties": false,
"properties": {
"file": {
"oneOf": [
{"type": "string"},
{"type": "array", "items": {"type": "string"}}
]
}
},
"required": ["file"]
},
"file-directive-for-dependencies": {
"title": "'file:' directive for dependencies",
"allOf": [
{
"$$description": [
"**BETA**: subset of the ``requirements.txt`` format",
"without ``pip`` flags and options",
"(one :pep:`508`-compliant string per line,",
"lines that are blank or start with ``#`` are excluded).",
"See `dynamic metadata",
"<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#dynamic-metadata>`_."
]
},
{"$ref": "#/definitions/file-directive"}
]
},
"attr-directive": {
"title": "'attr:' directive",
"$id": "#/definitions/attr-directive",
"$$description": [
"Value is read from a module attribute. Supports callables and iterables;",
"unsupported types are cast via ``str()``"
],
"type": "object",
"additionalProperties": false,
"properties": {
"attr": {"type": "string", "format": "python-qualified-identifier"}
},
"required": ["attr"]
},
"find-directive": {
"$id": "#/definitions/find-directive",
"title": "'find:' directive",
"type": "object",
"additionalProperties": false,
"properties": {
"find": {
"type": "object",
"$$description": [
"Dynamic `package discovery",
"<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_."
],
"additionalProperties": false,
"properties": {
"where": {
"description":
"Directories to be searched for packages (Unix-style relative path)",
"type": "array",
"items": {"type": "string"}
},
"exclude": {
"type": "array",
"$$description": [
"Exclude packages that match the values listed in this field.",
"Can container shell-style wildcards (e.g. ``'pkg.*'``)"
],
"items": {"type": "string"}
},
"include": {
"type": "array",
"$$description": [
"Restrict the found packages to just the ones listed in this field.",
"Can container shell-style wildcards (e.g. ``'pkg.*'``)"
],
"items": {"type": "string"}
},
"namespaces": {
"type": "boolean",
"$$description": [
"When ``True``, directories without a ``__init__.py`` file will also",
"be scanned for :pep:`420`-style implicit namespaces"
]
}
}
}
}
}
}
}