venv added, updated
This commit is contained in:
10
myenv/lib/python3.12/site-packages/setuptools/config/NOTICE
Normal file
10
myenv/lib/python3.12/site-packages/setuptools/config/NOTICE
Normal file
@@ -0,0 +1,10 @@
|
||||
The following files include code from opensource projects
|
||||
(either as direct copies or modified versions):
|
||||
|
||||
- `setuptools.schema.json`, `distutils.schema.json`:
|
||||
- project: `validate-pyproject` - licensed under MPL-2.0
|
||||
(https://github.com/abravalheri/validate-pyproject):
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this file,
|
||||
You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||
@@ -0,0 +1,43 @@
|
||||
"""For backward compatibility, expose main functions from
|
||||
``setuptools.config.setupcfg``
|
||||
"""
|
||||
|
||||
from functools import wraps
|
||||
from typing import Callable, TypeVar, cast
|
||||
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
from . import setupcfg
|
||||
|
||||
# Type variable used so ``_deprecation_notice`` preserves the decorated
# function's callable type for static type-checkers.
Fn = TypeVar("Fn", bound=Callable)


# Public API of this module: both names are re-exported from ``setupcfg``
# below, wrapped with a deprecation warning.
__all__ = ('parse_configuration', 'read_configuration')
|
||||
|
||||
|
||||
def _deprecation_notice(fn: Fn) -> Fn:
    """Wrap *fn* so every call emits a ``SetuptoolsDeprecationWarning``
    before delegating to the original function.

    The wrapper keeps ``fn``'s metadata via ``functools.wraps`` and is cast
    back to ``Fn`` so callers see the original signature.
    """

    @wraps(fn)
    def _wrapper(*args, **kwargs):
        SetuptoolsDeprecationWarning.emit(
            "Deprecated API usage.",
            f"""
            As setuptools moves its configuration towards `pyproject.toml`,
            `{__name__}.{fn.__name__}` became deprecated.

            For the time being, you can use the `{setupcfg.__name__}` module
            to access a backward compatible API, but this module is provisional
            and might be removed in the future.

            To read project metadata, consider using
            ``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
            For simple scenarios, you can also try parsing the file directly
            with the help of ``configparser``.
            """,
            # due_date not defined yet, because the community still heavily relies on it
            # Warning introduced in 24 Mar 2022
        )
        # Delegate unchanged: the warning is the only behavioral addition.
        return fn(*args, **kwargs)

    return cast(Fn, _wrapper)
|
||||
|
||||
|
||||
# Deprecated aliases: forward to ``setupcfg`` while warning on each use.
read_configuration = _deprecation_notice(setupcfg.read_configuration)
parse_configuration = _deprecation_notice(setupcfg.parse_configuration)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,457 @@
|
||||
"""Translation layer between pyproject config and setuptools distribution and
|
||||
metadata objects.
|
||||
|
||||
The distribution and metadata objects are modeled after (an old version of)
|
||||
core metadata, therefore configs in the format specified for ``pyproject.toml``
|
||||
need to be processed before being applied.
|
||||
|
||||
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from email.headerregistry import Address
|
||||
from functools import partial, reduce
|
||||
from inspect import cleandoc
|
||||
from itertools import chain
|
||||
from types import MappingProxyType
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, TypeVar, Union
|
||||
|
||||
from .._path import StrPath
|
||||
from ..errors import RemovedConfigError
|
||||
from ..extension import Extension
|
||||
from ..warnings import SetuptoolsWarning
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from setuptools._importlib import metadata
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
from distutils.dist import _OptionsList
|
||||
|
||||
EMPTY: Mapping = MappingProxyType({})  # Immutable dict-like
# ``project.readme`` value: either a path string or a table such as
# ``{"file": ..., "content-type": ...}`` / ``{"text": ...}``.
_ProjectReadmeValue: TypeAlias = Union[str, Dict[str, str]]
# Custom handler: receives the distribution, the config value and the root dir.
_CorrespFn: TypeAlias = Callable[["Distribution", Any, StrPath], None]
# Either a plain attribute/field name or a custom handler function.
_Correspondence: TypeAlias = Union[str, _CorrespFn]
_T = TypeVar("_T")

_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def apply(dist: Distribution, config: dict, filename: StrPath) -> Distribution:
    """Apply configuration dict read with :func:`read_configuration`"""

    if not config:
        return dist  # short-circuit unrelated pyproject.toml file

    # Paths inside the config are interpreted relative to pyproject.toml.
    root_dir = os.path.dirname(filename) or "."

    _apply_project_table(dist, config, root_dir)
    _apply_tool_table(dist, config, filename)

    # The private finalize hooks resolve relative paths against the CWD,
    # so temporarily switch into the project root and always switch back.
    current_directory = os.getcwd()
    os.chdir(root_dir)
    try:
        dist._finalize_requires()
        dist._finalize_license_files()
    finally:
        os.chdir(current_directory)

    return dist
|
||||
|
||||
|
||||
def _apply_project_table(dist: Distribution, config: dict, root_dir: StrPath):
    """Transfer the ``[project]`` table of ``pyproject.toml`` onto *dist*.

    Works on a shallow copy of the table so normalization does not mutate
    the caller's ``config``.
    """
    project = config.get("project", {}).copy()
    if not project:
        return  # nothing to apply

    _handle_missing_dynamic(dist, project)
    _unify_entry_points(project)

    for key, val in project.items():
        normalised = json_compatible_key(key)
        # Either a custom handler function or the plain attribute name.
        handler = PYPROJECT_CORRESPONDENCE.get(normalised, normalised)
        if callable(handler):
            handler(dist, val, root_dir)
        else:
            _set_config(dist, handler, val)
|
||||
|
||||
|
||||
def _apply_tool_table(dist: Distribution, config: dict, filename: StrPath):
    """Apply the ``[tool.setuptools]`` table onto *dist*.

    Raises :class:`RemovedConfigError` for options that were removed, renames
    legacy option names, and finally copies ``[tool.distutils]`` command
    options.
    """
    tool_table = config.get("tool", {}).get("setuptools", {})
    if not tool_table:
        return  # short-circuit

    for field, value in tool_table.items():
        norm_key = json_compatible_key(field)

        if norm_key in TOOL_TABLE_REMOVALS:
            # The removal table stores a migration hint for the error message.
            suggestion = cleandoc(TOOL_TABLE_REMOVALS[norm_key])
            msg = f"""
            The parameter `tool.setuptools.{field}` was long deprecated
            and has been removed from `pyproject.toml`.
            """
            raise RemovedConfigError("\n".join([cleandoc(msg), suggestion]))

        # Legacy spellings (e.g. ``script-files``) map to current names.
        norm_key = TOOL_TABLE_RENAMES.get(norm_key, norm_key)
        _set_config(dist, norm_key, value)

    _copy_command_options(config, dist, filename)
|
||||
|
||||
|
||||
def _handle_missing_dynamic(dist: Distribution, project_table: dict):
    """Be temporarily forgiving with ``dynamic`` fields not listed in ``dynamic``"""
    dynamic = set(project_table.get("dynamic", []))
    for field, getter in _PREVIOUSLY_DEFINED.items():
        if not (field in project_table or field in dynamic):
            # Field was set elsewhere (e.g. setup.py) but neither given in
            # [project] nor declared dynamic: per spec it must be ignored.
            value = getter(dist)
            if value:
                # Warn, then overwrite with a neutral "reset" value so the
                # previously defined value is effectively discarded.
                _MissingDynamic.emit(field=field, value=value)
                project_table[field] = _RESET_PREVIOUSLY_DEFINED.get(field)
|
||||
|
||||
|
||||
def json_compatible_key(key: str) -> str:
    """As defined in :pep:`566#json-compatible-metadata`"""
    lowered = key.lower()
    return lowered.replace("-", "_")
|
||||
|
||||
|
||||
def _set_config(dist: Distribution, field: str, value: Any):
    """Assign *value* to the right place on *dist*.

    Resolution order: (1) run an optional pre-processor from ``_PREPROCESS``;
    (2) prefer a ``dist.metadata.set_<field>`` setter if one exists;
    (3) otherwise set the attribute on ``dist.metadata`` when it already has
    it (or it is a setuptools metadata patch); (4) fall back to ``dist``.
    """
    val = _PREPROCESS.get(field, _noop)(dist, value)
    setter = getattr(dist.metadata, f"set_{field}", None)
    if setter:
        setter(val)
    elif hasattr(dist.metadata, field) or field in SETUPTOOLS_PATCHES:
        setattr(dist.metadata, field, val)
    else:
        setattr(dist, field, val)
|
||||
|
||||
|
||||
# Readme file extensions that setuptools knows how to label.
_CONTENT_TYPES = {
    ".md": "text/markdown",
    ".rst": "text/x-rst",
    ".txt": "text/plain",
}


def _guess_content_type(file: str) -> str | None:
    """Derive the MIME content type of *file* from its extension.

    Returns ``None`` when the file has no extension at all, and raises
    ``ValueError`` for an extension outside ``_CONTENT_TYPES``.
    """
    ext = os.path.splitext(file.lower())[1]
    if not ext:
        return None

    ctype = _CONTENT_TYPES.get(ext)
    if ctype is not None:
        return ctype

    valid = ", ".join(f"{k} ({v})" for k, v in _CONTENT_TYPES.items())
    msg = f"only the following file extensions are recognized: {valid}."
    raise ValueError(f"Undefined content type for {file}, {msg}")
|
||||
|
||||
|
||||
def _long_description(dist: Distribution, val: _ProjectReadmeValue, root_dir: StrPath):
    """Apply ``project.readme`` as ``long_description`` (+ content type).

    *val* is either a path string or a table with ``file``/``text`` and
    ``content-type`` keys (see the pyproject.toml spec).
    """
    from setuptools.config import expand

    file: str | tuple[()]
    if isinstance(val, str):
        # Plain string form: the readme path; content type guessed from ext.
        file = val
        text = expand.read_files(file, root_dir)
        ctype = _guess_content_type(file)
    else:
        # Table form: explicit content-type; text either inline or from file.
        file = val.get("file") or ()
        text = val.get("text") or expand.read_files(file, root_dir)
        ctype = val["content-type"]

    _set_config(dist, "long_description", text)

    if ctype:
        _set_config(dist, "long_description_content_type", ctype)

    if file:
        # Track the file so it is included in the sdist.
        dist._referenced_files.add(file)
|
||||
|
||||
|
||||
def _license(dist: Distribution, val: dict, root_dir: StrPath):
    """Apply ``project.license``: inline ``text`` or content read from ``file``."""
    from setuptools.config import expand

    if "file" not in val:
        _set_config(dist, "license", val["text"])
        return

    license_file = val["file"]
    _set_config(dist, "license", expand.read_files([license_file], root_dir))
    # Track the file so it is included in the sdist.
    dist._referenced_files.add(license_file)
|
||||
|
||||
|
||||
def _people(dist: Distribution, val: list[dict], _root_dir: StrPath, kind: str):
    """Apply ``project.authors``/``project.maintainers``.

    *kind* is ``"author"`` or ``"maintainer"``.  Entries with only a name go
    into the plain field; entries with an email (with or without a name) go
    into the ``<kind>_email`` field, RFC 5322 formatted when both are given.
    """
    plain_names = []
    with_email = []
    for person in val:
        if "name" not in person:
            with_email.append(person["email"])
        elif "email" not in person:
            plain_names.append(person["name"])
        else:
            formatted = Address(
                display_name=person["name"], addr_spec=person["email"]
            )
            with_email.append(str(formatted))

    if plain_names:
        _set_config(dist, kind, ", ".join(plain_names))
    if with_email:
        _set_config(dist, f"{kind}_email", ", ".join(with_email))
|
||||
|
||||
|
||||
def _project_urls(dist: Distribution, val: dict, _root_dir):
    # ``project.urls`` is a label -> URL mapping, stored verbatim.
    _set_config(dist, "project_urls", val)
|
||||
|
||||
|
||||
def _python_requires(dist: Distribution, val: str, _root_dir):
    from packaging.specifiers import SpecifierSet

    # Parse into a SpecifierSet so invalid version specifiers fail early.
    _set_config(dist, "python_requires", SpecifierSet(val))
|
||||
|
||||
|
||||
def _dependencies(dist: Distribution, val: list, _root_dir):
    """Apply ``project.dependencies``, replacing any ``install_requires``.

    Emits a warning when a previously configured ``install_requires`` is
    being overwritten.
    """
    previous = getattr(dist, "install_requires", [])
    if previous:
        SetuptoolsWarning.emit(
            "`install_requires` overwritten in `pyproject.toml` (dependencies)"
        )
    dist.install_requires = val
|
||||
|
||||
|
||||
def _optional_dependencies(dist: Distribution, val: dict, _root_dir):
    """Merge ``[project.optional-dependencies]`` into ``extras_require``.

    New extras win over previously configured ones with the same name.
    """
    merged = dict(getattr(dist, "extras_require", None) or {})
    merged.update(val)
    dist.extras_require = merged
|
||||
|
||||
|
||||
def _ext_modules(dist: Distribution, val: list[dict]) -> list[Extension]:
    """Convert ``ext-modules`` tables into ``Extension`` objects.

    Newly declared modules are appended after any already present on *dist*.
    """
    modules = list(dist.ext_modules or [])
    for table in val:
        kwargs = {key.replace("-", "_"): item for key, item in table.items()}
        modules.append(Extension(**kwargs))
    return modules
|
||||
|
||||
|
||||
def _noop(_dist: Distribution, val: _T) -> _T:
    # Identity pre-processor: the default entry for ``_PREPROCESS`` lookups.
    return val
|
||||
|
||||
|
||||
def _unify_entry_points(project_table: dict):
    """Fold ``scripts``/``gui-scripts`` into a single ``entry-points`` table
    (in place), converting each group into ``"name = value"`` strings.

    NOTE: both ``entry-points`` and ``entry_points`` spellings are popped
    eagerly (the second ``pop`` runs even when the first key exists).
    """
    project = project_table
    entry_points = project.pop("entry-points", project.pop("entry_points", {}))
    renaming = {"scripts": "console_scripts", "gui_scripts": "gui_scripts"}
    for key, value in list(project.items()):  # eager to allow modifications
        norm_key = json_compatible_key(key)
        if norm_key in renaming:
            # Don't skip even if value is empty (reason: reset missing `dynamic`)
            entry_points[renaming[norm_key]] = project.pop(key)

    if entry_points:
        project["entry-points"] = {
            name: [f"{k} = {v}" for k, v in group.items()]
            for name, group in entry_points.items()
            if group  # now we can skip empty groups
        }
        # Sometimes this will set `project["entry-points"] = {}`, and that is
        # intentional (for resetting configurations that are missing `dynamic`).
|
||||
|
||||
|
||||
def _copy_command_options(pyproject: dict, dist: Distribution, filename: StrPath):
    """Copy ``[tool.distutils.<cmd>]`` options onto ``dist.command_options``.

    Options that are not recognised for a command are still copied (they may
    be defined dynamically at run time), but a warning is logged for each.
    """
    tool_table = pyproject.get("tool", {})
    cmdclass = tool_table.get("setuptools", {}).get("cmdclass", {})
    valid_options = _valid_command_options(cmdclass)

    cmd_opts = dist.command_options
    # Reuse ``tool_table`` instead of re-reading ``pyproject["tool"]``.
    for cmd, config in tool_table.get("distutils", {}).items():
        cmd = json_compatible_key(cmd)
        valid = valid_options.get(cmd, set())
        cmd_opts.setdefault(cmd, {})
        for key, value in config.items():
            key = json_compatible_key(key)
            # Record the originating file so distutils can report provenance.
            cmd_opts[cmd][key] = (str(filename), value)
            if key not in valid:
                # To avoid removing options that are specified dynamically we
                # just log a warn...
                # Lazy %-style args: no string formatting unless emitted.
                _logger.warning("Command option %s.%s is not defined", cmd, key)
|
||||
|
||||
|
||||
def _valid_command_options(cmdclass: Mapping = EMPTY) -> dict[str, set[str]]:
    """Map each known command name to the set of its valid option names.

    Commands come from the ``distutils.commands`` entry-point group plus any
    explicitly supplied *cmdclass* mapping; option names are taken from each
    command class' ``user_options`` and normalised.
    """
    # Imported lazily to avoid import cycles at module load time.
    from setuptools.dist import Distribution

    from .._importlib import metadata

    valid_options = {"global": _normalise_cmd_options(Distribution.global_options)}

    # Entry-points are loaded defensively: failures yield None (see _load_ep)
    # and are filtered out below.
    unloaded_entry_points = metadata.entry_points(group='distutils.commands')
    loaded_entry_points = (_load_ep(ep) for ep in unloaded_entry_points)
    entry_points = (ep for ep in loaded_entry_points if ep)
    for cmd, cmd_class in chain(entry_points, cmdclass.items()):
        # Union with any options already registered for the same command.
        opts = valid_options.get(cmd, set())
        opts = opts | _normalise_cmd_options(getattr(cmd_class, "user_options", []))
        valid_options[cmd] = opts

    return valid_options
|
||||
|
||||
|
||||
def _load_ep(ep: metadata.EntryPoint) -> tuple[str, type] | None:
    """Load a ``distutils.commands`` entry-point as ``(name, loaded_object)``.

    Returns ``None`` (after logging a warning) when loading fails, so one
    broken third-party entry-point cannot break option validation.
    """
    if ep.value.startswith("wheel.bdist_wheel"):
        # Ignore deprecated entrypoint from wheel and avoid warning pypa/wheel#631
        # TODO: remove check when `bdist_wheel` has been fully removed from pypa/wheel
        return None

    # Ignore all the errors: entry-points are third-party code.
    try:
        return (ep.name, ep.load())
    except Exception as ex:
        # Lazy %-style args: message is only built if the record is emitted.
        _logger.warning(
            "%s while trying to load entry-point %s: %s",
            ex.__class__.__name__,
            ep.name,
            ex,
        )
        return None
|
||||
|
||||
|
||||
def _normalise_cmd_option_key(name: str) -> str:
    """Normalise one option name: PEP 566 key form, minus ``_``/``=`` padding."""
    # Inline of ``json_compatible_key`` + trimming of option decorations
    # (trailing "=" marks a value-taking option in distutils user_options).
    normalised = name.lower().replace("-", "_")
    return normalised.strip("_=")


def _normalise_cmd_options(desc: _OptionsList) -> set[str]:
    """Collect normalised long-option names from a ``user_options`` list."""
    keys = set()
    for fancy_option in desc:
        keys.add(_normalise_cmd_option_key(fancy_option[0]))
    return keys
|
||||
|
||||
|
||||
def _get_previous_entrypoints(dist: Distribution) -> dict[str, list]:
    """Entry-point groups already configured on *dist*, minus script groups."""
    skipped = ("console_scripts", "gui_scripts")
    existing = getattr(dist, "entry_points", None) or {}
    return {group: eps for group, eps in existing.items() if group not in skipped}
|
||||
|
||||
|
||||
def _get_previous_scripts(dist: Distribution) -> list | None:
    """``console_scripts`` group previously configured on *dist*, if any."""
    entry_points = getattr(dist, "entry_points", None) or {}
    return entry_points.get("console_scripts")
|
||||
|
||||
|
||||
def _get_previous_gui_scripts(dist: Distribution) -> list | None:
    """``gui_scripts`` group previously configured on *dist*, if any."""
    entry_points = getattr(dist, "entry_points", None) or {}
    return entry_points.get("gui_scripts")
|
||||
|
||||
|
||||
def _attrgetter(attr):
    """
    Similar to ``operator.attrgetter`` but returns None if ``attr`` is not found
    >>> from types import SimpleNamespace
    >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
    >>> _attrgetter("a")(obj)
    42
    >>> _attrgetter("b.c")(obj)
    13
    >>> _attrgetter("d")(obj) is None
    True
    """
    path = attr.split(".")

    def _get(obj):
        # Walk the dotted path; a missing attribute collapses to None and
        # every subsequent getattr on None keeps returning None.
        for name in path:
            obj = getattr(obj, name, None)
        return obj

    return _get
|
||||
|
||||
|
||||
def _some_attrgetter(*items):
    """
    Return the first "truth-y" attribute or None
    >>> from types import SimpleNamespace
    >>> obj = SimpleNamespace(a=42, b=SimpleNamespace(c=13))
    >>> _some_attrgetter("d", "a", "b.c")(obj)
    42
    >>> _some_attrgetter("d", "e", "b.c", "a")(obj)
    13
    >>> _some_attrgetter("d", "e", "f")(obj) is None
    True
    """

    def _accessor(obj):
        # First non-None wins; falls through to None when nothing matches.
        for item in items:
            found = _attrgetter(item)(obj)
            if found is not None:
                return found
        return None

    return _accessor
|
||||
|
||||
|
||||
# [project] keys (normalised) that need custom handling instead of a plain
# attribute assignment via _set_config.
PYPROJECT_CORRESPONDENCE: dict[str, _Correspondence] = {
    "readme": _long_description,
    "license": _license,
    "authors": partial(_people, kind="author"),
    "maintainers": partial(_people, kind="maintainer"),
    "urls": _project_urls,
    "dependencies": _dependencies,
    "optional_dependencies": _optional_dependencies,
    "requires_python": _python_requires,
}

# Legacy [tool.setuptools] option names mapped to their current spelling.
TOOL_TABLE_RENAMES = {"script_files": "scripts"}
# Removed [tool.setuptools] options; values are migration hints shown in the
# RemovedConfigError message (cleandoc is applied before display).
TOOL_TABLE_REMOVALS = {
    "namespace_packages": """
        Please migrate to implicit native namespaces instead.
        See https://packaging.python.org/en/latest/guides/packaging-namespace-packages/.
        """,
}

# Metadata fields patched in by setuptools (not present on vanilla distutils
# metadata); _set_config still assigns them on dist.metadata.
SETUPTOOLS_PATCHES = {
    "long_description_content_type",
    "project_urls",
    "provides_extras",
    "license_file",
    "license_files",
}

# Per-field value pre-processors applied by _set_config before assignment.
_PREPROCESS = {
    "ext_modules": _ext_modules,
}

# [project] field -> accessor retrieving a previously configured value from
# the Distribution (used by _handle_missing_dynamic).
_PREVIOUSLY_DEFINED = {
    "name": _attrgetter("metadata.name"),
    "version": _attrgetter("metadata.version"),
    "description": _attrgetter("metadata.description"),
    "readme": _attrgetter("metadata.long_description"),
    "requires-python": _some_attrgetter("python_requires", "metadata.python_requires"),
    "license": _attrgetter("metadata.license"),
    "authors": _some_attrgetter("metadata.author", "metadata.author_email"),
    "maintainers": _some_attrgetter("metadata.maintainer", "metadata.maintainer_email"),
    "keywords": _attrgetter("metadata.keywords"),
    "classifiers": _attrgetter("metadata.classifiers"),
    "urls": _attrgetter("metadata.project_urls"),
    "entry-points": _get_previous_entrypoints,
    "scripts": _get_previous_scripts,
    "gui-scripts": _get_previous_gui_scripts,
    "dependencies": _attrgetter("install_requires"),
    "optional-dependencies": _attrgetter("extras_require"),
}


_RESET_PREVIOUSLY_DEFINED: dict = {
    # Fix improper setting: given in `setup.py`, but not listed in `dynamic`
    # dict: pyproject name => value to which reset
    "license": {},
    "authors": [],
    "maintainers": [],
    "keywords": [],
    "classifiers": [],
    "urls": {},
    "entry-points": {},
    "scripts": {},
    "gui-scripts": {},
    "dependencies": [],
    "optional-dependencies": {},
}
|
||||
|
||||
|
||||
class _MissingDynamic(SetuptoolsWarning):
    """Warning for a field set outside pyproject.toml but not listed in
    ``project.dynamic`` (emitted by ``_handle_missing_dynamic``)."""

    _SUMMARY = "`{field}` defined outside of `pyproject.toml` is ignored."

    _DETAILS = """
    The following seems to be defined outside of `pyproject.toml`:

    `{field} = {value!r}`

    According to the spec (see the link below), however, setuptools CANNOT
    consider this value unless `{field}` is listed as `dynamic`.

    https://packaging.python.org/en/latest/specifications/pyproject-toml/#declaring-project-metadata-the-project-table

    To prevent this problem, you can list `{field}` under `dynamic` or alternatively
    remove the `[project]` table from your file and rely entirely on other means of
    configuration.
    """
    # TODO: Consider removing this check in the future?
    # There is a trade-off here between improving "debug-ability" and the cost
    # of running/testing/maintaining these unnecessary checks...

    @classmethod
    def details(cls, field: str, value: Any) -> str:
        # Interpolate the offending field/value into the message template.
        return cls._DETAILS.format(field=field, value=value)
|
||||
@@ -0,0 +1,438 @@
|
||||
The code contained in this directory was automatically generated using the
|
||||
following command:
|
||||
|
||||
python -m validate_pyproject.pre_compile --output-dir=setuptools/config/_validate_pyproject --enable-plugins setuptools distutils --very-verbose -t distutils=setuptools/config/distutils.schema.json -t setuptools=setuptools/config/setuptools.schema.json
|
||||
|
||||
Please avoid changing it manually.
|
||||
|
||||
|
||||
You can report issues or suggest changes directly to `validate-pyproject`
|
||||
(or to the relevant plugin repository)
|
||||
|
||||
- https://github.com/abravalheri/validate-pyproject/issues
|
||||
|
||||
|
||||
***
|
||||
|
||||
The following files include code from opensource projects
|
||||
(either as direct copies or modified versions):
|
||||
|
||||
- `fastjsonschema_exceptions.py`:
|
||||
- project: `fastjsonschema` - licensed under BSD-3-Clause
|
||||
(https://github.com/horejsek/python-fastjsonschema)
|
||||
- `extra_validations.py` and `format.py`, `error_reporting.py`:
|
||||
- project: `validate-pyproject` - licensed under MPL-2.0
|
||||
(https://github.com/abravalheri/validate-pyproject)
|
||||
|
||||
|
||||
Additionally the following files are automatically generated by tools provided
|
||||
by the same projects:
|
||||
|
||||
- `__init__.py`
|
||||
- `fastjsonschema_validations.py`
|
||||
|
||||
The relevant copyright notes and licenses are included below.
|
||||
|
||||
|
||||
***
|
||||
|
||||
`fastjsonschema`
|
||||
================
|
||||
|
||||
Copyright (c) 2018, Michal Horejsek
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
Neither the name of the {organization} nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
|
||||
|
||||
***
|
||||
|
||||
`validate-pyproject`
|
||||
====================
|
||||
|
||||
Mozilla Public License, version 2.0
|
||||
|
||||
1. Definitions
|
||||
|
||||
1.1. "Contributor"
|
||||
|
||||
means each individual or legal entity that creates, contributes to the
|
||||
creation of, or owns Covered Software.
|
||||
|
||||
1.2. "Contributor Version"
|
||||
|
||||
means the combination of the Contributions of others (if any) used by a
|
||||
Contributor and that particular Contributor's Contribution.
|
||||
|
||||
1.3. "Contribution"
|
||||
|
||||
means Covered Software of a particular Contributor.
|
||||
|
||||
1.4. "Covered Software"
|
||||
|
||||
means Source Code Form to which the initial Contributor has attached the
|
||||
notice in Exhibit A, the Executable Form of such Source Code Form, and
|
||||
Modifications of such Source Code Form, in each case including portions
|
||||
thereof.
|
||||
|
||||
1.5. "Incompatible With Secondary Licenses"
|
||||
means
|
||||
|
||||
a. that the initial Contributor has attached the notice described in
|
||||
Exhibit B to the Covered Software; or
|
||||
|
||||
b. that the Covered Software was made available under the terms of
|
||||
version 1.1 or earlier of the License, but not also under the terms of
|
||||
a Secondary License.
|
||||
|
||||
1.6. "Executable Form"
|
||||
|
||||
means any form of the work other than Source Code Form.
|
||||
|
||||
1.7. "Larger Work"
|
||||
|
||||
means a work that combines Covered Software with other material, in a
|
||||
separate file or files, that is not Covered Software.
|
||||
|
||||
1.8. "License"
|
||||
|
||||
means this document.
|
||||
|
||||
1.9. "Licensable"
|
||||
|
||||
means having the right to grant, to the maximum extent possible, whether
|
||||
at the time of the initial grant or subsequently, any and all of the
|
||||
rights conveyed by this License.
|
||||
|
||||
1.10. "Modifications"
|
||||
|
||||
means any of the following:
|
||||
|
||||
a. any file in Source Code Form that results from an addition to,
|
||||
deletion from, or modification of the contents of Covered Software; or
|
||||
|
||||
b. any new file in Source Code Form that contains any Covered Software.
|
||||
|
||||
1.11. "Patent Claims" of a Contributor
|
||||
|
||||
means any patent claim(s), including without limitation, method,
|
||||
process, and apparatus claims, in any patent Licensable by such
|
||||
Contributor that would be infringed, but for the grant of the License,
|
||||
by the making, using, selling, offering for sale, having made, import,
|
||||
or transfer of either its Contributions or its Contributor Version.
|
||||
|
||||
1.12. "Secondary License"
|
||||
|
||||
means either the GNU General Public License, Version 2.0, the GNU Lesser
|
||||
General Public License, Version 2.1, the GNU Affero General Public
|
||||
License, Version 3.0, or any later versions of those licenses.
|
||||
|
||||
1.13. "Source Code Form"
|
||||
|
||||
means the form of the work preferred for making modifications.
|
||||
|
||||
1.14. "You" (or "Your")
|
||||
|
||||
means an individual or a legal entity exercising rights under this
|
||||
License. For legal entities, "You" includes any entity that controls, is
|
||||
controlled by, or is under common control with You. For purposes of this
|
||||
definition, "control" means (a) the power, direct or indirect, to cause
|
||||
the direction or management of such entity, whether by contract or
|
||||
otherwise, or (b) ownership of more than fifty percent (50%) of the
|
||||
outstanding shares or beneficial ownership of such entity.
|
||||
|
||||
|
||||
2. License Grants and Conditions
|
||||
|
||||
2.1. Grants
|
||||
|
||||
Each Contributor hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
a. under intellectual property rights (other than patent or trademark)
|
||||
Licensable by such Contributor to use, reproduce, make available,
|
||||
modify, display, perform, distribute, and otherwise exploit its
|
||||
Contributions, either on an unmodified basis, with Modifications, or
|
||||
as part of a Larger Work; and
|
||||
|
||||
b. under Patent Claims of such Contributor to make, use, sell, offer for
|
||||
sale, have made, import, and otherwise transfer either its
|
||||
Contributions or its Contributor Version.
|
||||
|
||||
2.2. Effective Date
|
||||
|
||||
The licenses granted in Section 2.1 with respect to any Contribution
|
||||
become effective for each Contribution on the date the Contributor first
|
||||
distributes such Contribution.
|
||||
|
||||
2.3. Limitations on Grant Scope
|
||||
|
||||
The licenses granted in this Section 2 are the only rights granted under
|
||||
this License. No additional rights or licenses will be implied from the
|
||||
distribution or licensing of Covered Software under this License.
|
||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||
Contributor:
|
||||
|
||||
a. for any code that a Contributor has removed from Covered Software; or
|
||||
|
||||
b. for infringements caused by: (i) Your and any other third party's
|
||||
modifications of Covered Software, or (ii) the combination of its
|
||||
Contributions with other software (except as part of its Contributor
|
||||
Version); or
|
||||
|
||||
c. under Patent Claims infringed by Covered Software in the absence of
|
||||
its Contributions.
|
||||
|
||||
This License does not grant any rights in the trademarks, service marks,
|
||||
or logos of any Contributor (except as may be necessary to comply with
|
||||
the notice requirements in Section 3.4).
|
||||
|
||||
2.4. Subsequent Licenses
|
||||
|
||||
No Contributor makes additional grants as a result of Your choice to
|
||||
distribute the Covered Software under a subsequent version of this
|
||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||
permitted under the terms of Section 3.3).
|
||||
|
||||
2.5. Representation
|
||||
|
||||
Each Contributor represents that the Contributor believes its
|
||||
Contributions are its original creation(s) or it has sufficient rights to
|
||||
grant the rights to its Contributions conveyed by this License.
|
||||
|
||||
2.6. Fair Use
|
||||
|
||||
This License is not intended to limit any rights You have under
|
||||
applicable copyright doctrines of fair use, fair dealing, or other
|
||||
equivalents.
|
||||
|
||||
2.7. Conditions
|
||||
|
||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
|
||||
Section 2.1.
|
||||
|
||||
|
||||
3. Responsibilities
|
||||
|
||||
3.1. Distribution of Source Form
|
||||
|
||||
All distribution of Covered Software in Source Code Form, including any
|
||||
Modifications that You create or to which You contribute, must be under
|
||||
the terms of this License. You must inform recipients that the Source
|
||||
Code Form of the Covered Software is governed by the terms of this
|
||||
License, and how they can obtain a copy of this License. You may not
|
||||
attempt to alter or restrict the recipients' rights in the Source Code
|
||||
Form.
|
||||
|
||||
3.2. Distribution of Executable Form
|
||||
|
||||
If You distribute Covered Software in Executable Form then:
|
||||
|
||||
a. such Covered Software must also be made available in Source Code Form,
|
||||
as described in Section 3.1, and You must inform recipients of the
|
||||
Executable Form how they can obtain a copy of such Source Code Form by
|
||||
reasonable means in a timely manner, at a charge no more than the cost
|
||||
of distribution to the recipient; and
|
||||
|
||||
b. You may distribute such Executable Form under the terms of this
|
||||
License, or sublicense it under different terms, provided that the
|
||||
license for the Executable Form does not attempt to limit or alter the
|
||||
recipients' rights in the Source Code Form under this License.
|
||||
|
||||
3.3. Distribution of a Larger Work
|
||||
|
||||
You may create and distribute a Larger Work under terms of Your choice,
|
||||
provided that You also comply with the requirements of this License for
|
||||
the Covered Software. If the Larger Work is a combination of Covered
|
||||
Software with a work governed by one or more Secondary Licenses, and the
|
||||
Covered Software is not Incompatible With Secondary Licenses, this
|
||||
License permits You to additionally distribute such Covered Software
|
||||
under the terms of such Secondary License(s), so that the recipient of
|
||||
the Larger Work may, at their option, further distribute the Covered
|
||||
Software under the terms of either this License or such Secondary
|
||||
License(s).
|
||||
|
||||
3.4. Notices
|
||||
|
||||
You may not remove or alter the substance of any license notices
|
||||
(including copyright notices, patent notices, disclaimers of warranty, or
|
||||
limitations of liability) contained within the Source Code Form of the
|
||||
Covered Software, except that You may alter any license notices to the
|
||||
extent required to remedy known factual inaccuracies.
|
||||
|
||||
3.5. Application of Additional Terms
|
||||
|
||||
You may choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of Covered
|
||||
Software. However, You may do so only on Your own behalf, and not on
|
||||
behalf of any Contributor. You must make it absolutely clear that any
|
||||
such warranty, support, indemnity, or liability obligation is offered by
|
||||
You alone, and You hereby agree to indemnify every Contributor for any
|
||||
liability incurred by such Contributor as a result of warranty, support,
|
||||
indemnity or liability terms You offer. You may include additional
|
||||
disclaimers of warranty and limitations of liability specific to any
|
||||
jurisdiction.
|
||||
|
||||
4. Inability to Comply Due to Statute or Regulation
|
||||
|
||||
If it is impossible for You to comply with any of the terms of this License
|
||||
with respect to some or all of the Covered Software due to statute,
|
||||
judicial order, or regulation then You must: (a) comply with the terms of
|
||||
this License to the maximum extent possible; and (b) describe the
|
||||
limitations and the code they affect. Such description must be placed in a
|
||||
text file included with all distributions of the Covered Software under
|
||||
this License. Except to the extent prohibited by statute or regulation,
|
||||
such description must be sufficiently detailed for a recipient of ordinary
|
||||
skill to be able to understand it.
|
||||
|
||||
5. Termination
|
||||
|
||||
5.1. The rights granted under this License will terminate automatically if You
|
||||
fail to comply with any of its terms. However, if You become compliant,
|
||||
then the rights granted under this License from a particular Contributor
|
||||
are reinstated (a) provisionally, unless and until such Contributor
|
||||
explicitly and finally terminates Your grants, and (b) on an ongoing
|
||||
basis, if such Contributor fails to notify You of the non-compliance by
|
||||
some reasonable means prior to 60 days after You have come back into
|
||||
compliance. Moreover, Your grants from a particular Contributor are
|
||||
reinstated on an ongoing basis if such Contributor notifies You of the
|
||||
non-compliance by some reasonable means, this is the first time You have
|
||||
received notice of non-compliance with this License from such
|
||||
Contributor, and You become compliant prior to 30 days after Your receipt
|
||||
of the notice.
|
||||
|
||||
5.2. If You initiate litigation against any entity by asserting a patent
|
||||
infringement claim (excluding declaratory judgment actions,
|
||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||
directly or indirectly infringes any patent, then the rights granted to
|
||||
You by any and all Contributors for the Covered Software under Section
|
||||
2.1 of this License shall terminate.
|
||||
|
||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
|
||||
license agreements (excluding distributors and resellers) which have been
|
||||
validly granted by You or Your distributors under this License prior to
|
||||
termination shall survive termination.
|
||||
|
||||
6. Disclaimer of Warranty
|
||||
|
||||
Covered Software is provided under this License on an "as is" basis,
|
||||
without warranty of any kind, either expressed, implied, or statutory,
|
||||
including, without limitation, warranties that the Covered Software is free
|
||||
of defects, merchantable, fit for a particular purpose or non-infringing.
|
||||
The entire risk as to the quality and performance of the Covered Software
|
||||
is with You. Should any Covered Software prove defective in any respect,
|
||||
You (not any Contributor) assume the cost of any necessary servicing,
|
||||
repair, or correction. This disclaimer of warranty constitutes an essential
|
||||
part of this License. No use of any Covered Software is authorized under
|
||||
this License except under this disclaimer.
|
||||
|
||||
7. Limitation of Liability
|
||||
|
||||
Under no circumstances and under no legal theory, whether tort (including
|
||||
negligence), contract, or otherwise, shall any Contributor, or anyone who
|
||||
distributes Covered Software as permitted above, be liable to You for any
|
||||
direct, indirect, special, incidental, or consequential damages of any
|
||||
character including, without limitation, damages for lost profits, loss of
|
||||
goodwill, work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses, even if such party shall have been
|
||||
informed of the possibility of such damages. This limitation of liability
|
||||
shall not apply to liability for death or personal injury resulting from
|
||||
such party's negligence to the extent applicable law prohibits such
|
||||
limitation. Some jurisdictions do not allow the exclusion or limitation of
|
||||
incidental or consequential damages, so this exclusion and limitation may
|
||||
not apply to You.
|
||||
|
||||
8. Litigation
|
||||
|
||||
Any litigation relating to this License may be brought only in the courts
|
||||
of a jurisdiction where the defendant maintains its principal place of
|
||||
business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions. Nothing
|
||||
in this Section shall prevent a party's ability to bring cross-claims or
|
||||
counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides that
|
||||
the language of a contract shall be construed against the drafter shall not
|
||||
be used to construe this License against a Contributor.
|
||||
|
||||
|
||||
10. Versions of the License
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses If You choose to distribute Source Code Form that is
|
||||
Incompatible With Secondary Licenses under the terms of this version of
|
||||
the License, the notice described in Exhibit B of this License must be
|
||||
attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
|
||||
This Source Code Form is subject to the
|
||||
terms of the Mozilla Public License, v.
|
||||
2.0. If a copy of the MPL was not
|
||||
distributed with this file, You can
|
||||
obtain one at
|
||||
https://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular file,
|
||||
then You may include the notice in a location (such as a LICENSE file in a
|
||||
relevant directory) where a recipient would be likely to look for such a
|
||||
notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
|
||||
This Source Code Form is "Incompatible
|
||||
With Secondary Licenses", as defined by
|
||||
the Mozilla Public License, v. 2.0.
|
||||
@@ -0,0 +1,34 @@
|
||||
from functools import reduce
|
||||
from typing import Any, Callable, Dict
|
||||
|
||||
from . import formats
|
||||
from .error_reporting import detailed_errors, ValidationError
|
||||
from .extra_validations import EXTRA_VALIDATIONS
|
||||
from .fastjsonschema_exceptions import JsonSchemaException, JsonSchemaValueException
|
||||
from .fastjsonschema_validations import validate as _validate
|
||||
|
||||
__all__ = [
|
||||
"validate",
|
||||
"FORMAT_FUNCTIONS",
|
||||
"EXTRA_VALIDATIONS",
|
||||
"ValidationError",
|
||||
"JsonSchemaException",
|
||||
"JsonSchemaValueException",
|
||||
]
|
||||
|
||||
|
||||
# Mapping of JSON Schema ``format`` names to the validator functions defined in
# the ``formats`` module.  The format name is derived from the function name by
# replacing ``_`` with ``-`` (e.g. ``pep508_identifier`` -> ``pep508-identifier``);
# private helpers (leading ``_``) are excluded.
FORMAT_FUNCTIONS: Dict[str, Callable[[str], bool]] = {
    fn.__name__.replace("_", "-"): fn
    for fn in formats.__dict__.values()
    if callable(fn) and not fn.__name__.startswith("_")
}
|
||||
|
||||
|
||||
def validate(data: Any) -> bool:
    """Validate the given ``data`` object using JSON Schema.

    Raises ``ValidationError`` when ``data`` is invalid; returns ``True``
    otherwise.
    """
    with detailed_errors():
        _validate(data, custom_formats=FORMAT_FUNCTIONS)

    # Run the extra (non-JSON-Schema) checks, threading the data through
    # each one; every check raises on failure.
    checked = data
    for extra_check in EXTRA_VALIDATIONS:
        checked = extra_check(checked)

    return True
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,336 @@
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import typing
|
||||
from contextlib import contextmanager
|
||||
from textwrap import indent, wrap
|
||||
from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence, Union
|
||||
|
||||
from .fastjsonschema_exceptions import JsonSchemaValueException
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
from typing_extensions import Self
|
||||
else:
|
||||
from typing import Self
|
||||
|
||||
_logger = logging.getLogger(__name__)

# Jargon-heavy fragments of fastjsonschema messages replaced with friendlier
# wording (applied in ``_ErrorFormatting._expand_summary``).
_MESSAGE_REPLACEMENTS = {
    "must be named by propertyName definition": "keys must be named by",
    "one of contains definition": "at least one item that matches",
    " same as const definition:": "",
    "only specified items": "only items matching the definition",
}

# Messages that are already self-explanatory: no schema rendering is appended.
_SKIP_DETAILS = (
    "must not be empty",
    "is always invalid",
    "must not be there",
)

# Rules whose messages are meaningless without showing the offending schema.
_NEED_DETAILS = {"anyOf", "oneOf", "allOf", "contains", "propertyNames", "not", "items"}

# Splits identifiers on non-word characters and camelCase boundaries.
_CAMEL_CASE_SPLITTER = re.compile(r"\W+|([A-Z][^A-Z\W]*)")
_IDENTIFIER = re.compile(r"^[\w_]+$", re.I)

# Translate JSON Schema vocabulary into TOML vocabulary for error messages
# (used by ``_SummaryWriter``).
_TOML_JARGON = {
    "object": "table",
    "property": "key",
    "properties": "keys",
    "property names": "keys",
}

# Epilog appended to details whenever a `format` keyword is involved.
_FORMATS_HELP = """
For more details about `format` see
https://validate-pyproject.readthedocs.io/en/latest/api/validate_pyproject.formats.html
"""
|
||||
|
||||
|
||||
class ValidationError(JsonSchemaValueException):
    """Report violations of a given JSON schema.

    Extends :exc:`~fastjsonschema.JsonSchemaValueException` with:

    - ``summary``: a cleaned-up version of the original error message that
      keeps only the necessary information.

    - ``details``: extra context about the error, such as the failing schema
      fragment and the offending value.

    The exception message is just ``summary`` by default; when the effective
    ``logging`` level is :obj:`logging.DEBUG` it also includes ``details``.
    """

    summary = ""
    details = ""
    _original_message = ""

    @classmethod
    def _from_jsonschema(cls, ex: JsonSchemaValueException) -> "Self":
        # Build the improved exception out of the raw fastjsonschema one.
        formatting = _ErrorFormatting(ex)
        obj = cls(str(formatting), ex.value, formatting.name, ex.definition, ex.rule)

        # Preserve the original traceback only when code-generation debugging
        # is explicitly requested via the environment.
        debug = os.getenv("JSONSCHEMA_DEBUG_CODE_GENERATION", "false").lower()
        if debug != "false":  # pragma: no cover
            obj.__cause__ = ex.__cause__
            obj.__traceback__ = ex.__traceback__

        obj._original_message = ex.message
        obj.summary = formatting.summary
        obj.details = formatting.details
        return obj
|
||||
|
||||
|
||||
@contextmanager
def detailed_errors() -> Generator[None, None, None]:
    """Re-raise any ``JsonSchemaValueException`` from the wrapped code as the
    richer :class:`ValidationError` (suppressing the original as context).
    """
    try:
        yield
    except JsonSchemaValueException as original:
        raise ValidationError._from_jsonschema(original) from None
|
||||
|
||||
|
||||
class _ErrorFormatting:
    """Derive a human-friendly ``summary`` and verbose ``details`` text from a
    raw ``JsonSchemaValueException`` (both computed lazily and cached).
    """

    def __init__(self, ex: JsonSchemaValueException):
        self.ex = ex
        # Display name, e.g. "data.project.name" -> "`project.name`".
        self.name = f"`{self._simplify_name(ex.name)}`"
        self._original_message: str = self.ex.message.replace(ex.name, self.name)
        self._summary = ""
        self._details = ""

    def __str__(self) -> str:
        # Details are shown only under verbose logging (DEBUG or lower).
        if _logger.getEffectiveLevel() <= logging.DEBUG and self.details:
            return f"{self.summary}\n\n{self.details}"

        return self.summary

    @property
    def summary(self) -> str:
        # Lazily computed; "" doubles as the "not computed yet" marker.
        if not self._summary:
            self._summary = self._expand_summary()

        return self._summary

    @property
    def details(self) -> str:
        # Lazily computed; "" doubles as the "not computed yet" marker.
        if not self._details:
            self._details = self._expand_details()

        return self._details

    @staticmethod
    def _simplify_name(name: str) -> str:
        # Drop the "data." prefix that fastjsonschema prepends to all paths.
        x = len("data.")
        return name[x:] if name.startswith("data.") else name

    def _expand_summary(self) -> str:
        msg = self._original_message

        # Replace jargon-heavy fragments with friendlier wording.
        for bad, repl in _MESSAGE_REPLACEMENTS.items():
            msg = msg.replace(bad, repl)

        # Some messages are clear enough on their own.
        if any(substring in msg for substring in _SKIP_DETAILS):
            return msg

        # For composite rules (anyOf, oneOf, ...) append a summarised
        # rendering of the offending schema fragment.
        schema = self.ex.rule_definition
        if self.ex.rule in _NEED_DETAILS and schema:
            summary = _SummaryWriter(_TOML_JARGON)
            return f"{msg}:\n\n{indent(summary(schema), '    ')}"

        return msg

    def _expand_details(self) -> str:
        optional = []
        # NOTE(review): ``pop`` mutates ``self.ex.definition`` in place so the
        # description keys do not also appear in the JSON dump below — confirm
        # no caller relies on the definition staying intact afterwards.
        definition = self.ex.definition or {}
        desc_lines = definition.pop("$$description", [])
        desc = definition.pop("description", None) or " ".join(desc_lines)
        if desc:
            description = "\n".join(
                wrap(
                    desc,
                    width=80,
                    initial_indent="    ",
                    subsequent_indent="    ",
                    break_long_words=False,
                )
            )
            optional.append(f"DESCRIPTION:\n{description}")
        schema = json.dumps(definition, indent=4)
        value = json.dumps(self.ex.value, indent=4)
        defaults = [
            f"GIVEN VALUE:\n{indent(value, '    ')}",
            f"OFFENDING RULE: {self.ex.rule!r}",
            f"DEFINITION:\n{indent(schema, '    ')}",
        ]
        msg = "\n\n".join(optional + defaults)
        # Point the user to the formats documentation when relevant.
        epilog = f"\n{_FORMATS_HELP}" if "format" in msg.lower() else ""
        return msg + epilog
|
||||
|
||||
|
||||
class _SummaryWriter:
    """Render a JSON Schema (dict or list of dicts) as an indented,
    human-readable outline, translating schema vocabulary via ``jargon``.
    """

    # Schema keys that carry no structural information for the summary.
    _IGNORE = frozenset(("description", "default", "title", "examples"))

    def __init__(self, jargon: Optional[Dict[str, str]] = None):
        # Optional vocabulary mapping (e.g. ``_TOML_JARGON``) applied to
        # JSON Schema terms before display.
        self.jargon: Dict[str, str] = jargon or {}
        # Clarify confusing terms
        self._terms = {
            "anyOf": "at least one of the following",
            "oneOf": "exactly one of the following",
            "allOf": "all of the following",
            "not": "(*NOT* the following)",
            "prefixItems": f"{self._jargon('items')} (in order)",
            "items": "items",
            "contains": "contains at least one of",
            "propertyNames": (
                f"non-predefined acceptable {self._jargon('property names')}"
            ),
            "patternProperties": f"{self._jargon('properties')} named via pattern",
            "const": "predefined value",
            "enum": "one of",
        }
        # Attributes that indicate that the definition is easy and can be done
        # inline (e.g. string and number)
        self._guess_inline_defs = [
            "enum",
            "const",
            "maxLength",
            "minLength",
            "pattern",
            "format",
            "minimum",
            "maximum",
            "exclusiveMinimum",
            "exclusiveMaximum",
            "multipleOf",
        ]

    def _jargon(self, term: Union[str, List[str]]) -> Union[str, List[str]]:
        # Translate a term (or list of terms) through the jargon mapping.
        if isinstance(term, list):
            return [self.jargon.get(t, t) for t in term]
        return self.jargon.get(term, term)

    def __call__(
        self,
        schema: Union[dict, List[dict]],
        prefix: str = "",
        *,
        _path: Sequence[str] = (),
    ) -> str:
        # Recursive entry point; ``_path`` tracks the position inside the
        # schema so keys can be labelled correctly.
        if isinstance(schema, list):
            return self._handle_list(schema, prefix, _path)

        filtered = self._filter_unecessary(schema, _path)
        simple = self._handle_simple_dict(filtered, _path)
        if simple:
            return f"{prefix}{simple}"

        child_prefix = self._child_prefix(prefix, "  ")
        item_prefix = self._child_prefix(prefix, "- ")
        indent = len(prefix) * " "
        with io.StringIO() as buffer:
            for i, (key, value) in enumerate(filtered.items()):
                child_path = [*_path, key]
                line_prefix = prefix if i == 0 else indent
                buffer.write(f"{line_prefix}{self._label(child_path)}:")
                # ^ just the first item should receive the complete prefix
                if isinstance(value, dict):
                    filtered = self._filter_unecessary(value, child_path)
                    simple = self._handle_simple_dict(filtered, child_path)
                    buffer.write(
                        f" {simple}"
                        if simple
                        else f"\n{self(value, child_prefix, _path=child_path)}"
                    )
                elif isinstance(value, list) and (
                    key != "type" or self._is_property(child_path)
                ):
                    children = self._handle_list(value, item_prefix, child_path)
                    sep = " " if children.startswith("[") else "\n"
                    buffer.write(f"{sep}{children}")
                else:
                    buffer.write(f" {self._value(value, child_path)}\n")
            return buffer.getvalue()

    def _is_unecessary(self, path: Sequence[str]) -> bool:
        # Meta keys ($*, _*, description-like) are skipped unless the path is
        # an arbitrarily named property (which may legitimately look like one).
        if self._is_property(path) or not path:  # empty path => instruction @ root
            return False
        key = path[-1]
        return any(key.startswith(k) for k in "$_") or key in self._IGNORE

    def _filter_unecessary(
        self, schema: Dict[str, Any], path: Sequence[str]
    ) -> Dict[str, Any]:
        return {
            key: value
            for key, value in schema.items()
            if not self._is_unecessary([*path, key])
        }

    def _handle_simple_dict(self, value: dict, path: Sequence[str]) -> Optional[str]:
        # Render "leaf" definitions on a single line, e.g. {format: 'pep440'}.
        inline = any(p in value for p in self._guess_inline_defs)
        simple = not any(isinstance(v, (list, dict)) for v in value.values())
        if inline or simple:
            return f"{{{', '.join(self._inline_attrs(value, path))}}}\n"
        return None

    def _handle_list(
        self, schemas: list, prefix: str = "", path: Sequence[str] = ()
    ) -> str:
        if self._is_unecessary(path):
            return ""

        # Short lists of scalars are shown via their repr on one line.
        repr_ = repr(schemas)
        if all(not isinstance(e, (dict, list)) for e in schemas) and len(repr_) < 60:
            return f"{repr_}\n"

        item_prefix = self._child_prefix(prefix, "- ")
        return "".join(
            self(v, item_prefix, _path=[*path, f"[{i}]"]) for i, v in enumerate(schemas)
        )

    def _is_property(self, path: Sequence[str]) -> bool:
        """Check if the given path can correspond to an arbitrarily named property"""
        counter = 0
        for key in path[-2::-1]:
            if key not in {"properties", "patternProperties"}:
                break
            counter += 1

        # If the counter if even, the path correspond to a JSON Schema keyword
        # otherwise it can be any arbitrary string naming a property
        return counter % 2 == 1

    def _label(self, path: Sequence[str]) -> str:
        *parents, key = path
        if not self._is_property(path):
            norm_key = _separate_terms(key)
            return self._terms.get(key) or " ".join(self._jargon(norm_key))

        if parents[-1] == "patternProperties":
            return f"(regex {key!r})"
        return repr(key)  # property name

    def _value(self, value: Any, path: Sequence[str]) -> str:
        # "type" values are translated through the jargon mapping.
        if path[-1] == "type" and not self._is_property(path):
            type_ = self._jargon(value)
            return f"[{', '.join(type_)}]" if isinstance(type_, list) else type_
        return repr(value)

    def _inline_attrs(self, schema: dict, path: Sequence[str]) -> Iterator[str]:
        for key, value in schema.items():
            child_path = [*path, key]
            yield f"{self._label(child_path)}: {self._value(value, child_path)}"

    def _child_prefix(self, parent_prefix: str, child_prefix: str) -> str:
        return len(parent_prefix) * " " + child_prefix
|
||||
|
||||
|
||||
def _separate_terms(word: str) -> List[str]:
    """Split *word* on camelCase boundaries and non-word characters,
    lower-casing each piece.

    >>> _separate_terms("FooBar-foo")
    ['foo', 'bar', 'foo']
    """
    pieces = _CAMEL_CASE_SPLITTER.split(word)
    return [piece.lower() for piece in pieces if piece]
|
||||
@@ -0,0 +1,52 @@
|
||||
"""The purpose of this module is implement PEP 621 validations that are
|
||||
difficult to express as a JSON Schema (or that are not supported by the current
|
||||
JSON Schema library).
|
||||
"""
|
||||
|
||||
from inspect import cleandoc
|
||||
from typing import Mapping, TypeVar
|
||||
|
||||
from .error_reporting import ValidationError
|
||||
|
||||
T = TypeVar("T", bound=Mapping)
|
||||
|
||||
|
||||
# Raised by ``validate_project_dynamic`` when a field appears both statically
# in ``[project]`` and in ``project.dynamic``.
class RedefiningStaticFieldAsDynamic(ValidationError):
    _DESC = """According to PEP 621:

    Build back-ends MUST raise an error if the metadata specifies a field
    statically as well as being listed in dynamic.
    """
    __doc__ = _DESC
    # Reference URL included in the error's ``definition`` payload.
    _URL = (
        "https://packaging.python.org/en/latest/specifications/"
        "pyproject-toml/#dynamic"
    )
|
||||
|
||||
|
||||
def validate_project_dynamic(pyproject: T) -> T:
    """Ensure no field is given statically in ``[project]`` while also being
    listed in ``project.dynamic`` (forbidden by PEP 621).

    Returns ``pyproject`` unchanged when valid; raises
    :class:`RedefiningStaticFieldAsDynamic` otherwise.
    """
    project = pyproject.get("project", {})
    dynamic = project.get("dynamic", [])

    # Raise on the first dynamic field that also has a static value.
    offending = (field for field in dynamic if field in project)
    for field in offending:
        raise RedefiningStaticFieldAsDynamic(
            message=f"You cannot provide a value for `project.{field}` and "
            "list it under `project.dynamic` at the same time",
            value={
                field: project[field],
                "...": " # ...",
                "dynamic": dynamic,
            },
            name=f"data.project.{field}",
            definition={
                "description": cleandoc(RedefiningStaticFieldAsDynamic._DESC),
                "see": RedefiningStaticFieldAsDynamic._URL,
            },
            rule="PEP 621",
        )

    return pyproject
|
||||
|
||||
|
||||
# Checks applied to the parsed pyproject data after JSON Schema validation.
EXTRA_VALIDATIONS = (validate_project_dynamic,)
|
||||
@@ -0,0 +1,51 @@
|
||||
import re
|
||||
|
||||
|
||||
# Splits an error ``name`` such as "data.prop[0]" into its path components.
SPLIT_RE = re.compile(r'[\.\[\]]+')
|
||||
|
||||
|
||||
class JsonSchemaException(ValueError):
    """
    Base exception of ``fastjsonschema`` library.
    """
    # Parent of ``JsonSchemaValueException`` (data fails validation) and
    # ``JsonSchemaDefinitionException`` (schema itself is invalid).
|
||||
|
||||
|
||||
class JsonSchemaValueException(JsonSchemaException):
    """
    Exception raised by validation function. Available properties:

    * ``message`` containing human-readable information what is wrong (e.g. ``data.property[index] must be smaller than or equal to 42``),
    * invalid ``value`` (e.g. ``60``),
    * ``name`` of a path in the data structure (e.g. ``data.property[index]``),
    * ``path`` as an array in the data structure (e.g. ``['data', 'property', 'index']``),
    * the whole ``definition`` which the ``value`` has to fulfil (e.g. ``{'type': 'number', 'maximum': 42}``),
    * ``rule`` which the ``value`` is breaking (e.g. ``maximum``)
    * and ``rule_definition`` (e.g. ``42``).

    .. versionchanged:: 2.14.0
        Added all extra properties.
    """

    def __init__(self, message, value=None, name=None, definition=None, rule=None):
        super().__init__(message)
        # Keep every piece of context as a plain attribute for later reporting.
        self.message = message
        self.value = value
        self.name = name
        self.definition = definition
        self.rule = rule

    @property
    def path(self):
        # Break the dotted/bracketed name into its non-empty components.
        return [piece for piece in SPLIT_RE.split(self.name) if piece != '']

    @property
    def rule_definition(self):
        # The fragment of the schema corresponding to the broken rule.
        if self.rule and self.definition:
            return self.definition.get(self.rule)
        return None
|
||||
|
||||
|
||||
class JsonSchemaDefinitionException(JsonSchemaException):
    """
    Exception raised by generator of validation function.
    """
    # Signals a problem with the schema definition itself (as opposed to
    # ``JsonSchemaValueException``, which signals invalid data).
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,375 @@
|
||||
"""
|
||||
The functions in this module are used to validate schemas with the
|
||||
`format JSON Schema keyword
|
||||
<https://json-schema.org/understanding-json-schema/reference/string#format>`_.
|
||||
|
||||
The correspondence is given by replacing the ``_`` character in the name of the
|
||||
function with a ``-`` to obtain the format name and vice versa.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import string
|
||||
import typing
|
||||
from itertools import chain as _chain
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from typing_extensions import Literal
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# -------------------------------------------------------------------------------------
|
||||
# PEP 440
|
||||
|
||||
VERSION_PATTERN = r"""
|
||||
v?
|
||||
(?:
|
||||
(?:(?P<epoch>[0-9]+)!)? # epoch
|
||||
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
|
||||
(?P<pre> # pre-release
|
||||
[-_\.]?
|
||||
(?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
|
||||
[-_\.]?
|
||||
(?P<pre_n>[0-9]+)?
|
||||
)?
|
||||
(?P<post> # post release
|
||||
(?:-(?P<post_n1>[0-9]+))
|
||||
|
|
||||
(?:
|
||||
[-_\.]?
|
||||
(?P<post_l>post|rev|r)
|
||||
[-_\.]?
|
||||
(?P<post_n2>[0-9]+)?
|
||||
)
|
||||
)?
|
||||
(?P<dev> # dev release
|
||||
[-_\.]?
|
||||
(?P<dev_l>dev)
|
||||
[-_\.]?
|
||||
(?P<dev_n>[0-9]+)?
|
||||
)?
|
||||
)
|
||||
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
|
||||
"""
|
||||
|
||||
VERSION_REGEX = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.X | re.I)
|
||||
|
||||
|
||||
def pep440(version: str) -> bool:
    """See :ref:`PyPA's version specification <pypa:version-specifiers>`
    (initially introduced in :pep:`440`).
    """
    # A match object is truthy, ``None`` is falsy.
    return bool(VERSION_REGEX.match(version))
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------
|
||||
# PEP 508
|
||||
|
||||
PEP508_IDENTIFIER_PATTERN = r"([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])"
|
||||
PEP508_IDENTIFIER_REGEX = re.compile(f"^{PEP508_IDENTIFIER_PATTERN}$", re.I)
|
||||
|
||||
|
||||
def pep508_identifier(name: str) -> bool:
    """See :ref:`PyPA's name specification <pypa:name-format>`
    (initially introduced in :pep:`508#names`).
    """
    # A match object is truthy, ``None`` is falsy.
    return bool(PEP508_IDENTIFIER_REGEX.match(name))
|
||||
|
||||
|
||||
# Define ``pep508`` based on whichever ``packaging`` implementation is
# available; fall back to a permissive no-op when neither can be imported.
try:
    try:
        from packaging import requirements as _req
    except ImportError:  # pragma: no cover
        # let's try setuptools vendored version
        from setuptools._vendor.packaging import (  # type: ignore[no-redef]
            requirements as _req,
        )

    def pep508(value: str) -> bool:
        """See :ref:`PyPA's dependency specifiers <pypa:dependency-specifiers>`
        (initially introduced in :pep:`508`).
        """
        try:
            _req.Requirement(value)
            return True
        except _req.InvalidRequirement:
            return False

except ImportError:  # pragma: no cover
    _logger.warning(
        "Could not find an installation of `packaging`. Requirements, dependencies and "
        "versions might not be validated. "
        "To enforce validation, please install `packaging`."
    )

    def pep508(value: str) -> bool:
        # Permissive fallback: accept any value when ``packaging`` is missing.
        return True
|
||||
|
||||
|
||||
def pep508_versionspec(value: str) -> bool:
    """Expression that can be used to specify/lock versions (including ranges)
    See ``versionspec`` in :ref:`PyPA's dependency specifiers
    <pypa:dependency-specifiers>` (initially introduced in :pep:`508`).
    """
    # In PEP 508, conditional markers (";"), extras ("]") and URL specs ("@")
    # are not part of the versionspec itself.
    forbidden = (";", "]", "@")
    if any(marker in value for marker in forbidden):
        return False

    # Attach the spec to a fake requirement name so the full pep508
    # validator can be reused.
    return pep508(f"requirement{value}")
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------
|
||||
# PEP 517
|
||||
|
||||
|
||||
def pep517_backend_reference(value: str) -> bool:
    """See PyPA's specification for defining build-backend references
    introduced in :pep:`517#source-trees`.

    This is similar to an entry-point reference (e.g., ``package.module:object``).
    """
    module, _, obj = value.partition(":")
    # Every dotted segment on either side of ":" must be a valid identifier.
    segments = [*module.split("."), *obj.split(".")]
    return all(python_identifier(seg.strip()) for seg in segments if seg.strip())
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------
|
||||
# Classifiers - PEP 301
|
||||
|
||||
|
||||
def _download_classifiers() -> str:
    """Download the list of valid trove classifiers from PyPI and return it as
    decoded text (consumed line-by-line by ``_TroveClassifier``).
    """
    import ssl
    from email.message import Message
    from urllib.request import urlopen

    url = "https://pypi.org/pypi?:action=list_classifiers"
    context = ssl.create_default_context()
    with urlopen(url, context=context) as response:  # noqa: S310 (audit URLs)
        # Reuse email.Message's header parsing to extract the charset param
        # from the Content-Type header (defaulting to utf-8).
        headers = Message()
        headers["content_type"] = response.getheader("content-type", "text/plain")
        return response.read().decode(headers.get_param("charset", "utf-8"))  # type: ignore[no-any-return]
|
||||
|
||||
|
||||
class _TroveClassifier:
    """The ``trove_classifiers`` package is the official way of validating classifiers,
    however this package might not be always available.
    As a workaround we can still download a list from PyPI.
    We also don't want to be over strict about it, so simply skipping silently is an
    option (classifiers will be validated anyway during the upload to PyPI).
    """

    # Cached classifier list:
    #   None  => not downloaded yet
    #   False => download failed or disabled (validation is skipped)
    #   set   => classifiers fetched from PyPI
    downloaded: typing.Union[None, "Literal[False]", typing.Set[str]]

    def __init__(self) -> None:
        self.downloaded = None
        self._skip_download = False
        # None => not cached yet
        # False => cache not available
        self.__name__ = "trove_classifier"  # Emulate a public function

    def _disable_download(self) -> None:
        # This is a private API. Only setuptools has the consent of using it.
        self._skip_download = True

    def __call__(self, value: str) -> bool:
        # When no classifier list is available, accept everything (PyPI will
        # validate on upload anyway — see class docstring).
        if self.downloaded is False or self._skip_download is True:
            return True

        if os.getenv("NO_NETWORK") or os.getenv("VALIDATE_PYPROJECT_NO_NETWORK"):
            self.downloaded = False
            msg = (
                "Install ``trove-classifiers`` to ensure proper validation. "
                "Skipping download of classifiers list from PyPI (NO_NETWORK)."
            )
            _logger.debug(msg)
            return True

        if self.downloaded is None:
            msg = (
                "Install ``trove-classifiers`` to ensure proper validation. "
                "Meanwhile a list of classifiers will be downloaded from PyPI."
            )
            _logger.debug(msg)
            try:
                self.downloaded = set(_download_classifiers().splitlines())
            except Exception:
                # Best-effort: a failed download disables validation instead
                # of breaking the build.
                self.downloaded = False
                _logger.debug("Problem with download, skipping validation")
                return True

        # "Private ::" classifiers are always accepted (never uploaded).
        return value in self.downloaded or value.lower().startswith("private ::")
|
||||
|
||||
|
||||
# Prefer the official ``trove_classifiers`` package; otherwise fall back to
# the downloadable/skippable ``_TroveClassifier`` callable defined above.
try:
    from trove_classifiers import classifiers as _trove_classifiers

    def trove_classifier(value: str) -> bool:
        """See https://pypi.org/classifiers/"""
        return value in _trove_classifiers or value.lower().startswith("private ::")

except ImportError:  # pragma: no cover
    trove_classifier = _TroveClassifier()
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------
|
||||
# Stub packages - PEP 561
|
||||
|
||||
|
||||
def pep561_stub_name(value: str) -> bool:
    """Name of a directory containing type stubs.

    It must follow the name scheme ``<package>-stubs`` as defined in
    :pep:`561#stub-only-packages`.
    """
    suffix = "-stubs"
    parts = value.split(".")
    if not parts[0].endswith(suffix):
        return False
    # Strip the suffix from the top-level name and validate as a module name
    parts[0] = parts[0][: -len(suffix)]
    return python_module_name(".".join(parts))
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------------
|
||||
# Non-PEP related
|
||||
|
||||
|
||||
def url(value: str) -> bool:
    """Check that *value* is a valid URL (validation uses :obj:`urllib.parse`).

    For maximum compatibility please make sure to include a ``scheme`` prefix
    in your URL (e.g. ``http://``).
    """
    from urllib.parse import urlparse

    try:
        parsed = urlparse(value)
        if not parsed.scheme:
            _logger.warning(
                "For maximum compatibility please make sure to include a "
                "`scheme` prefix in your URL (e.g. 'http://'). "
                f"Given value: {value}"
            )
            # Retry with an implicit scheme, unless the value looks like a
            # filesystem path or contains credentials
            looks_non_url = (
                value.startswith("/") or value.startswith("\\") or "@" in value
            )
            if not looks_non_url:
                parsed = urlparse(f"http://{value}")
        return bool(parsed.scheme and parsed.netloc)
    except Exception:
        return False
|
||||
|
||||
|
||||
# https://packaging.python.org/specifications/entry-points/
|
||||
# Loose pattern accepted for entry-point names: anything without ``[``, ``=``
# or surrounding whitespace.
ENTRYPOINT_PATTERN = r"[^\[\s=]([^=]*[^\s=])?"
ENTRYPOINT_REGEX = re.compile(f"^{ENTRYPOINT_PATTERN}$", re.I)
# Stricter pattern *recommended* by the spec (letters, digits, ``_``, ``.``, ``-``).
# NOTE: the "RECOMMEDED" misspelling is kept as-is — renaming would break
# other code referencing these names.
RECOMMEDED_ENTRYPOINT_PATTERN = r"[\w.-]+"
RECOMMEDED_ENTRYPOINT_REGEX = re.compile(f"^{RECOMMEDED_ENTRYPOINT_PATTERN}$", re.I)
# Entry-point groups look like dotted identifiers (e.g. ``console_scripts``).
ENTRYPOINT_GROUP_PATTERN = r"\w+(\.\w+)*"
ENTRYPOINT_GROUP_REGEX = re.compile(f"^{ENTRYPOINT_GROUP_PATTERN}$", re.I)
|
||||
|
||||
|
||||
def python_identifier(value: str) -> bool:
    """Check *value* can be used as an identifier in Python
    (validation uses :obj:`str.isidentifier`).
    """
    is_valid = value.isidentifier()
    return is_valid
|
||||
|
||||
|
||||
def python_qualified_identifier(value: str) -> bool:
    """
    Python "dotted identifier": a sequence of :obj:`python_identifier` parts
    concatenated with ``"."`` (e.g.: ``package.module.submodule``).
    """
    has_outer_dot = value.startswith(".") or value.endswith(".")
    if has_outer_dot:
        return False
    # Every dot-separated segment must be a valid identifier on its own
    return all(map(python_identifier, value.split(".")))
|
||||
|
||||
|
||||
def python_module_name(value: str) -> bool:
    """Module name that can be used in an ``import``-statement in Python.
    See :obj:`python_qualified_identifier`.
    """
    # Importable module names share the dotted-identifier syntax, so delegate.
    return python_qualified_identifier(value)
|
||||
|
||||
|
||||
def python_module_name_relaxed(value: str) -> bool:
    """Similar to :obj:`python_module_name`, but relaxed to also accept
    dash characters (``-``) and cover special cases like ``pip-run``.

    It is recommended, however, that beginners avoid dash characters,
    as they require advanced knowledge about Python internals.

    The following are disallowed:

    * names starting/ending in dashes,
    * names ending in ``-stubs`` (potentially collide with :obj:`pep561_stub_name`).
    """
    if value.startswith("-") or value.endswith(("-", "-stubs")):
        # Leading/trailing dashes are invalid; ``-stubs`` is reserved (PEP 561)
        return False
    # Treat dashes as underscores and validate as a regular module name
    return python_module_name(value.replace("-", "_"))
|
||||
|
||||
|
||||
def python_entrypoint_group(value: str) -> bool:
    """See ``Data model > group`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    return bool(ENTRYPOINT_GROUP_REGEX.match(value))
|
||||
|
||||
|
||||
def python_entrypoint_name(value: str) -> bool:
    """See ``Data model > name`` in the :ref:`PyPA's entry-points specification
    <pypa:entry-points>`.
    """
    if ENTRYPOINT_REGEX.match(value) is None:
        return False
    if RECOMMEDED_ENTRYPOINT_REGEX.match(value) is None:
        # Valid but discouraged: warn without rejecting
        _logger.warning(
            f"Entry point `{value}` does not follow recommended pattern: "
            + RECOMMEDED_ENTRYPOINT_PATTERN
        )
    return True
|
||||
|
||||
|
||||
def python_entrypoint_reference(value: str) -> bool:
    """Reference to a Python object using the format::

        importable.module:object.attr

    See ``Data model > object reference`` in the :ref:`PyPA's entry-points
    specification <pypa:entry-points>`.

    :param value: candidate entry-point value (``module[:attrs][extras]``)
    :return: ``True`` when the reference is syntactically valid
    """
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        extras_stripped = extras_.strip()
        # BUGFIX: guard against empty extras (e.g. ``pkg.mod:obj[``), which
        # previously raised IndexError instead of returning False.
        if not extras_stripped or extras_stripped[-1] != "]":
            return False
        extras = (x.strip() for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    # Every dotted segment (module and attribute path) must be an identifier
    module_parts = module.split(".")
    identifiers = _chain(module_parts, obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
|
||||
|
||||
|
||||
def uint8(value: builtins.int) -> bool:
    r"""Unsigned 8-bit integer (:math:`0 \leq x < 2^8`)"""
    return 0 <= value <= 0xFF
|
||||
|
||||
|
||||
def uint16(value: builtins.int) -> bool:
    r"""Unsigned 16-bit integer (:math:`0 \leq x < 2^{16}`)"""
    return 0 <= value <= 0xFFFF
|
||||
|
||||
|
||||
def uint(value: builtins.int) -> bool:
    r"""Unsigned 64-bit integer (:math:`0 \leq x < 2^{64}`)"""
    return 0 <= value < (1 << 64)
|
||||
|
||||
|
||||
def int(value: builtins.int) -> bool:
    r"""Signed 64-bit integer (:math:`-2^{63} \leq x < 2^{63}`)"""
    # NOTE: the name deliberately shadows the builtin — it mirrors the JSON
    # schema format name; use ``builtins.int`` for the real type.
    bound = 1 << 63
    return -bound <= value < bound
|
||||
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
|
||||
"$id": "https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html",
|
||||
"title": "``tool.distutils`` table",
|
||||
"$$description": [
|
||||
"**EXPERIMENTAL** (NOT OFFICIALLY SUPPORTED): Use ``tool.distutils``",
|
||||
"subtables to configure arguments for ``distutils`` commands.",
|
||||
"Originally, ``distutils`` allowed developers to configure arguments for",
|
||||
"``setup.py`` commands via `distutils configuration files",
|
||||
"<https://setuptools.pypa.io/en/latest/deprecated/distutils/configfile.html>`_.",
|
||||
"See also `the old Python docs <https://docs.python.org/3.11/install/>_`."
|
||||
],
|
||||
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"global": {
|
||||
"type": "object",
|
||||
"description": "Global options applied to all ``distutils`` commands"
|
||||
}
|
||||
},
|
||||
"patternProperties": {
|
||||
".+": {"type": "object"}
|
||||
},
|
||||
"$comment": "TODO: Is there a practical way of making this schema more specific?"
|
||||
}
|
||||
443
myenv/lib/python3.12/site-packages/setuptools/config/expand.py
Normal file
443
myenv/lib/python3.12/site-packages/setuptools/config/expand.py
Normal file
@@ -0,0 +1,443 @@
|
||||
"""Utility functions to expand configuration directives or special values
|
||||
(such glob patterns).
|
||||
|
||||
We can split the process of interpreting configuration files into 2 steps:
|
||||
|
||||
1. The parsing the file contents from strings to value objects
|
||||
that can be understand by Python (for example a string with a comma
|
||||
separated list of keywords into an actual Python list of strings).
|
||||
|
||||
2. The expansion (or post-processing) of these values according to the
|
||||
semantics ``setuptools`` assign to them (for example a configuration field
|
||||
with the ``file:`` directive should be expanded from a list of file paths to
|
||||
a single string with the contents of those files concatenated)
|
||||
|
||||
This module focus on the second step, and therefore allow sharing the expansion
|
||||
functions among several configuration file formats.
|
||||
|
||||
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import ast
|
||||
import importlib
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
from configparser import ConfigParser
|
||||
from glob import iglob
|
||||
from importlib.machinery import ModuleSpec, all_suffixes
|
||||
from itertools import chain
|
||||
from pathlib import Path
|
||||
from types import ModuleType, TracebackType
|
||||
from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, TypeVar
|
||||
|
||||
from .._path import StrPath, same_path as _same_path
|
||||
from ..discovery import find_package_path
|
||||
from ..warnings import SetuptoolsWarning
|
||||
|
||||
from distutils.errors import DistutilsOptionError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import Self
|
||||
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
# Type parameters for LazyMappingProxy; values are only read through the
# Mapping interface, hence ``_V`` is covariant.
_K = TypeVar("_K")
_V = TypeVar("_V", covariant=True)
|
||||
|
||||
|
||||
class StaticModule:
    """Proxy to a module object that avoids executing arbitrary code."""

    def __init__(self, name: str, spec: ModuleSpec):
        self.name = name
        self.spec = spec
        # Parse the source text instead of importing it, so no module code runs
        self.module = ast.parse(pathlib.Path(spec.origin).read_bytes())  # type: ignore[arg-type] # Let it raise an error on None

    def _find_assignments(self) -> Iterator[tuple[ast.AST, ast.AST]]:
        """Yield ``(target, value)`` pairs for every top-level assignment."""
        for node in self.module.body:
            if isinstance(node, ast.Assign):
                for target in node.targets:
                    yield (target, node.value)
            elif isinstance(node, ast.AnnAssign) and node.value:
                yield (node.target, node.value)

    def __getattr__(self, attr: str):
        """Attempt to load an attribute "statically", via :func:`ast.literal_eval`."""
        matches = (
            ast.literal_eval(value)
            for target, value in self._find_assignments()
            if isinstance(target, ast.Name) and target.id == attr
        )
        try:
            return next(matches)
        except Exception as e:
            raise AttributeError(f"{self.name} has no attribute {attr}") from e
|
||||
|
||||
|
||||
def glob_relative(
    patterns: Iterable[str], root_dir: StrPath | None = None
) -> list[str]:
    """Expand the list of glob patterns, but preserving relative paths.

    :param list[str] patterns: List of glob patterns
    :param str root_dir: Path to which globs should be relative
        (current directory by default)
    :rtype: list
    """
    GLOB_CHARS = ('*', '?', '[', ']', '{', '}')
    root_dir = root_dir or os.getcwd()
    expanded: list[str] = []

    def _relative(path):
        # Normalise to forward slashes so results are platform independent
        return os.path.relpath(path, root_dir).replace(os.sep, "/")

    for pattern in patterns:
        if any(char in pattern for char in GLOB_CHARS):
            # Expand the glob while keeping the resulting paths *relative*
            absolute = os.path.abspath(os.path.join(root_dir, pattern))
            expanded.extend(sorted(map(_relative, iglob(absolute, recursive=True))))
        else:
            # Non-glob values are taken as-is (just normalised)
            expanded.append(_relative(pattern))

    return expanded
|
||||
|
||||
|
||||
def read_files(
    filepaths: StrPath | Iterable[StrPath], root_dir: StrPath | None = None
) -> str:
    """Return the content of the files concatenated using ``\\n`` as str.

    This function is sandboxed and won't reach anything outside ``root_dir``
    (by default ``root_dir`` is the current directory).
    """
    from more_itertools import always_iterable

    root_dir = os.path.abspath(root_dir or os.getcwd())
    full_paths = (os.path.join(root_dir, p) for p in always_iterable(filepaths))
    contents = (
        _read_file(p)
        for p in _filter_existing_files(full_paths)
        if _assert_local(p, root_dir)
    )
    return '\n'.join(contents)
|
||||
|
||||
|
||||
def _filter_existing_files(filepaths: Iterable[StrPath]) -> Iterator[StrPath]:
|
||||
for path in filepaths:
|
||||
if os.path.isfile(path):
|
||||
yield path
|
||||
else:
|
||||
SetuptoolsWarning.emit(f"File {path!r} cannot be found")
|
||||
|
||||
|
||||
def _read_file(filepath: bytes | StrPath) -> str:
|
||||
with open(filepath, encoding='utf-8') as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def _assert_local(filepath: StrPath, root_dir: str):
|
||||
if Path(os.path.abspath(root_dir)) not in Path(os.path.abspath(filepath)).parents:
|
||||
msg = f"Cannot access {filepath!r} (or anything outside {root_dir!r})"
|
||||
raise DistutilsOptionError(msg)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def read_attr(
    attr_desc: str,
    package_dir: Mapping[str, str] | None = None,
    root_dir: StrPath | None = None,
) -> Any:
    """Reads the value of an attribute from a module.

    This function will try to read the attribute statically first
    (via :func:`ast.literal_eval`), and only evaluate the module if it fails.

    Examples:
        read_attr("package.attr")
        read_attr("package.module.attr")

    :param str attr_desc: Dot-separated string describing how to reach the
        attribute (see examples above)
    :param dict[str, str] package_dir: Mapping of package names to their
        location in disk (represented by paths relative to ``root_dir``).
    :param str root_dir: Path to directory containing all the packages in
        ``package_dir`` (current directory by default).
    :rtype: str
    """
    root_dir = root_dir or os.getcwd()
    # Split "package.module.attr" into the module part and the attribute name
    attrs_path = attr_desc.strip().split('.')
    attr_name = attrs_path.pop()
    module_name = '.'.join(attrs_path)
    module_name = module_name or '__init__'
    path = _find_module(module_name, package_dir, root_dir)
    spec = _find_spec(module_name, path)

    try:
        # Fast/safe path: read the attribute without executing the module
        return getattr(StaticModule(module_name, spec), attr_name)
    except Exception:
        # fallback to evaluate module
        module = _load_spec(spec, module_name)
        return getattr(module, attr_name)
|
||||
|
||||
|
||||
def _find_spec(module_name: str, module_path: StrPath | None) -> ModuleSpec:
|
||||
spec = importlib.util.spec_from_file_location(module_name, module_path)
|
||||
spec = spec or importlib.util.find_spec(module_name)
|
||||
|
||||
if spec is None:
|
||||
raise ModuleNotFoundError(module_name)
|
||||
|
||||
return spec
|
||||
|
||||
|
||||
def _load_spec(spec: ModuleSpec, module_name: str) -> ModuleType:
|
||||
name = getattr(spec, "__name__", module_name)
|
||||
if name in sys.modules:
|
||||
return sys.modules[name]
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
sys.modules[name] = module # cache (it also ensures `==` works on loaded items)
|
||||
spec.loader.exec_module(module) # type: ignore
|
||||
return module
|
||||
|
||||
|
||||
def _find_module(
    module_name: str, package_dir: Mapping[str, str] | None, root_dir: StrPath
) -> str | None:
    """Find the path to the module named ``module_name``,
    considering the ``package_dir`` in the build configuration and ``root_dir``.

    Returns ``None`` when no matching file exists.

    >>> tmp = getfixture('tmpdir')
    >>> _ = tmp.ensure("a/b/c.py")
    >>> _ = tmp.ensure("a/b/d/__init__.py")
    >>> r = lambda x: x.replace(str(tmp), "tmp").replace(os.sep, "/")
    >>> r(_find_module("a.b.c", None, tmp))
    'tmp/a/b/c.py'
    >>> r(_find_module("f.g.h", {"": "1", "f": "2", "f.g": "3", "f.g.h": "a/b/d"}, tmp))
    'tmp/a/b/d/__init__.py'
    """
    path_start = find_package_path(module_name, package_dir or {}, root_dir)
    # Try both a plain module file and a package ``__init__`` for every
    # recognised import suffix (source, bytecode, extension modules)
    candidates = chain.from_iterable(
        (f"{path_start}{ext}", os.path.join(path_start, f"__init__{ext}"))
        for ext in all_suffixes()
    )
    return next((x for x in candidates if os.path.isfile(x)), None)
|
||||
|
||||
|
||||
def resolve_class(
    qualified_class_name: str,
    package_dir: Mapping[str, str] | None = None,
    root_dir: StrPath | None = None,
) -> Callable:
    """Given a qualified class name, return the associated class object"""
    root_dir = root_dir or os.getcwd()
    # Split "pkg.module.Class" into the module part and the class name
    idx = qualified_class_name.rfind('.')
    pkg_name = qualified_class_name[:idx]
    class_name = qualified_class_name[idx + 1 :]

    module_path = _find_module(pkg_name, package_dir, root_dir)
    module = _load_spec(_find_spec(pkg_name, module_path), pkg_name)
    return getattr(module, class_name)
|
||||
|
||||
|
||||
def cmdclass(
    values: dict[str, str],
    package_dir: Mapping[str, str] | None = None,
    root_dir: StrPath | None = None,
) -> dict[str, Callable]:
    """Given a dictionary mapping command names to strings for qualified class
    names, apply :func:`resolve_class` to the dict values.
    """
    return {
        command: resolve_class(class_name, package_dir, root_dir)
        for command, class_name in values.items()
    }
|
||||
|
||||
|
||||
def find_packages(
    *,
    namespaces=True,
    fill_package_dir: dict[str, str] | None = None,
    root_dir: StrPath | None = None,
    **kwargs,
) -> list[str]:
    """Works similarly to :func:`setuptools.find_packages`, but with all
    arguments given as keyword arguments. Moreover, ``where`` can be given
    as a list (the results will be simply concatenated).

    When the additional keyword argument ``namespaces`` is ``True``, it will
    behave like :func:`setuptools.find_namespace_packages` (i.e. include
    implicit namespaces as per :pep:`420`).

    The ``where`` argument will be considered relative to ``root_dir`` (or the current
    working directory when ``root_dir`` is not given).

    If the ``fill_package_dir`` argument is passed, this function will consider it as a
    similar data structure to the ``package_dir`` configuration parameter and fill in
    any missing package location.

    :rtype: list
    """
    from more_itertools import always_iterable, unique_everseen

    from setuptools.discovery import construct_package_dir

    # Select the finder implementation according to the ``namespaces`` flag
    if namespaces:
        from setuptools.discovery import PEP420PackageFinder as PackageFinder
    else:
        from setuptools.discovery import PackageFinder  # type: ignore

    root_dir = root_dir or os.curdir
    where = kwargs.pop('where', ['.'])
    packages: list[str] = []
    fill_package_dir = {} if fill_package_dir is None else fill_package_dir
    # De-duplicate while preserving order; ``where`` may be a single str or a list
    search = list(unique_everseen(always_iterable(where)))

    # A single search dir different from the project root becomes the default
    # ("" key) package location
    if len(search) == 1 and all(not _same_path(search[0], x) for x in (".", root_dir)):
        fill_package_dir.setdefault("", search[0])

    for path in search:
        package_path = _nest_path(root_dir, path)
        pkgs = PackageFinder.find(package_path, **kwargs)
        packages.extend(pkgs)
        # Backfill ``package_dir`` entries for packages found outside the root,
        # unless the default ("") entry already covers this search path
        if pkgs and not (
            fill_package_dir.get("") == path or os.path.samefile(package_path, root_dir)
        ):
            fill_package_dir.update(construct_package_dir(pkgs, path))

    return packages
|
||||
|
||||
|
||||
def _nest_path(parent: StrPath, path: StrPath) -> str:
|
||||
path = parent if path in {".", ""} else os.path.join(parent, path)
|
||||
return os.path.normpath(path)
|
||||
|
||||
|
||||
def version(value: Callable | Iterable[str | int] | str) -> str:
    """When getting the version directly from an attribute,
    it should be normalised to string.
    """
    resolved = value() if callable(value) else value

    if isinstance(resolved, str):
        return resolved
    if hasattr(resolved, '__iter__'):
        # e.g. a ``(1, 2, "dev")`` tuple becomes ``"1.2.dev"``
        return '.'.join(str(part) for part in resolved)
    return str(resolved)
|
||||
|
||||
|
||||
def canonic_package_data(package_data: dict) -> dict:
    """Normalise the ``"*"`` wildcard key to ``""`` (in-place) and return the dict."""
    try:
        package_data[""] = package_data.pop("*")
    except KeyError:
        pass
    return package_data
|
||||
|
||||
|
||||
def canonic_data_files(
    data_files: list | dict, root_dir: StrPath | None = None
) -> list[tuple[str, list[str]]]:
    """For compatibility with ``setup.py``, ``data_files`` should be a list
    of pairs instead of a dict.

    This function also expands glob patterns.
    """
    if isinstance(data_files, list):
        return data_files  # already in canonical form

    return [
        (destination, glob_relative(patterns, root_dir))
        for destination, patterns in data_files.items()
    ]
|
||||
|
||||
|
||||
def entry_points(text: str, text_source="entry-points") -> dict[str, dict]:
    """Given the contents of entry-points file,
    process it into a 2-level dictionary (``dict[str, dict[str, str]]``):
    the first level keys are entry-point groups, the second level keys are
    entry-point names, and the second level values are object references.
    """
    parser = ConfigParser(default_section=None, delimiters=("=",))  # type: ignore
    parser.optionxform = str  # case sensitive
    parser.read_string(text, text_source)
    groups = {}
    for group_name, section in parser.items():
        if group_name == parser.default_section:
            continue  # skip the (unused) default section
        groups[group_name] = dict(section.items())
    return groups
|
||||
|
||||
|
||||
class EnsurePackagesDiscovered:
    """Some expand functions require all the packages to already be discovered before
    they run, e.g. :func:`read_attr`, :func:`resolve_class`, :func:`cmdclass`.

    Therefore in some cases we will need to run autodiscovery during the evaluation of
    the configuration. However, it is better to postpone calling package discovery as
    much as possible, because some parameters can influence it (e.g. ``package_dir``),
    and those might not have been processed yet.
    """

    def __init__(self, distribution: Distribution):
        self._dist = distribution
        self._called = False  # whether discovery was actually triggered

    def __call__(self):
        """Trigger the automatic package discovery, if it is still necessary."""
        if not self._called:
            self._called = True
            self._dist.set_defaults(name=False)  # Skip name, we can still be parsing

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Only finish the name analysis when discovery actually ran
        if self._called:
            self._dist.set_defaults.analyse_name()  # Now we can set a default name

    def _get_package_dir(self) -> Mapping[str, str]:
        # Accessing the package_dir forces discovery to run first
        self()
        pkg_dir = self._dist.package_dir
        return {} if pkg_dir is None else pkg_dir

    @property
    def package_dir(self) -> Mapping[str, str]:
        """Proxy to ``package_dir`` that may trigger auto-discovery when used."""
        return LazyMappingProxy(self._get_package_dir)
|
||||
|
||||
|
||||
class LazyMappingProxy(Mapping[_K, _V]):
    """Mapping proxy that delays resolving the target object, until really needed.

    >>> def obtain_mapping():
    ...     print("Running expensive function!")
    ...     return {"key": "value", "other key": "other value"}
    >>> mapping = LazyMappingProxy(obtain_mapping)
    >>> mapping["key"]
    Running expensive function!
    'value'
    >>> mapping["other key"]
    'other value'
    """

    def __init__(self, obtain_mapping_value: Callable[[], Mapping[_K, _V]]):
        self._obtain = obtain_mapping_value
        self._value: Mapping[_K, _V] | None = None

    def _target(self) -> Mapping[_K, _V]:
        # Resolve at most once; subsequent calls reuse the cached mapping
        mapping = self._value
        if mapping is None:
            mapping = self._value = self._obtain()
        return mapping

    def __getitem__(self, key: _K) -> _V:
        return self._target()[key]

    def __len__(self) -> int:
        return len(self._target())

    def __iter__(self) -> Iterator[_K]:
        return iter(self._target())
|
||||
@@ -0,0 +1,466 @@
|
||||
"""
|
||||
Load setuptools configuration from ``pyproject.toml`` files.
|
||||
|
||||
**PRIVATE MODULE**: API reserved for setuptools internal usage only.
|
||||
|
||||
To read project metadata, consider using
|
||||
``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
|
||||
For simple scenarios, you can also try parsing the file directly
|
||||
with the help of ``tomllib`` or ``tomli``.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from functools import partial
|
||||
from types import TracebackType
|
||||
from typing import TYPE_CHECKING, Any, Callable, Mapping
|
||||
|
||||
from .._path import StrPath
|
||||
from ..errors import FileError, InvalidConfigError
|
||||
from ..warnings import SetuptoolsWarning
|
||||
from . import expand as _expand
|
||||
from ._apply_pyprojecttoml import _PREVIOUSLY_DEFINED, _MissingDynamic, apply as _apply
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing_extensions import Self
|
||||
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def load_file(filepath: StrPath) -> dict:
    """Read a TOML file and return its parsed content as a ``dict``."""
    # Compat shim: ``tomllib`` is stdlib only from Python 3.11 on —
    # presumably the shim provides a fallback on older versions (see module).
    from ..compat.py310 import tomllib

    with open(filepath, "rb") as file:
        return tomllib.load(file)
|
||||
|
||||
|
||||
def validate(config: dict, filepath: StrPath) -> bool:
    """Validate ``config`` against the bundled ``pyproject.toml`` schema.

    :param config: parsed ``pyproject.toml`` content (or a subset of it)
    :param filepath: origin of the configuration (used for error context)
    :raises ValueError: when the configuration does not match the schema
    """
    from . import _validate_pyproject as validator

    trove_classifier = validator.FORMAT_FUNCTIONS.get("trove-classifier")
    if hasattr(trove_classifier, "_disable_download"):
        # Improve reproducibility by default. See issue 31 for validate-pyproject.
        trove_classifier._disable_download()  # type: ignore

    try:
        return validator.validate(config)
    except validator.ValidationError as ex:
        summary = f"configuration error: {ex.summary}"
        if ex.name.strip("`") != "project":
            # Probably it is just a field missing/misnamed, not worthy the verbosity...
            _logger.debug(summary)
            _logger.debug(ex.details)

        error = f"invalid pyproject.toml config: {ex.name}."
        raise ValueError(f"{error}\n{summary}") from None
|
||||
|
||||
|
||||
def apply_configuration(
    dist: Distribution,
    filepath: StrPath,
    ignore_option_errors=False,
) -> Distribution:
    """Apply the configuration from a ``pyproject.toml`` file into an existing
    distribution object.
    """
    # Read with expand=True so directives/dynamic fields are fully resolved
    expanded = read_configuration(filepath, True, ignore_option_errors, dist)
    return _apply(dist, expanded, filepath)
|
||||
|
||||
|
||||
def read_configuration(
    filepath: StrPath,
    expand=True,
    ignore_option_errors=False,
    dist: Distribution | None = None,
) -> dict[str, Any]:
    """Read given configuration file and returns options from it as a dict.

    :param str|unicode filepath: Path to configuration file in the ``pyproject.toml``
        format.

    :param bool expand: Whether to expand directives and other computed values
        (i.e. post-process the given configuration)

    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.

    :param Distribution|None dist: Distribution object to which the configuration
        refers. If not given a dummy object will be created and discarded after the
        configuration is read. This is used for auto-discovery of packages and in the
        case a dynamic configuration (e.g. ``attr`` or ``cmdclass``) is expanded.
        When ``expand=False`` this object is simply ignored.

    :rtype: dict
    """
    filepath = os.path.abspath(filepath)

    if not os.path.isfile(filepath):
        raise FileError(f"Configuration file {filepath!r} does not exist.")

    asdict = load_file(filepath) or {}
    project_table = asdict.get("project", {})
    tool_table = asdict.get("tool", {})
    setuptools_table = tool_table.get("setuptools", {})
    if not asdict or not (project_table or setuptools_table):
        return {}  # User is not using pyproject to configure setuptools

    # "tools" (plural) is a common typo for the "tool" table
    if "setuptools" in asdict.get("tools", {}):
        # let the user know they probably have a typo in their metadata
        _ToolsTypoInMetadata.emit()

    if "distutils" in tool_table:
        _ExperimentalConfiguration.emit(subject="[tool.distutils]")

    # There is an overall sense in the community that making include_package_data=True
    # the default would be an improvement.
    # `ini2toml` backfills include_package_data=False when nothing is explicitly given,
    # therefore setting a default here is backwards compatible.
    if dist and dist.include_package_data is not None:
        setuptools_table.setdefault("include-package-data", dist.include_package_data)
    else:
        setuptools_table.setdefault("include-package-data", True)
    # Persist changes:
    asdict["tool"] = tool_table
    tool_table["setuptools"] = setuptools_table

    if "ext-modules" in setuptools_table:
        _ExperimentalConfiguration.emit(subject="[tool.setuptools.ext-modules]")

    with _ignore_errors(ignore_option_errors):
        # Don't complain about unrelated errors (e.g. tools not using the "tool" table)
        subset = {"project": project_table, "tool": {"setuptools": setuptools_table}}
        validate(subset, filepath)

    if expand:
        root_dir = os.path.dirname(filepath)
        return expand_configuration(asdict, root_dir, ignore_option_errors, dist)

    return asdict
|
||||
|
||||
|
||||
def expand_configuration(
    config: dict,
    root_dir: StrPath | None = None,
    ignore_option_errors: bool = False,
    dist: Distribution | None = None,
) -> dict:
    """Given a configuration with unresolved fields (e.g. dynamic, cmdclass, ...)
    find their final values.

    :param dict config: Dict containing the configuration for the distribution
    :param str root_dir: Top-level directory for the distribution/project
        (the same directory where ``pyproject.toml`` is placed)
    :param bool ignore_option_errors: see :func:`read_configuration`
    :param Distribution|None dist: Distribution object to which the configuration
        refers. If not given a dummy object will be created and discarded after
        the configuration is read. Used in the case a dynamic configuration
        (e.g. ``attr`` or ``cmdclass``) is expanded.

    :rtype: dict
    """
    return _ConfigExpander(config, root_dir, ignore_option_errors, dist).expand()
|
||||
|
||||
|
||||
class _ConfigExpander:
|
||||
    def __init__(
        self,
        config: dict,
        root_dir: StrPath | None = None,
        ignore_option_errors: bool = False,
        dist: Distribution | None = None,
    ):
        # Raw configuration as parsed from ``pyproject.toml``
        self.config = config
        self.root_dir = root_dir or os.getcwd()
        # ``[project]`` table and its ``dynamic`` field
        self.project_cfg = config.get("project", {})
        self.dynamic = self.project_cfg.get("dynamic", [])
        # ``[tool.setuptools]`` table and its ``dynamic`` sub-table
        self.setuptools_cfg = config.get("tool", {}).get("setuptools", {})
        self.dynamic_cfg = self.setuptools_cfg.get("dynamic", {})
        self.ignore_option_errors = ignore_option_errors
        self._dist = dist
        # Files read during expansion (e.g. via ``file:`` directives)
        self._referenced_files: set[str] = set()
|
||||
|
||||
def _ensure_dist(self) -> Distribution:
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
attrs = {"src_root": self.root_dir, "name": self.project_cfg.get("name", None)}
|
||||
return self._dist or Distribution(attrs)
|
||||
|
||||
def _process_field(self, container: dict, field: str, fn: Callable):
|
||||
if field in container:
|
||||
with _ignore_errors(self.ignore_option_errors):
|
||||
container[field] = fn(container[field])
|
||||
|
||||
def _canonic_package_data(self, field="package-data"):
|
||||
package_data = self.setuptools_cfg.get(field, {})
|
||||
return _expand.canonic_package_data(package_data)
|
||||
|
||||
def expand(self):
|
||||
self._expand_packages()
|
||||
self._canonic_package_data()
|
||||
self._canonic_package_data("exclude-package-data")
|
||||
|
||||
# A distribution object is required for discovering the correct package_dir
|
||||
dist = self._ensure_dist()
|
||||
ctx = _EnsurePackagesDiscovered(dist, self.project_cfg, self.setuptools_cfg)
|
||||
with ctx as ensure_discovered:
|
||||
package_dir = ensure_discovered.package_dir
|
||||
self._expand_data_files()
|
||||
self._expand_cmdclass(package_dir)
|
||||
self._expand_all_dynamic(dist, package_dir)
|
||||
|
||||
dist._referenced_files.update(self._referenced_files)
|
||||
return self.config
|
||||
|
||||
def _expand_packages(self):
|
||||
packages = self.setuptools_cfg.get("packages")
|
||||
if packages is None or isinstance(packages, (list, tuple)):
|
||||
return
|
||||
|
||||
find = packages.get("find")
|
||||
if isinstance(find, dict):
|
||||
find["root_dir"] = self.root_dir
|
||||
find["fill_package_dir"] = self.setuptools_cfg.setdefault("package-dir", {})
|
||||
with _ignore_errors(self.ignore_option_errors):
|
||||
self.setuptools_cfg["packages"] = _expand.find_packages(**find)
|
||||
|
||||
def _expand_data_files(self):
|
||||
data_files = partial(_expand.canonic_data_files, root_dir=self.root_dir)
|
||||
self._process_field(self.setuptools_cfg, "data-files", data_files)
|
||||
|
||||
def _expand_cmdclass(self, package_dir: Mapping[str, str]):
|
||||
root_dir = self.root_dir
|
||||
cmdclass = partial(_expand.cmdclass, package_dir=package_dir, root_dir=root_dir)
|
||||
self._process_field(self.setuptools_cfg, "cmdclass", cmdclass)
|
||||
|
||||
def _expand_all_dynamic(self, dist: Distribution, package_dir: Mapping[str, str]):
|
||||
special = ( # need special handling
|
||||
"version",
|
||||
"readme",
|
||||
"entry-points",
|
||||
"scripts",
|
||||
"gui-scripts",
|
||||
"classifiers",
|
||||
"dependencies",
|
||||
"optional-dependencies",
|
||||
)
|
||||
# `_obtain` functions are assumed to raise appropriate exceptions/warnings.
|
||||
obtained_dynamic = {
|
||||
field: self._obtain(dist, field, package_dir)
|
||||
for field in self.dynamic
|
||||
if field not in special
|
||||
}
|
||||
obtained_dynamic.update(
|
||||
self._obtain_entry_points(dist, package_dir) or {},
|
||||
version=self._obtain_version(dist, package_dir),
|
||||
readme=self._obtain_readme(dist),
|
||||
classifiers=self._obtain_classifiers(dist),
|
||||
dependencies=self._obtain_dependencies(dist),
|
||||
optional_dependencies=self._obtain_optional_dependencies(dist),
|
||||
)
|
||||
# `None` indicates there is nothing in `tool.setuptools.dynamic` but the value
|
||||
# might have already been set by setup.py/extensions, so avoid overwriting.
|
||||
updates = {k: v for k, v in obtained_dynamic.items() if v is not None}
|
||||
self.project_cfg.update(updates)
|
||||
|
||||
def _ensure_previously_set(self, dist: Distribution, field: str):
|
||||
previous = _PREVIOUSLY_DEFINED[field](dist)
|
||||
if previous is None and not self.ignore_option_errors:
|
||||
msg = (
|
||||
f"No configuration found for dynamic {field!r}.\n"
|
||||
"Some dynamic fields need to be specified via `tool.setuptools.dynamic`"
|
||||
"\nothers must be specified via the equivalent attribute in `setup.py`."
|
||||
)
|
||||
raise InvalidConfigError(msg)
|
||||
|
||||
def _expand_directive(
|
||||
self, specifier: str, directive, package_dir: Mapping[str, str]
|
||||
):
|
||||
from more_itertools import always_iterable
|
||||
|
||||
with _ignore_errors(self.ignore_option_errors):
|
||||
root_dir = self.root_dir
|
||||
if "file" in directive:
|
||||
self._referenced_files.update(always_iterable(directive["file"]))
|
||||
return _expand.read_files(directive["file"], root_dir)
|
||||
if "attr" in directive:
|
||||
return _expand.read_attr(directive["attr"], package_dir, root_dir)
|
||||
raise ValueError(f"invalid `{specifier}`: {directive!r}")
|
||||
return None
|
||||
|
||||
def _obtain(self, dist: Distribution, field: str, package_dir: Mapping[str, str]):
|
||||
if field in self.dynamic_cfg:
|
||||
return self._expand_directive(
|
||||
f"tool.setuptools.dynamic.{field}",
|
||||
self.dynamic_cfg[field],
|
||||
package_dir,
|
||||
)
|
||||
self._ensure_previously_set(dist, field)
|
||||
return None
|
||||
|
||||
def _obtain_version(self, dist: Distribution, package_dir: Mapping[str, str]):
|
||||
# Since plugins can set version, let's silently skip if it cannot be obtained
|
||||
if "version" in self.dynamic and "version" in self.dynamic_cfg:
|
||||
return _expand.version(
|
||||
# We already do an early check for the presence of "version"
|
||||
self._obtain(dist, "version", package_dir) # pyright: ignore[reportArgumentType]
|
||||
)
|
||||
return None
|
||||
|
||||
def _obtain_readme(self, dist: Distribution) -> dict[str, str] | None:
|
||||
if "readme" not in self.dynamic:
|
||||
return None
|
||||
|
||||
dynamic_cfg = self.dynamic_cfg
|
||||
if "readme" in dynamic_cfg:
|
||||
return {
|
||||
# We already do an early check for the presence of "readme"
|
||||
"text": self._obtain(dist, "readme", {}),
|
||||
"content-type": dynamic_cfg["readme"].get("content-type", "text/x-rst"),
|
||||
} # pyright: ignore[reportReturnType]
|
||||
|
||||
self._ensure_previously_set(dist, "readme")
|
||||
return None
|
||||
|
||||
def _obtain_entry_points(
|
||||
self, dist: Distribution, package_dir: Mapping[str, str]
|
||||
) -> dict[str, dict] | None:
|
||||
fields = ("entry-points", "scripts", "gui-scripts")
|
||||
if not any(field in self.dynamic for field in fields):
|
||||
return None
|
||||
|
||||
text = self._obtain(dist, "entry-points", package_dir)
|
||||
if text is None:
|
||||
return None
|
||||
|
||||
groups = _expand.entry_points(text)
|
||||
expanded = {"entry-points": groups}
|
||||
|
||||
def _set_scripts(field: str, group: str):
|
||||
if group in groups:
|
||||
value = groups.pop(group)
|
||||
if field not in self.dynamic:
|
||||
raise InvalidConfigError(_MissingDynamic.details(field, value))
|
||||
expanded[field] = value
|
||||
|
||||
_set_scripts("scripts", "console_scripts")
|
||||
_set_scripts("gui-scripts", "gui_scripts")
|
||||
|
||||
return expanded
|
||||
|
||||
def _obtain_classifiers(self, dist: Distribution):
|
||||
if "classifiers" in self.dynamic:
|
||||
value = self._obtain(dist, "classifiers", {})
|
||||
if value:
|
||||
return value.splitlines()
|
||||
return None
|
||||
|
||||
def _obtain_dependencies(self, dist: Distribution):
|
||||
if "dependencies" in self.dynamic:
|
||||
value = self._obtain(dist, "dependencies", {})
|
||||
if value:
|
||||
return _parse_requirements_list(value)
|
||||
return None
|
||||
|
||||
def _obtain_optional_dependencies(self, dist: Distribution):
|
||||
if "optional-dependencies" not in self.dynamic:
|
||||
return None
|
||||
if "optional-dependencies" in self.dynamic_cfg:
|
||||
optional_dependencies_map = self.dynamic_cfg["optional-dependencies"]
|
||||
assert isinstance(optional_dependencies_map, dict)
|
||||
return {
|
||||
group: _parse_requirements_list(
|
||||
self._expand_directive(
|
||||
f"tool.setuptools.dynamic.optional-dependencies.{group}",
|
||||
directive,
|
||||
{},
|
||||
)
|
||||
)
|
||||
for group, directive in optional_dependencies_map.items()
|
||||
}
|
||||
self._ensure_previously_set(dist, "optional-dependencies")
|
||||
return None
|
||||
|
||||
|
||||
def _parse_requirements_list(value):
|
||||
return [
|
||||
line
|
||||
for line in value.splitlines()
|
||||
if line.strip() and not line.strip().startswith("#")
|
||||
]
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _ignore_errors(ignore_option_errors: bool):
|
||||
if not ignore_option_errors:
|
||||
yield
|
||||
return
|
||||
|
||||
try:
|
||||
yield
|
||||
except Exception as ex:
|
||||
_logger.debug(f"ignored error: {ex.__class__.__name__} - {ex}")
|
||||
|
||||
|
||||
class _EnsurePackagesDiscovered(_expand.EnsurePackagesDiscovered):
    """Context manager that synchronises package discovery between the
    ``dist`` object and the ``pyproject.toml`` configuration dicts.
    """

    def __init__(
        self, distribution: Distribution, project_cfg: dict, setuptools_cfg: dict
    ):
        super().__init__(distribution)
        self._project_cfg = project_cfg
        self._setuptools_cfg = setuptools_cfg

    def __enter__(self) -> Self:
        """When entering the context, the values of ``packages``, ``py_modules`` and
        ``package_dir`` that are missing in ``dist`` are copied from ``setuptools_cfg``.
        """
        dist, cfg = self._dist, self._setuptools_cfg
        package_dir: dict[str, str] = cfg.setdefault("package-dir", {})
        package_dir.update(dist.package_dir or {})
        dist.package_dir = package_dir  # needs to be the same object

        dist.set_defaults._ignore_ext_modules()  # pyproject.toml-specific behaviour

        # Set `name`, `py_modules` and `packages` in dist to short-circuit
        # auto-discovery, but avoid overwriting empty lists purposefully set by users.
        if dist.metadata.name is None:
            dist.metadata.name = self._project_cfg.get("name")
        if dist.py_modules is None:
            dist.py_modules = cfg.get("py-modules")
        if dist.packages is None:
            dist.packages = cfg.get("packages")

        return super().__enter__()

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """When exiting the context, if values of ``packages``, ``py_modules`` and
        ``package_dir`` are missing in ``setuptools_cfg``, copy from ``dist``.
        """
        # If anything was discovered set them back, so they count in the final config.
        self._setuptools_cfg.setdefault("packages", self._dist.packages)
        self._setuptools_cfg.setdefault("py-modules", self._dist.py_modules)
        return super().__exit__(exc_type, exc_value, traceback)
|
||||
|
||||
|
||||
class _ExperimentalConfiguration(SetuptoolsWarning):
    # Warning emitted for pyproject.toml features not yet considered stable.
    _SUMMARY = (
        "`{subject}` in `pyproject.toml` is still *experimental* "
        "and likely to change in future releases."
    )
|
||||
|
||||
|
||||
class _ToolsTypoInMetadata(SetuptoolsWarning):
    # Warning for the common `[tools.setuptools]` (plural) typo in pyproject.toml.
    _SUMMARY = (
        "Ignoring [tools.setuptools] in pyproject.toml, did you mean [tool.setuptools]?"
    )
|
||||
780
myenv/lib/python3.12/site-packages/setuptools/config/setupcfg.py
Normal file
780
myenv/lib/python3.12/site-packages/setuptools/config/setupcfg.py
Normal file
@@ -0,0 +1,780 @@
|
||||
"""
|
||||
Load setuptools configuration from ``setup.cfg`` files.
|
||||
|
||||
**API will be made private in the future**
|
||||
|
||||
To read project metadata, consider using
|
||||
``build.util.project_wheel_metadata`` (https://pypi.org/project/build/).
|
||||
For simple scenarios, you can also try parsing the file directly
|
||||
with the help of ``configparser``.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from functools import partial, wraps
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generic,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from packaging.markers import default_environment as marker_env
|
||||
from packaging.requirements import InvalidRequirement, Requirement
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from packaging.version import InvalidVersion, Version
|
||||
|
||||
from .._path import StrPath
|
||||
from ..errors import FileError, OptionError
|
||||
from ..warnings import SetuptoolsDeprecationWarning
|
||||
from . import expand
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from setuptools.dist import Distribution
|
||||
|
||||
from distutils.dist import DistributionMetadata
|
||||
|
||||
SingleCommandOptions = Dict["str", Tuple["str", Any]]
"""Dict that associate the name of the options of a particular command to a
tuple. The first element of the tuple indicates the origin of the option value
(e.g. the name of the configuration file where it was read from),
while the second element of the tuple is the option value itself
"""
AllCommandOptions = Dict["str", SingleCommandOptions]  # cmd name => its options
# Handlers may target either a whole Distribution or just its metadata object.
Target = TypeVar("Target", bound=Union["Distribution", "DistributionMetadata"])
|
||||
|
||||
|
||||
def read_configuration(
    filepath: StrPath, find_others=False, ignore_option_errors=False
) -> dict:
    """Read the given ``setup.cfg``-style file and return its options as a dict.

    :param str|unicode filepath: path to the configuration file to read.

    :param bool find_others: also search the usual locations for additional
        configuration files and take them into account.

    :param bool ignore_option_errors: silently skip options whose values could
        not be resolved (e.g. exceptions raised by ``file:``/``attr:``
        directives). When ``False``, such exceptions propagate normally.

    :rtype: dict
    """
    from setuptools.dist import Distribution

    dist = Distribution()
    if find_others:
        other_files = dist.find_config_files()
    else:
        other_files = []
    handlers = _apply(dist, filepath, other_files, ignore_option_errors)
    return configuration_to_dict(handlers)
|
||||
|
||||
|
||||
def apply_configuration(dist: Distribution, filepath: StrPath) -> Distribution:
    """Apply the configuration from a ``setup.cfg`` file into an existing
    distribution object.

    Returns the same ``dist`` object, after parsing ``filepath`` into it and
    finalising its requirement metadata.
    """
    _apply(dist, filepath)
    dist._finalize_requires()
    return dist
|
||||
|
||||
|
||||
def _apply(
    dist: Distribution,
    filepath: StrPath,
    other_files: Iterable[StrPath] = (),
    ignore_option_errors: bool = False,
) -> tuple[ConfigHandler, ...]:
    """Read configuration from ``filepath`` and applies to the ``dist`` object."""
    from setuptools.dist import _Distribution

    filepath = os.path.abspath(filepath)

    if not os.path.isfile(filepath):
        raise FileError(f'Configuration file {filepath} does not exist.')

    # Parse relative to the config file's directory so that relative paths
    # inside the file (e.g. `file:` directives) resolve correctly.
    current_directory = os.getcwd()
    os.chdir(os.path.dirname(filepath))
    # `filepath` goes last so its values take precedence over `other_files`.
    filenames = [*other_files, filepath]

    try:
        # TODO: Temporary cast until mypy 1.12 is released with upstream fixes from typeshed
        _Distribution.parse_config_files(dist, filenames=cast(List[str], filenames))
        handlers = parse_configuration(
            dist, dist.command_options, ignore_option_errors=ignore_option_errors
        )
        dist._finalize_license_files()
    finally:
        # Always restore the original working directory, even on error.
        os.chdir(current_directory)

    return handlers
|
||||
|
||||
|
||||
def _get_option(target_obj: Target, key: str):
|
||||
"""
|
||||
Given a target object and option key, get that option from
|
||||
the target object, either through a get_{key} method or
|
||||
from an attribute directly.
|
||||
"""
|
||||
getter_name = f'get_{key}'
|
||||
by_attribute = functools.partial(getattr, target_obj, key)
|
||||
getter = getattr(target_obj, getter_name, by_attribute)
|
||||
return getter()
|
||||
|
||||
|
||||
def configuration_to_dict(handlers: tuple[ConfigHandler, ...]) -> dict:
    """Collect the options set by the given handlers into a dict.

    :param list[ConfigHandler] handlers: handlers, usually the result of
        :func:`parse_configuration`.

    :rtype: dict
    """
    config_dict: dict = defaultdict(dict)

    for handler in handlers:
        section = config_dict[handler.section_prefix]
        for option in handler.set_options:
            section[option] = _get_option(handler.target_obj, option)

    return config_dict
|
||||
|
||||
|
||||
def parse_configuration(
    distribution: Distribution,
    command_options: AllCommandOptions,
    ignore_option_errors=False,
) -> tuple[ConfigMetadataHandler, ConfigOptionsHandler]:
    """Performs additional parsing of configuration options
    for a distribution.

    Returns a list of used option handlers.

    :param Distribution distribution:
    :param dict command_options:
    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to exceptions
        in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.
    :rtype: list
    """
    with expand.EnsurePackagesDiscovered(distribution) as ensure_discovered:
        # [options] must be parsed first: it may discover packages/package_dir
        # that the metadata handler needs (e.g. for `attr:` directives).
        options = ConfigOptionsHandler(
            distribution,
            command_options,
            ignore_option_errors,
            ensure_discovered,
        )

        options.parse()
        if not distribution.package_dir:
            distribution.package_dir = options.package_dir  # Filled by `find_packages`

        meta = ConfigMetadataHandler(
            distribution.metadata,
            command_options,
            ignore_option_errors,
            ensure_discovered,
            distribution.package_dir,
            distribution.src_root,
        )
        meta.parse()
        # Record every file pulled in via `file:` so sdists can include them.
        distribution._referenced_files.update(
            options._referenced_files, meta._referenced_files
        )

    return meta, options
|
||||
|
||||
|
||||
def _warn_accidental_env_marker_misconfig(label: str, orig_value: str, parsed: list):
    """Because users sometimes misinterpret this configuration:

    [options.extras_require]
    foo = bar;python_version<"4"

    It looks like one requirement with an environment marker
    but because there is no newline, it's parsed as two requirements
    with a semicolon as separator.

    Therefore, if:
    * input string does not contain a newline AND
    * parsed result contains two requirements AND
    * parsing of the two parts from the result ("<first>;<second>")
    leads in a valid Requirement with a valid marker
    a UserWarning is shown to inform the user about the possible problem.
    """
    if "\n" in orig_value or len(parsed) != 2:
        # Multi-line input or not exactly two parts: ambiguity cannot occur.
        return

    markers = marker_env().keys()

    try:
        # If the second "requirement" parses and its name is an env-marker
        # variable (e.g. `python_version`), it was almost certainly a marker.
        req = Requirement(parsed[1])
        if req.name in markers:
            _AmbiguousMarker.emit(field=label, req=parsed[1])
    except InvalidRequirement as ex:
        # Unparseable, but starting with a marker variable: treat as an error.
        if any(parsed[1].startswith(marker) for marker in markers):
            msg = _AmbiguousMarker.message(field=label, req=parsed[1])
            raise InvalidRequirement(msg) from ex
|
||||
|
||||
|
||||
class ConfigHandler(Generic[Target]):
    """Handles metadata supplied in configuration files."""

    section_prefix: str
    """Prefix for config sections handled by this handler.
    Must be provided by class heirs.

    """

    aliases: dict[str, str] = {}
    """Options aliases.
    For compatibility with various packages. E.g.: d2to1 and pbr.
    Note: `-` in keys is replaced with `_` by config parser.

    """

    def __init__(
        self,
        target_obj: Target,
        options: AllCommandOptions,
        ignore_option_errors,
        ensure_discovered: expand.EnsurePackagesDiscovered,
    ):
        self.ignore_option_errors = ignore_option_errors
        self.target_obj = target_obj
        # Only the sections belonging to this handler's prefix are kept.
        self.sections = dict(self._section_options(options))
        self.set_options: list[str] = []
        self.ensure_discovered = ensure_discovered
        self._referenced_files: set[str] = set()
        """After parsing configurations, this property will enumerate
        all files referenced by the "file:" directive. Private API for setuptools only.
        """

    @classmethod
    def _section_options(
        cls, options: AllCommandOptions
    ) -> Iterator[tuple[str, SingleCommandOptions]]:
        """Yield ``(subsection_name, options)`` for sections matching
        ``cls.section_prefix`` (the bare prefix yields an empty name).
        """
        for full_name, value in options.items():
            pre, sep, name = full_name.partition(cls.section_prefix)
            if pre:
                # Section name did not start with the prefix; skip it.
                continue
            yield name.lstrip('.'), value

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        raise NotImplementedError(
            '%s must provide .parsers property' % self.__class__.__name__
        )

    def __setitem__(self, option_name, value) -> None:
        """Parse ``value`` and assign it to ``option_name`` on the target,
        unless the target already holds a truthy value for that option.

        :raises KeyError: if the target object has no such attribute.
        """
        target_obj = self.target_obj

        # Translate alias into real name.
        option_name = self.aliases.get(option_name, option_name)

        try:
            current_value = getattr(target_obj, option_name)
        except AttributeError as e:
            raise KeyError(option_name) from e

        if current_value:
            # Already inhabited. Skipping.
            return

        try:
            parsed = self.parsers.get(option_name, lambda x: x)(value)
        # Tuple-repetition trick: () when errors propagate, (Exception,) when ignored.
        except (Exception,) * self.ignore_option_errors:
            return

        # Prefer a dedicated `set_<option>` method when the target defines one.
        simple_setter = functools.partial(target_obj.__setattr__, option_name)
        setter = getattr(target_obj, 'set_%s' % option_name, simple_setter)
        setter(parsed)

        self.set_options.append(option_name)

    @classmethod
    def _parse_list(cls, value, separator=','):
        """Represents value as a list.

        Value is split either by separator (defaults to comma) or by lines.

        :param value:
        :param separator: List items separator character.
        :rtype: list
        """
        if isinstance(value, list):  # _get_parser_compound case
            return value

        if '\n' in value:
            value = value.splitlines()
        else:
            value = value.split(separator)

        return [chunk.strip() for chunk in value if chunk.strip()]

    @classmethod
    def _parse_dict(cls, value):
        """Represents value as a dict.

        :param value:
        :rtype: dict
        :raises OptionError: if a line lacks the ``key = value`` form.
        """
        separator = '='
        result = {}
        for line in cls._parse_list(value):
            key, sep, val = line.partition(separator)
            if sep != separator:
                raise OptionError(f"Unable to parse option value to dict: {value}")
            result[key.strip()] = val.strip()

        return result

    @classmethod
    def _parse_bool(cls, value):
        """Represents value as boolean.

        :param value:
        :rtype: bool
        """
        value = value.lower()
        return value in ('1', 'true', 'yes')

    @classmethod
    def _exclude_files_parser(cls, key):
        """Returns a parser function to make sure field inputs
        are not files.

        Parses a value after getting the key so error messages are
        more informative.

        :param key:
        :rtype: callable
        :raises ValueError: (from the returned parser) on a ``file:`` value.
        """

        def parser(value):
            exclude_directive = 'file:'
            if value.startswith(exclude_directive):
                raise ValueError(
                    'Only strings are accepted for the {0} field, '
                    'files are not accepted'.format(key)
                )
            return value

        return parser

    def _parse_file(self, value, root_dir: StrPath):
        """Represents value as a string, allowing including text
        from nearest files using `file:` directive.

        Directive is sandboxed and won't reach anything outside
        directory with setup.py.

        Examples:
            file: README.rst, CHANGELOG.md, src/file.txt

        :param str value:
        :rtype: str
        """
        include_directive = 'file:'

        if not isinstance(value, str):
            return value

        if not value.startswith(include_directive):
            return value

        spec = value[len(include_directive) :]
        filepaths = [path.strip() for path in spec.split(',')]
        # Record the files so they can be included in sdists.
        self._referenced_files.update(filepaths)
        return expand.read_files(filepaths, root_dir)

    def _parse_attr(self, value, package_dir, root_dir: StrPath):
        """Represents value as a module attribute.

        Examples:
            attr: package.attr
            attr: package.module.attr

        :param str value:
        :rtype: str
        """
        attr_directive = 'attr:'
        if not value.startswith(attr_directive):
            return value

        attr_desc = value.replace(attr_directive, '')

        # Make sure package_dir is populated correctly, so `attr:` directives can work
        package_dir.update(self.ensure_discovered.package_dir)
        return expand.read_attr(attr_desc, package_dir, root_dir)

    @classmethod
    def _get_parser_compound(cls, *parse_methods):
        """Returns parser function to represents value as a list.

        Parses a value applying given methods one after another.

        :param parse_methods:
        :rtype: callable
        """

        def parse(value):
            parsed = value

            for method in parse_methods:
                parsed = method(parsed)

            return parsed

        return parse

    @classmethod
    def _parse_section_to_dict_with_key(cls, section_options, values_parser):
        """Parses section options into a dictionary.

        Applies a given parser to each option in a section.

        :param dict section_options:
        :param callable values_parser: function with 2 args corresponding to key, value
        :rtype: dict
        """
        value = {}
        for key, (_, val) in section_options.items():
            value[key] = values_parser(key, val)
        return value

    @classmethod
    def _parse_section_to_dict(cls, section_options, values_parser=None):
        """Parses section options into a dictionary.

        Optionally applies a given parser to each value.

        :param dict section_options:
        :param callable values_parser: function with 1 arg corresponding to option value
        :rtype: dict
        """
        parser = (lambda _, v: values_parser(v)) if values_parser else (lambda _, v: v)
        return cls._parse_section_to_dict_with_key(section_options, parser)

    def parse_section(self, section_options):
        """Parses configuration file section.

        :param dict section_options:
        """
        for name, (_, value) in section_options.items():
            with contextlib.suppress(KeyError):
                # Keep silent for a new option may appear anytime.
                self[name] = value

    def parse(self) -> None:
        """Parses configuration file items from one
        or more related sections.

        :raises OptionError: on a section this handler has no parser method for.
        """
        for section_name, section_options in self.sections.items():
            method_postfix = ''
            if section_name:  # [section.option] variant
                method_postfix = '_%s' % section_name

            section_parser_method: Callable | None = getattr(
                self,
                # Dots in section names are translated into dunderscores.
                ('parse_section%s' % method_postfix).replace('.', '__'),
                None,
            )

            if section_parser_method is None:
                raise OptionError(
                    "Unsupported distribution option section: "
                    f"[{self.section_prefix}.{section_name}]"
                )

            section_parser_method(section_options)

    def _deprecated_config_handler(self, func, msg, **kw):
        """this function will wrap around parameters that are deprecated

        :param msg: deprecation message
        :param func: function to be wrapped around
        """

        @wraps(func)
        def config_handler(*args, **kwargs):
            kw.setdefault("stacklevel", 2)
            _DeprecatedConfig.emit("Deprecated config in `setup.cfg`", msg, **kw)
            return func(*args, **kwargs)

        return config_handler
|
||||
|
||||
|
||||
class ConfigMetadataHandler(ConfigHandler["DistributionMetadata"]):
    """Handler for the ``[metadata]`` section of ``setup.cfg``."""

    section_prefix = 'metadata'

    aliases = {
        'home_page': 'url',
        'summary': 'description',
        'classifier': 'classifiers',
        'platform': 'platforms',
    }

    strict_mode = False
    """We need to keep it loose, to be partially compatible with
    `pbr` and `d2to1` packages which also uses `metadata` section.

    """

    def __init__(
        self,
        target_obj: DistributionMetadata,
        options: AllCommandOptions,
        ignore_option_errors: bool,
        ensure_discovered: expand.EnsurePackagesDiscovered,
        package_dir: dict | None = None,
        root_dir: StrPath = os.curdir,
    ):
        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
        # package_dir/root_dir are needed to resolve `attr:`/`file:` directives.
        self.package_dir = package_dir
        self.root_dir = root_dir

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_file = partial(self._parse_file, root_dir=self.root_dir)
        parse_dict = self._parse_dict
        exclude_files_parser = self._exclude_files_parser

        return {
            'platforms': parse_list,
            'keywords': parse_list,
            'provides': parse_list,
            'obsoletes': parse_list,
            'classifiers': self._get_parser_compound(parse_file, parse_list),
            'license': exclude_files_parser('license'),
            'license_files': parse_list,
            'description': parse_file,
            'long_description': parse_file,
            'version': self._parse_version,
            'project_urls': parse_dict,
        }

    def _parse_version(self, value):
        """Parses `version` option value.

        :param value:
        :rtype: str
        :raises OptionError: when a ``file:``-loaded version is not PEP 440.
        """
        version = self._parse_file(value, self.root_dir)

        if version != value:
            # The value was a `file:` directive and has been read from disk.
            version = version.strip()
            # Be strict about versions loaded from file because it's easy to
            # accidentally include newlines and other unintended content
            try:
                Version(version)
            except InvalidVersion as e:
                raise OptionError(
                    f'Version loaded from {value} does not '
                    f'comply with PEP 440: {version}'
                ) from e

            return version

        # Otherwise treat the value as a literal string or an `attr:` directive.
        return expand.version(self._parse_attr(value, self.package_dir, self.root_dir))
|
||||
|
||||
|
||||
class ConfigOptionsHandler(ConfigHandler["Distribution"]):
    """Handler for the ``[options]`` section of ``setup.cfg``."""

    section_prefix = 'options'

    def __init__(
        self,
        target_obj: Distribution,
        options: AllCommandOptions,
        ignore_option_errors: bool,
        ensure_discovered: expand.EnsurePackagesDiscovered,
    ):
        """
        :param target_obj: distribution object the parsed options are applied to.
        :param options: raw ``setup.cfg`` option sections.
        :param ignore_option_errors: when True, suppress parsing errors.
        :param ensure_discovered: context used to lazily run package discovery.
        """
        super().__init__(target_obj, options, ignore_option_errors, ensure_discovered)
        self.root_dir = target_obj.src_root
        self.package_dir: dict[str, str] = {}  # To be filled by `find_packages`

    @classmethod
    def _parse_list_semicolon(cls, value):
        """Parse a list option whose items are separated by ``;``."""
        return cls._parse_list(value, separator=';')

    def _parse_file_in_root(self, value):
        """Parse a ``file:`` directive, resolving paths against the project root."""
        return self._parse_file(value, root_dir=self.root_dir)

    def _parse_requirements_list(self, label: str, value: str):
        # Parse a requirements list, either by reading in a `file:`, or a list.
        parsed = self._parse_list_semicolon(self._parse_file_in_root(value))
        _warn_accidental_env_marker_misconfig(label, value, parsed)
        # Filter it to only include lines that are not comments. `parse_list`
        # will have stripped each line and filtered out empties.
        return [line for line in parsed if not line.startswith("#")]

    @property
    def parsers(self):
        """Metadata item name to parser function mapping."""
        parse_list = self._parse_list
        parse_bool = self._parse_bool
        parse_dict = self._parse_dict
        parse_cmdclass = self._parse_cmdclass

        return {
            'zip_safe': parse_bool,
            'include_package_data': parse_bool,
            'package_dir': parse_dict,
            'scripts': parse_list,
            'eager_resources': parse_list,
            'dependency_links': parse_list,
            'namespace_packages': self._deprecated_config_handler(
                parse_list,
                "The namespace_packages parameter is deprecated, "
                "consider using implicit namespaces instead (PEP 420).",
                # TODO: define due date, see setuptools.dist:check_nsp.
            ),
            'install_requires': partial(
                self._parse_requirements_list, "install_requires"
            ),
            'setup_requires': self._parse_list_semicolon,
            'packages': self._parse_packages,
            'entry_points': self._parse_file_in_root,
            'py_modules': parse_list,
            'python_requires': SpecifierSet,
            'cmdclass': parse_cmdclass,
        }

    def _parse_cmdclass(self, value):
        """Parse the ``cmdclass`` option, resolving dotted names to classes."""
        package_dir = self.ensure_discovered.package_dir
        return expand.cmdclass(self._parse_dict(value), package_dir, self.root_dir)

    def _parse_packages(self, value):
        """Parses `packages` option value.

        :param value: either an explicit list of packages or one of the
            ``find:`` / ``find_namespace:`` directives.
        :rtype: list
        """
        find_directives = ['find:', 'find_namespace:']
        trimmed_value = value.strip()

        if trimmed_value not in find_directives:
            return self._parse_list(value)

        # Read function arguments from a dedicated section.
        find_kwargs = self.parse_section_packages__find(
            self.sections.get('packages.find', {})
        )

        find_kwargs.update(
            namespaces=(trimmed_value == find_directives[1]),
            root_dir=self.root_dir,
            fill_package_dir=self.package_dir,
        )

        return expand.find_packages(**find_kwargs)

    def parse_section_packages__find(self, section_options):
        """Parses `packages.find` configuration file section.

        To be used in conjunction with _parse_packages().

        :param dict section_options:
        """
        section_data = self._parse_section_to_dict(section_options, self._parse_list)

        valid_keys = ['where', 'include', 'exclude']

        # Keep only recognized keys that have a non-empty value.
        find_kwargs = {
            k: v for k, v in section_data.items() if k in valid_keys and v
        }

        where = find_kwargs.get('where')
        if where is not None:
            find_kwargs['where'] = where[0]  # cast list to single val

        return find_kwargs

    def parse_section_entry_points(self, section_options):
        """Parses `entry_points` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['entry_points'] = parsed

    def _parse_package_data(self, section_options):
        """Parse a ``*package_data`` section into a canonical mapping."""
        package_data = self._parse_section_to_dict(section_options, self._parse_list)
        return expand.canonic_package_data(package_data)

    def parse_section_package_data(self, section_options):
        """Parses `package_data` configuration file section.

        :param dict section_options:
        """
        self['package_data'] = self._parse_package_data(section_options)

    def parse_section_exclude_package_data(self, section_options):
        """Parses `exclude_package_data` configuration file section.

        :param dict section_options:
        """
        self['exclude_package_data'] = self._parse_package_data(section_options)

    def parse_section_extras_require(self, section_options):
        """Parses `extras_require` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict_with_key(
            section_options,
            lambda k, v: self._parse_requirements_list(f"extras_require[{k}]", v),
        )

        self['extras_require'] = parsed

    def parse_section_data_files(self, section_options):
        """Parses `data_files` configuration file section.

        :param dict section_options:
        """
        parsed = self._parse_section_to_dict(section_options, self._parse_list)
        self['data_files'] = expand.canonic_data_files(parsed, self.root_dir)
|
||||
|
||||
|
||||
class _AmbiguousMarker(SetuptoolsDeprecationWarning):
    """Warning emitted when a requirement line parses as an environment marker."""

    _SUMMARY = "Ambiguous requirement marker."
    _DETAILS = """
    One of the parsed requirements in `{field}` looks like a valid environment marker:

        {req!r}

    Please make sure that the configuration file is correct.
    You can use dangling lines to avoid this problem.
    """
    _SEE_DOCS = "userguide/declarative_config.html#opt-2"
    # TODO: should we include due_date here? Initially introduced in 6 Aug 2022.
    # Does this make sense with latest version of packaging?

    @classmethod
    def message(cls, **kw):
        # Point users at the relevant section of the setuptools docs.
        see = f"https://setuptools.pypa.io/en/latest/{cls._SEE_DOCS}"
        return cls._format(cls._SUMMARY, cls._DETAILS, format_args=kw, see_url=see)
|
||||
|
||||
|
||||
class _DeprecatedConfig(SetuptoolsDeprecationWarning):
|
||||
_SEE_DOCS = "userguide/declarative_config.html"
|
||||
@@ -0,0 +1,433 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
|
||||
"$id": "https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html",
|
||||
"title": "``tool.setuptools`` table",
|
||||
"$$description": [
|
||||
"``setuptools``-specific configurations that can be set by users that require",
|
||||
"customization.",
|
||||
"These configurations are completely optional and probably can be skipped when",
|
||||
"creating simple packages. They are equivalent to some of the `Keywords",
|
||||
"<https://setuptools.pypa.io/en/latest/references/keywords.html>`_",
|
||||
"used by the ``setup.py`` file, and can be set via the ``tool.setuptools`` table.",
|
||||
"It considers only ``setuptools`` `parameters",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#setuptools-specific-configuration>`_",
|
||||
"that are not covered by :pep:`621`; and intentionally excludes ``dependency_links``",
|
||||
"and ``setup_requires`` (incompatible with modern workflows/standards)."
|
||||
],
|
||||
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"platforms": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"provides": {
|
||||
"$$description": [
|
||||
"Package and virtual package names contained within this package",
|
||||
"**(not supported by pip)**"
|
||||
],
|
||||
"type": "array",
|
||||
"items": {"type": "string", "format": "pep508-identifier"}
|
||||
},
|
||||
"obsoletes": {
|
||||
"$$description": [
|
||||
"Packages which this package renders obsolete",
|
||||
"**(not supported by pip)**"
|
||||
],
|
||||
"type": "array",
|
||||
"items": {"type": "string", "format": "pep508-identifier"}
|
||||
},
|
||||
"zip-safe": {
|
||||
"$$description": [
|
||||
"Whether the project can be safely installed and run from a zip file.",
|
||||
"**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
|
||||
"``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
|
||||
],
|
||||
"type": "boolean"
|
||||
},
|
||||
"script-files": {
|
||||
"$$description": [
|
||||
"Legacy way of defining scripts (entry-points are preferred).",
|
||||
"Equivalent to the ``script`` keyword in ``setup.py``",
|
||||
"(it was renamed to avoid confusion with entry-point based ``project.scripts``",
|
||||
"defined in :pep:`621`).",
|
||||
"**DISCOURAGED**: generic script wrappers are tricky and may not work properly.",
|
||||
"Whenever possible, please use ``project.scripts`` instead."
|
||||
],
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"$comment": "TODO: is this field deprecated/should be removed?"
|
||||
},
|
||||
"eager-resources": {
|
||||
"$$description": [
|
||||
"Resources that should be extracted together, if any of them is needed,",
|
||||
"or if any C extensions included in the project are imported.",
|
||||
"**OBSOLETE**: only relevant for ``pkg_resources``, ``easy_install`` and",
|
||||
"``setup.py install`` in the context of ``eggs`` (**DEPRECATED**)."
|
||||
],
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"packages": {
|
||||
"$$description": [
|
||||
"Packages that should be included in the distribution.",
|
||||
"It can be given either as a list of package identifiers",
|
||||
"or as a ``dict``-like structure with a single key ``find``",
|
||||
"which corresponds to a dynamic call to",
|
||||
"``setuptools.config.expand.find_packages`` function.",
|
||||
"The ``find`` key is associated with a nested ``dict``-like structure that can",
|
||||
"contain ``where``, ``include``, ``exclude`` and ``namespaces`` keys,",
|
||||
"mimicking the keyword arguments of the associated function."
|
||||
],
|
||||
"oneOf": [
|
||||
{
|
||||
"title": "Array of Python package identifiers",
|
||||
"type": "array",
|
||||
"items": {"$ref": "#/definitions/package-name"}
|
||||
},
|
||||
{"$ref": "#/definitions/find-directive"}
|
||||
]
|
||||
},
|
||||
"package-dir": {
|
||||
"$$description": [
|
||||
":class:`dict`-like structure mapping from package names to directories where their",
|
||||
"code can be found.",
|
||||
"The empty string (as key) means that all packages contained inside
|
||||
"the given directory will be included in the distribution."
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"propertyNames": {
|
||||
"anyOf": [{"const": ""}, {"$ref": "#/definitions/package-name"}]
|
||||
},
|
||||
"patternProperties": {
|
||||
"^.*$": {"type": "string" }
|
||||
}
|
||||
},
|
||||
"package-data": {
|
||||
"$$description": [
|
||||
"Mapping from package names to lists of glob patterns.",
|
||||
"Usually this option is not needed when using ``include-package-data = true``",
|
||||
"For more information on how to include data files, check ``setuptools`` `docs",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"propertyNames": {
|
||||
"anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
|
||||
},
|
||||
"patternProperties": {
|
||||
"^.*$": {"type": "array", "items": {"type": "string"}}
|
||||
}
|
||||
},
|
||||
"include-package-data": {
|
||||
"$$description": [
|
||||
"Automatically include any data files inside the package directories",
|
||||
"that are specified by ``MANIFEST.in``",
|
||||
"For more information on how to include data files, check ``setuptools`` `docs",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
||||
],
|
||||
"type": "boolean"
|
||||
},
|
||||
"exclude-package-data": {
|
||||
"$$description": [
|
||||
"Mapping from package names to lists of glob patterns that should be excluded",
|
||||
"For more information on how to include data files, check ``setuptools`` `docs",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"propertyNames": {
|
||||
"anyOf": [{"type": "string", "format": "python-module-name"}, {"const": "*"}]
|
||||
},
|
||||
"patternProperties": {
|
||||
"^.*$": {"type": "array", "items": {"type": "string"}}
|
||||
}
|
||||
},
|
||||
"namespace-packages": {
|
||||
"type": "array",
|
||||
"items": {"type": "string", "format": "python-module-name-relaxed"},
|
||||
"$comment": "https://setuptools.pypa.io/en/latest/userguide/package_discovery.html",
|
||||
"description": "**DEPRECATED**: use implicit namespaces instead (:pep:`420`)."
|
||||
},
|
||||
"py-modules": {
|
||||
"description": "Modules that setuptools will manipulate",
|
||||
"type": "array",
|
||||
"items": {"type": "string", "format": "python-module-name-relaxed"},
|
||||
"$comment": "TODO: clarify the relationship with ``packages``"
|
||||
},
|
||||
"ext-modules": {
|
||||
"description": "Extension modules to be compiled by setuptools",
|
||||
"type": "array",
|
||||
"items": {"$ref": "#/definitions/ext-module"}
|
||||
},
|
||||
"data-files": {
|
||||
"$$description": [
|
||||
"``dict``-like structure where each key represents a directory and",
|
||||
"the value is a list of glob patterns that should be installed in them.",
|
||||
"**DISCOURAGED**: please notice this might not work as expected with wheels.",
|
||||
"Whenever possible, consider using data files inside the package directories",
|
||||
"(or create a new namespace package that only contains data files).",
|
||||
"See `data files support",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/datafiles.html>`_."
|
||||
],
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.*$": {"type": "array", "items": {"type": "string"}}
|
||||
}
|
||||
},
|
||||
"cmdclass": {
|
||||
"$$description": [
|
||||
"Mapping of distutils-style command names to ``setuptools.Command`` subclasses",
|
||||
"which in turn should be represented by strings with a qualified class name",
|
||||
"(i.e., \"dotted\" form with module), e.g.::\n\n",
|
||||
" cmdclass = {mycmd = \"pkg.subpkg.module.CommandClass\"}\n\n",
|
||||
"The command class should be a directly defined at the top-level of the",
|
||||
"containing module (no class nesting)."
|
||||
],
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"^.*$": {"type": "string", "format": "python-qualified-identifier"}
|
||||
}
|
||||
},
|
||||
"license-files": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"},
|
||||
"$$description": [
|
||||
"**PROVISIONAL**: list of glob patterns for all license files being distributed.",
|
||||
"(likely to become standard with :pep:`639`).",
|
||||
"By default: ``['LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*']``"
|
||||
],
|
||||
"$comment": "TODO: revise if PEP 639 is accepted. Probably ``project.license-files``?"
|
||||
},
|
||||
"dynamic": {
|
||||
"type": "object",
|
||||
"description": "Instructions for loading :pep:`621`-related metadata dynamically",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"version": {
|
||||
"$$description": [
|
||||
"A version dynamically loaded via either the ``attr:`` or ``file:``",
|
||||
"directives. Please make sure the given file or attribute respects :pep:`440`.",
|
||||
"Also ensure to set ``project.dynamic`` accordingly."
|
||||
],
|
||||
"oneOf": [
|
||||
{"$ref": "#/definitions/attr-directive"},
|
||||
{"$ref": "#/definitions/file-directive"}
|
||||
]
|
||||
},
|
||||
"classifiers": {"$ref": "#/definitions/file-directive"},
|
||||
"description": {"$ref": "#/definitions/file-directive"},
|
||||
"entry-points": {"$ref": "#/definitions/file-directive"},
|
||||
"dependencies": {"$ref": "#/definitions/file-directive-for-dependencies"},
|
||||
"optional-dependencies": {
|
||||
"type": "object",
|
||||
"propertyNames": {"type": "string", "format": "pep508-identifier"},
|
||||
"additionalProperties": false,
|
||||
"patternProperties": {
|
||||
".+": {"$ref": "#/definitions/file-directive-for-dependencies"}
|
||||
}
|
||||
},
|
||||
"readme": {
|
||||
"type": "object",
|
||||
"anyOf": [
|
||||
{"$ref": "#/definitions/file-directive"},
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"content-type": {"type": "string"},
|
||||
"file": { "$ref": "#/definitions/file-directive/properties/file" }
|
||||
},
|
||||
"additionalProperties": false}
|
||||
],
|
||||
"required": ["file"]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"definitions": {
|
||||
"package-name": {
|
||||
"$id": "#/definitions/package-name",
|
||||
"title": "Valid package name",
|
||||
"description": "Valid package name (importable or :pep:`561`).",
|
||||
"type": "string",
|
||||
"anyOf": [
|
||||
{"type": "string", "format": "python-module-name-relaxed"},
|
||||
{"type": "string", "format": "pep561-stub-name"}
|
||||
]
|
||||
},
|
||||
"ext-module": {
|
||||
"$id": "#/definitions/ext-module",
|
||||
"title": "Extension module",
|
||||
"description": "Parameters to construct a :class:`setuptools.Extension` object",
|
||||
"type": "object",
|
||||
"required": ["name", "sources"],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"format": "python-module-name-relaxed"
|
||||
},
|
||||
"sources": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"include-dirs":{
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"define-macros": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": [
|
||||
{"description": "macro name", "type": "string"},
|
||||
{"description": "macro value", "oneOf": [{"type": "string"}, {"type": "null"}]}
|
||||
],
|
||||
"additionalItems": false
|
||||
}
|
||||
},
|
||||
"undef-macros": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"library-dirs": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"libraries": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"runtime-library-dirs": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"extra-objects": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"extra-compile-args": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"extra-link-args": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"export-symbols": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"swig-opts": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"depends": {
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"language": {"type": "string"},
|
||||
"optional": {"type": "boolean"},
|
||||
"py-limited-api": {"type": "boolean"}
|
||||
}
|
||||
},
|
||||
"file-directive": {
|
||||
"$id": "#/definitions/file-directive",
|
||||
"title": "'file:' directive",
|
||||
"description":
|
||||
"Value is read from a file (or list of files and then concatenated)",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"file": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
]
|
||||
}
|
||||
},
|
||||
"required": ["file"]
|
||||
},
|
||||
"file-directive-for-dependencies": {
|
||||
"title": "'file:' directive for dependencies",
|
||||
"allOf": [
|
||||
{
|
||||
"$$description": [
|
||||
"**BETA**: subset of the ``requirements.txt`` format",
|
||||
"without ``pip`` flags and options",
|
||||
"(one :pep:`508`-compliant string per line,",
|
||||
"lines that are blank or start with ``#`` are excluded).",
|
||||
"See `dynamic metadata",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html#dynamic-metadata>`_."
|
||||
]
|
||||
},
|
||||
{"$ref": "#/definitions/file-directive"}
|
||||
]
|
||||
},
|
||||
"attr-directive": {
|
||||
"title": "'attr:' directive",
|
||||
"$id": "#/definitions/attr-directive",
|
||||
"$$description": [
|
||||
"Value is read from a module attribute. Supports callables and iterables;",
|
||||
"unsupported types are cast via ``str()``"
|
||||
],
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"attr": {"type": "string", "format": "python-qualified-identifier"}
|
||||
},
|
||||
"required": ["attr"]
|
||||
},
|
||||
"find-directive": {
|
||||
"$id": "#/definitions/find-directive",
|
||||
"title": "'find:' directive",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"find": {
|
||||
"type": "object",
|
||||
"$$description": [
|
||||
"Dynamic `package discovery",
|
||||
"<https://setuptools.pypa.io/en/latest/userguide/package_discovery.html>`_."
|
||||
],
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"where": {
|
||||
"description":
|
||||
"Directories to be searched for packages (Unix-style relative path)",
|
||||
"type": "array",
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"exclude": {
|
||||
"type": "array",
|
||||
"$$description": [
|
||||
"Exclude packages that match the values listed in this field.",
|
||||
"Can contain shell-style wildcards (e.g. ``'pkg.*'``)"
|
||||
],
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"include": {
|
||||
"type": "array",
|
||||
"$$description": [
|
||||
"Restrict the found packages to just the ones listed in this field.",
|
||||
"Can contain shell-style wildcards (e.g. ``'pkg.*'``)"
|
||||
],
|
||||
"items": {"type": "string"}
|
||||
},
|
||||
"namespaces": {
|
||||
"type": "boolean",
|
||||
"$$description": [
|
||||
"When ``True``, directories without a ``__init__.py`` file will also",
|
||||
"be scanned for :pep:`420`-style implicit namespaces"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user