diff --git a/.cirrus.yml b/.cirrus.yml index 9f0c438d..d7d3d172 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -78,7 +78,7 @@ build_task: container: {image: "python:3.11-bullseye"} clone_script: *clone <<: *task-template - install_script: pip install tox + install_script: pip install tox tox-uv build_script: - tox -e clean,lint,typecheck,build - tar czf dist.tar.gz dist @@ -102,7 +102,7 @@ linux_task: container: {image: "python:3.13-rc-bookworm"} allow_failures: true # RC install_script: - - python -m pip install --upgrade pip tox pipx + - python -m pip install --upgrade pip tox tox-uv pipx <<: *test-template alias: base-test @@ -127,7 +127,7 @@ macos_task: freebsd_task: name: test (freebsd - 3.11) - freebsd_instance: {image_family: freebsd-14-0} + freebsd_instance: {image_family: freebsd-14-2} install_script: - pkg remove -y python lang/python - pkg install -y git python311 py311-pip py311-gdbm py311-sqlite3 py311-tox py311-tomli py311-pipx @@ -149,7 +149,7 @@ windows_task: - choco install -y --no-progress python3 --version=3.12.5 --params "/NoLockdown" - choco install -y --no-progress curl - pip install --upgrade certifi - - python -m pip install -U pip tox pipx + - python -m pip install -U pip tox tox-uv pipx <<: *test-template depends_on: [build, base-test] @@ -168,7 +168,7 @@ linkcheck_task: depends_on: [finalize] allow_failures: true <<: *task-template - install_script: pip install tox + install_script: pip install tox tox-uv download_artifact_script: *download-artifact linkcheck_script: tox --installpkg dist/*.whl -e linkcheck -- -q diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9e06eb5e..695032e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -34,10 +34,15 @@ jobs: with: {fetch-depth: 0} # deep clone for setuptools-scm - uses: actions/setup-python@v5 with: {python-version: "3.10"} + - uses: astral-sh/setup-uv@v5 - name: Run static analysis and format checkers - run: pipx run --python python3.10 tox -e 
lint,typecheck + run: >- + uvx --with tox-uv + tox -e lint,typecheck - name: Build package distribution files - run: pipx run --python python3.10 tox -e clean,build + run: >- + uvx --with tox-uv + tox -e clean,build - name: Record the path of wheel distribution id: wheel-distribution run: echo "path=$(ls dist/*.whl)" >> $GITHUB_OUTPUT @@ -65,8 +70,8 @@ jobs: strategy: matrix: python: - - "3.8" # oldest Python supported by PSF - - "3.12" # newest Python that is stable + - "3.8" # oldest Python supported by validate-pyproject + - "3.x" # newest Python that is stable platform: - ubuntu-latest - macos-13 @@ -77,6 +82,7 @@ jobs: - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} + - uses: astral-sh/setup-uv@v5 - name: Retrieve pre-built distribution files uses: actions/download-artifact@v4 with: {name: python-distribution-files, path: dist/} @@ -87,7 +93,8 @@ jobs: path: ${{ env.VALIDATE_PYPROJECT_CACHE_REMOTE }} - name: Run tests run: >- - pipx run tox + uvx --with tox-uv + tox --installpkg '${{ needs.prepare.outputs.wheel-distribution }}' -- -n 5 -rFEx --durations 10 --color yes - name: Generate coverage report @@ -118,6 +125,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: {python-version: "3.10"} + - uses: astral-sh/setup-uv@v5 - name: Retrieve pre-built distribution files uses: actions/download-artifact@v4 with: {name: python-distribution-files, path: dist/} @@ -127,4 +135,6 @@ jobs: TWINE_REPOSITORY: pypi TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} - run: pipx run tox -e publish + run: >- + uvx --with tox-uv + tox -e publish diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c78f7cd3..b2445dfe 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,13 +20,13 @@ repos: args: ['--fix=auto'] # replace 'auto' with 'lf' to enforce Linux/Mac line endings or 'crlf' for Windows - repo: https://github.com/codespell-project/codespell - rev: v2.3.0 + rev: v2.4.1 
hooks: - id: codespell args: [-w, -L, "THIRDPARTY"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.2 # Ruff version + rev: v0.11.0 # Ruff version hooks: - id: ruff args: [--fix, --show-fixes] @@ -63,7 +63,7 @@ repos: - validate-pyproject[all]>=0.13 - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.4 + rev: 0.31.3 hooks: - id: check-metaschema files: \.schema\.json$ @@ -71,7 +71,7 @@ repos: - id: check-github-workflows - repo: https://github.com/scientific-python/cookie - rev: 2024.08.19 + rev: 2025.01.22 hooks: - id: sp-repo-review name: Validate Python repository diff --git a/.readthedocs.yml b/.readthedocs.yml index 5ed1344e..4b1a0f4f 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,23 +5,25 @@ version: 2 build: - os: "ubuntu-22.04" + os: ubuntu-lts-latest tools: - python: "3.10" + python: latest + jobs: + pre_create_environment: + - asdf plugin add uv + - asdf install uv latest + - asdf global uv latest + create_environment: + - uv venv $READTHEDOCS_VIRTUALENV_PATH + install: + # Use a cache dir in the same mount to halve the install time + # pip and uv pip will gain support for groups soon + - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv sync --active --cache-dir $READTHEDOCS_VIRTUALENV_PATH/../../uv_cache --group docs --extra all # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py -# Build documentation with MkDocs -#mkdocs: -# configuration: mkdocs.yml - # Optionally build your docs in additional formats such as PDF formats: - pdf - -python: - install: - - requirements: docs/requirements.txt - - {path: ., extra_requirements: [all], method: pip} diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e5ff7709..e08559a6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,6 +6,35 @@ Changelog Development Version ==================== + +Version 0.24.1 +============== +* Fixed multi plugin id was read from the wrong place by @henryiii, #240. 
+* Implemented alternative plugin sorting, #243 + +Version 0.24 +============ +* Fix integration with ``SchemaStore`` by loading extra/side schemas, #226, #229. +* Add support for loading extra schemas, #226. +* Fixed verify author dict is not empty, #232. +* Added support for ``validate_pyproject.multi_schema`` plugins with extra schemas, #231. +* ``validate-pyproject`` no longer communicates test dependencies via the ``tests`` + extra and documentation dependencies via the ``docs/requirements.txt`` file. + Instead :doc:`pypa:dependency-groups` have been adopted to support CI environments, #227. + + As a result, ``uv``'s high level interface also works for developers. You can use the :pypi:`dependency-groups` + package on PyPI if you need to convert to a classic requirements list. + +Contributions by @henryiii. + +Version 0.23 +============ +* Validate SPDX license expressions by @cdce8p in #217 + +Version 0.22 +============ +* Prevent injecting defaults and modifying input in-place, by @henryiii in #213 + Version 0.21 ============ * Added support PEP 735, #208 diff --git a/docs/dev-guide.rst b/docs/dev-guide.rst index 8591987a..a8dbfe7c 100644 --- a/docs/dev-guide.rst +++ b/docs/dev-guide.rst @@ -118,8 +118,62 @@ When using a :pep:`621`-compliant backend, the following can be add to your ``pyproject.toml`` file: The plugin function will be automatically called with the ``tool_name`` argument as same name as given to the entrypoint (e.g. :samp:`your_plugin({"your-tool"})`). -Also notice plugins are activated in a specific order, using Python's built-in -``sorted`` function. + +Providing multiple schemas +-------------------------- + +A second system is defined for providing multiple schemas in a single plugin. +This is useful when a single plugin is responsible for multiple subtables +under the ``tool`` table, or if you need to provide multiple schemas for +a single subtable.
+ +To use this system, the plugin function, which does not take any arguments, +should return a dictionary with two keys: ``tools``, which is a dictionary of +tool names to schemas, and optionally ``schemas``, which is a list of schemas +that are not associated with any specific tool, but are loaded via ref's from +the other tools. + +When using a :pep:`621`-compliant backend, the following can be added to your +``pyproject.toml`` file: + +.. code-block:: toml + + # in pyproject.toml + [project.entry-points."validate_pyproject.multi_schema"] + arbitrary = "your_package.your_module:your_plugin" + +An example of the plugin structure needed for this system is shown below: + +.. code-block:: python + + def your_plugin() -> dict: + return { + "tools": {"my-tool": my_schema}, + "schemas": [my_extra_schema], + } + +Fragments for schemas are also supported with this system; use ``#`` to split +the tool name and fragment path in the dictionary key. + + +.. admonition:: Experimental: Conflict Resolution + + Please notice that when two plugins define the same ``tool`` + (or auxiliary schemas with the same ``$id``), + an internal conflict resolution heuristic is employed to decide + which schema will take effect. + + To influence this heuristic you can: + + - Define a numeric ``.priority`` property in the functions + pointed by the ``validate_pyproject.tool_schema`` entry-points. + - Add a ``"priority"`` key with a numeric value into the dictionary + returned by the ``validate_pyproject.multi_schema`` plugins. + + Typical values for ``priority`` are ``0`` and ``1``. + + The exact order in which the plugins are loaded is considered an + implementation detail. ..
_entry-point: https://setuptools.pypa.io/en/stable/userguide/entry_point.html#entry-points diff --git a/docs/index.rst b/docs/index.rst index 3412c978..3ca46207 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -4,7 +4,7 @@ validate-pyproject **validate-pyproject** is a command line tool and Python library for validating ``pyproject.toml`` files based on JSON Schema, and includes checks for -:pep:`517`, :pep:`518` and :pep:`621`. +:pep:`517`, :pep:`518`, :pep:`621`, :pep:`639`, and :pep:`735`. Contents diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index 96aaa4e4..00000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -# Requirements file for ReadTheDocs, check .readthedocs.yml. -# To build the module reference correctly, make sure every external package -# under `install_requires` in `setup.cfg` is also listed here! -furo>=2023.08.17 -sphinx>=7.2.2 -sphinx-argparse>=0.3.1 -sphinx-copybutton -sphinx-jsonschema>=1.16.11 -sphinxemoji diff --git a/pyproject.toml b/pyproject.toml index e7c901c3..aab9cc35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,32 @@ all = [ "trove-classifiers>=2021.10.20", ] store = ["validate-pyproject-schema-store"] + +[project.scripts] +validate-pyproject = "validate_pyproject.cli:main" + +[project.entry-points."validate_pyproject.tool_schema"] +setuptools = "validate_pyproject.api:load_builtin_plugin" +distutils = "validate_pyproject.api:load_builtin_plugin" + +[project.entry-points."repo_review.checks"] +validate_pyproject = "validate_pyproject.repo_review:repo_review_checks" + +[project.entry-points."repo_review.families"] +validate_pyproject = "validate_pyproject.repo_review:repo_review_families" + +[dependency-groups] +dev = [ + { include-group = "test" }, +] +docs = [ + "furo>=2023.08.17", + "sphinx>=7.2.2", + "sphinx-argparse>=0.3.1", + "sphinx-copybutton", + "sphinx-jsonschema>=1.16.11", + "sphinxemoji", +] test = [ "setuptools", "pytest>=8.3.3", @@ -49,18 +75,13 @@ 
typecheck = [ "importlib-resources", ] -[project.scripts] -validate-pyproject = "validate_pyproject.cli:main" - -[project.entry-points."validate_pyproject.tool_schema"] -setuptools = "validate_pyproject.api:load_builtin_plugin" -distutils = "validate_pyproject.api:load_builtin_plugin" - -[project.entry-points."repo_review.checks"] -validate_pyproject = "validate_pyproject.repo_review:repo_review_checks" - -[project.entry-points."repo_review.families"] -validate_pyproject = "validate_pyproject.repo_review:repo_review_families" +[tool.uv] +environments = [ + "python_version >= '3.9'", +] +dev-dependencies = [ + "validate_pyproject[all]", +] [tool.setuptools_scm] version_scheme = "no-guess-dev" @@ -76,8 +97,10 @@ addopts = """ """ norecursedirs = ["dist", "build", ".*"] testpaths = ["src", "tests"] +log_cli_level = "info" [tool.mypy] +python_version = "3.8" enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] show_traceback = true warn_unreachable = true diff --git a/src/validate_pyproject/api.py b/src/validate_pyproject/api.py index 7fbdb6d5..48e27e93 100644 --- a/src/validate_pyproject/api.py +++ b/src/validate_pyproject/api.py @@ -4,6 +4,7 @@ import json import logging +import sys import typing from enum import Enum from functools import partial, reduce @@ -33,15 +34,18 @@ from .plugins import PluginProtocol -try: # pragma: no cover +if sys.version_info >= (3, 9): # pragma: no cover from importlib.resources import files def read_text(package: Union[str, ModuleType], resource: str) -> str: """:meta private:""" return files(package).joinpath(resource).read_text(encoding="utf-8") -except ImportError: # pragma: no cover - from importlib.resources import read_text +else: # pragma: no cover + from importlib.resources import read_text as read_text # noqa: PLC0414 + + +__all__ = ["Validator"] T = TypeVar("T", bound=Mapping) @@ -94,7 +98,7 @@ def __init__(self, plugins: Sequence["PluginProtocol"] = ()): self._schemas: Dict[str, Tuple[str, str, 
Schema]] = {} # (which part of the TOML, who defines, schema) - top_level = typing.cast(dict, load(TOP_LEVEL_SCHEMA)) # Make it mutable + top_level = typing.cast("dict", load(TOP_LEVEL_SCHEMA)) # Make it mutable self._spec_version: str = top_level["$schema"] top_properties = top_level["properties"] tool_properties = top_properties["tool"].setdefault("properties", {}) @@ -109,19 +113,23 @@ def __init__(self, plugins: Sequence["PluginProtocol"] = ()): # Add tools using Plugins for plugin in plugins: - allow_overwrite: Optional[str] = None - if plugin.tool in tool_properties: - _logger.warning(f"{plugin.id} overwrites `tool.{plugin.tool}` schema") - allow_overwrite = plugin.schema.get("$id") + if plugin.tool: + allow_overwrite: Optional[str] = None + if plugin.tool in tool_properties: + _logger.warning(f"{plugin} overwrites `tool.{plugin.tool}` schema") + allow_overwrite = plugin.schema.get("$id") + else: + _logger.info(f"{plugin} defines `tool.{plugin.tool}` schema") + compatible = self._ensure_compatibility( + plugin.tool, plugin.schema, allow_overwrite + ) + sid = compatible["$id"] + sref = f"{sid}#{plugin.fragment}" if plugin.fragment else sid + tool_properties[plugin.tool] = {"$ref": sref} + self._schemas[sid] = (f"tool.{plugin.tool}", plugin.id, plugin.schema) else: - _logger.info(f"{plugin.id} defines `tool.{plugin.tool}` schema") - compatible = self._ensure_compatibility( - plugin.tool, plugin.schema, allow_overwrite - ) - sid = compatible["$id"] - sref = f"{sid}#{plugin.fragment}" if plugin.fragment else sid - tool_properties[plugin.tool] = {"$ref": sref} - self._schemas[sid] = (f"tool.{plugin.tool}", plugin.id, plugin.schema) + _logger.info(f"{plugin} defines extra schema {plugin.id}") + self._schemas[plugin.id] = (plugin.id, plugin.id, plugin.schema) self._main_id: str = top_level["$id"] main_schema = Schema(top_level) @@ -139,17 +147,22 @@ def main(self) -> str: return self._main_id def _ensure_compatibility( - self, reference: str, schema: Schema, 
allow_overwrite: Optional[str] = None + self, + reference: str, + schema: Schema, + allow_overwrite: Optional[str] = None, ) -> Schema: if "$id" not in schema or not schema["$id"]: - raise errors.SchemaMissingId(reference) + raise errors.SchemaMissingId(reference or "") sid = schema["$id"] if sid in self._schemas and sid != allow_overwrite: raise errors.SchemaWithDuplicatedId(sid) version = schema.get("$schema") # Support schemas with missing trailing # (incorrect, but required before 0.15) if version and version.rstrip("#") != self.spec_version.rstrip("#"): - raise errors.InvalidSchemaVersion(reference, version, self.spec_version) + raise errors.InvalidSchemaVersion( + reference or sid, version, self.spec_version + ) return schema def __getitem__(self, key: str) -> Schema: @@ -265,7 +278,7 @@ def __call__(self, pyproject: T) -> T: self.schema, self.handlers, dict(self.formats), use_default=False ) fn = partial(compiled, custom_formats=self._format_validators) - self._cache = typing.cast(ValidationFn, fn) + self._cache = typing.cast("ValidationFn", fn) with detailed_errors(): self._cache(pyproject) diff --git a/src/validate_pyproject/caching.py b/src/validate_pyproject/caching.py index f09e633c..458bcd28 100644 --- a/src/validate_pyproject/caching.py +++ b/src/validate_pyproject/caching.py @@ -33,7 +33,7 @@ def as_file( cache_path.write_text(f.getvalue(), encoding="utf-8") _logger.debug(f"Caching {arg} into {cache_path}") - return open(cache_path, "rb") # noqa: SIM115 -- not relevant + return open(cache_path, "rb") def path_for(arbitrary_id: str, cache: Optional[PathLike] = None) -> Optional[Path]: diff --git a/src/validate_pyproject/cli.py b/src/validate_pyproject/cli.py index 37a59713..a356640c 100644 --- a/src/validate_pyproject/cli.py +++ b/src/validate_pyproject/cli.py @@ -30,7 +30,7 @@ from . 
import _tomllib as tomllib from .api import Validator from .errors import ValidationError -from .plugins import PluginWrapper +from .plugins import PluginProtocol, PluginWrapper from .plugins import list_from_entry_points as list_plugins_from_entry_points from .remote import RemotePlugin, load_store @@ -124,7 +124,7 @@ class CliParams(NamedTuple): dump_json: bool = False -def __meta__(plugins: Sequence[PluginWrapper]) -> Dict[str, dict]: +def __meta__(plugins: Sequence[PluginProtocol]) -> Dict[str, dict]: """'Hyper parameters' to instruct :mod:`argparse` how to create the CLI""" meta = {k: v.copy() for k, v in META.items()} meta["enable"]["choices"] = {p.tool for p in plugins} @@ -135,9 +135,9 @@ def __meta__(plugins: Sequence[PluginWrapper]) -> Dict[str, dict]: @critical_logging() def parse_args( args: Sequence[str], - plugins: Sequence[PluginWrapper], + plugins: Sequence[PluginProtocol], description: str = "Validate a given TOML file", - get_parser_spec: Callable[[Sequence[PluginWrapper]], Dict[str, dict]] = __meta__, + get_parser_spec: Callable[[Sequence[PluginProtocol]], Dict[str, dict]] = __meta__, params_class: Type[T] = CliParams, # type: ignore[assignment] ) -> T: """Parse command line parameters @@ -167,11 +167,14 @@ def parse_args( return params_class(**params) # type: ignore[call-overload, no-any-return] +Plugins = TypeVar("Plugins", bound=PluginProtocol) + + def select_plugins( - plugins: Sequence[PluginWrapper], + plugins: Sequence[Plugins], enabled: Sequence[str] = (), disabled: Sequence[str] = (), -) -> List[PluginWrapper]: +) -> List[Plugins]: available = list(plugins) if enabled: available = [p for p in available if p.tool in enabled] @@ -219,7 +222,7 @@ def run(args: Sequence[str] = ()) -> int: (for example ``["--verbose", "setup.cfg"]``). 
""" args = args or sys.argv[1:] - plugins: List[PluginWrapper] = list_plugins_from_entry_points() + plugins = list_plugins_from_entry_points() params: CliParams = parse_args(args, plugins) setup_logging(params.loglevel) tool_plugins = [RemotePlugin.from_str(t) for t in params.tool] @@ -263,7 +266,7 @@ def _split_lines(self, text: str, width: int) -> List[str]: return list(chain.from_iterable(wrap(x, width) for x in text.splitlines())) -def plugins_help(plugins: Sequence[PluginWrapper]) -> str: +def plugins_help(plugins: Sequence[PluginProtocol]) -> str: return "\n".join(_format_plugin_help(p) for p in plugins) @@ -273,7 +276,7 @@ def _flatten_str(text: str) -> str: return (text[0].lower() + text[1:]).strip() -def _format_plugin_help(plugin: PluginWrapper) -> str: +def _format_plugin_help(plugin: PluginProtocol) -> str: help_text = plugin.help_text help_text = f": {_flatten_str(help_text)}" if help_text else "" return f"* {plugin.tool!r}{help_text}" diff --git a/src/validate_pyproject/extra_validations.py b/src/validate_pyproject/extra_validations.py index 789411d0..b99d9c91 100644 --- a/src/validate_pyproject/extra_validations.py +++ b/src/validate_pyproject/extra_validations.py @@ -19,8 +19,7 @@ class RedefiningStaticFieldAsDynamic(ValidationError): """ __doc__ = _DESC _URL = ( - "https://packaging.python.org/en/latest/specifications/" - "pyproject-toml/#dynamic" + "https://packaging.python.org/en/latest/specifications/pyproject-toml/#dynamic" ) diff --git a/src/validate_pyproject/plugins/__init__.py b/src/validate_pyproject/plugins/__init__.py index 19ca2c14..a9dcb610 100644 --- a/src/validate_pyproject/plugins/__init__.py +++ b/src/validate_pyproject/plugins/__init__.py @@ -7,14 +7,26 @@ import typing from importlib.metadata import EntryPoint, entry_points +from itertools import chain from string import Template from textwrap import dedent -from typing import Any, Callable, Iterable, List, Optional, Protocol +from typing import ( + Any, + Callable, + Generator, 
+ Iterable, + List, + NamedTuple, + Optional, + Protocol, + Union, +) from .. import __version__ from ..types import Plugin, Schema -ENTRYPOINT_GROUP = "validate_pyproject.tool_schema" +_DEFAULT_MULTI_PRIORITY = 0 +_DEFAULT_TOOL_PRIORITY = 1 class PluginProtocol(Protocol): @@ -55,6 +67,10 @@ def schema(self) -> Schema: def fragment(self) -> str: return "" + @property + def priority(self) -> float: + return getattr(self._load_fn, "priority", _DEFAULT_TOOL_PRIORITY) + @property def help_text(self) -> str: tpl = self._load_fn.__doc__ @@ -65,35 +81,76 @@ def help_text(self) -> str: def __repr__(self) -> str: return f"{self.__class__.__name__}({self.tool!r}, {self.id})" + def __str__(self) -> str: + return self.id + + +class StoredPlugin: + def __init__(self, tool: str, schema: Schema, source: str, priority: float): + self._tool, _, self._fragment = tool.partition("#") + self._schema = schema + self._source = source + self._priority = priority + + @property + def id(self) -> str: + return self._schema["$id"] # type: ignore[no-any-return] + + @property + def tool(self) -> str: + return self._tool + + @property + def schema(self) -> Schema: + return self._schema + + @property + def fragment(self) -> str: + return self._fragment + + @property + def priority(self) -> float: + return self._priority + + @property + def help_text(self) -> str: + return self.schema.get("description", "") + + def __str__(self) -> str: + return self._source + + def __repr__(self) -> str: + args = [repr(self.tool), self.id] + if self.fragment: + args.append(f"fragment={self.fragment!r}") + return f"{self.__class__.__name__}({', '.join(args)}, )" + if typing.TYPE_CHECKING: - _: PluginProtocol = typing.cast(PluginWrapper, None) + _: PluginProtocol = typing.cast("PluginWrapper", None) -def iterate_entry_points(group: str = ENTRYPOINT_GROUP) -> Iterable[EntryPoint]: - """Produces a generator yielding an EntryPoint object for each plugin registered +def iterate_entry_points(group: str) -> 
Iterable[EntryPoint]: + """Produces an iterable yielding an EntryPoint object for each plugin registered via ``setuptools`` `entry point`_ mechanism. This method can be used in conjunction with :obj:`load_from_entry_point` to filter - the plugins before actually loading them. + the plugins before actually loading them. The entry points are not + deduplicated. """ entries = entry_points() if hasattr(entries, "select"): # pragma: no cover # The select method was introduced in importlib_metadata 3.9 (and Python 3.10) # and the previous dict interface was declared deprecated select = typing.cast( - Any, + "Callable[..., Iterable[EntryPoint]]", getattr(entries, "select"), # noqa: B009 ) # typecheck gymnastics - entries_: Iterable[EntryPoint] = select(group=group) - else: # pragma: no cover - # TODO: Once Python 3.10 becomes the oldest version supported, this fallback and - # conditional statement can be removed. - entries_ = (plugin for plugin in entries.get(group, [])) - deduplicated = { - e.name: e for e in sorted(entries_, key=lambda e: (e.name, e.value)) - } - return list(deduplicated.values()) + return select(group=group) + # pragma: no cover + # TODO: Once Python 3.10 becomes the oldest version supported, this fallback and + # conditional statement can be removed. 
+ return (plugin for plugin in entries.get(group, [])) def load_from_entry_point(entry_point: EntryPoint) -> PluginWrapper: @@ -105,23 +162,75 @@ def load_from_entry_point(entry_point: EntryPoint) -> PluginWrapper: raise ErrorLoadingPlugin(entry_point=entry_point) from ex +def load_from_multi_entry_point( + entry_point: EntryPoint, +) -> Generator[StoredPlugin, None, None]: + """Carefully load the plugin, raising a meaningful message in case of errors""" + try: + fn = entry_point.load() + output = fn() + id_ = f"{fn.__module__}.{fn.__name__}" + except Exception as ex: + raise ErrorLoadingPlugin(entry_point=entry_point) from ex + + priority = output.get("priority", _DEFAULT_MULTI_PRIORITY) + for tool, schema in output["tools"].items(): + yield StoredPlugin(tool, schema, f"{id_}:{tool}", priority) + for i, schema in enumerate(output.get("schemas", [])): + yield StoredPlugin("", schema, f"{id_}:{i}", priority) + + +class _SortablePlugin(NamedTuple): + name: str + plugin: Union[PluginWrapper, StoredPlugin] + + def key(self) -> str: + return self.plugin.tool or self.plugin.id + + def __lt__(self, other: Any) -> bool: + # **Major concern**: + # Consistency and reproducibility on which entry-points have priority + # for a given environment. + # The plugin with higher priority overwrites the schema definition. + # The exact order that they are listed itself is not important for now. + # **Implementation detail**: + # By default, "single tool plugins" have priority 1 and "multi plugins" + # have priority 0. + # The order that the plugins will be listed is inverse to the priority. + # If 2 plugins have the same numerical priority, the one whose + # entry-point name is "higher alphabetically" wins. 
+ return (self.plugin.priority, self.name, self.key()) < ( + other.plugin.priority, + other.name, + other.key(), + ) + + def list_from_entry_points( - group: str = ENTRYPOINT_GROUP, filtering: Callable[[EntryPoint], bool] = lambda _: True, -) -> List[PluginWrapper]: +) -> List[Union[PluginWrapper, StoredPlugin]]: """Produces a list of plugin objects for each plugin registered via ``setuptools`` `entry point`_ mechanism. Args: - group: name of the setuptools' entry point group where plugins is being - registered filtering: function returning a boolean deciding if the entry point should be loaded and included (or not) in the final list. A ``True`` return means the plugin should be included. """ - return [ - load_from_entry_point(e) for e in iterate_entry_points(group) if filtering(e) - ] + tool_eps = ( + _SortablePlugin(e.name, load_from_entry_point(e)) + for e in iterate_entry_points("validate_pyproject.tool_schema") + if filtering(e) + ) + multi_eps = ( + _SortablePlugin(e.name, p) + for e in iterate_entry_points("validate_pyproject.multi_schema") + for p in load_from_multi_entry_point(e) + if filtering(e) + ) + eps = chain(tool_eps, multi_eps) + dedup = {e.key(): e.plugin for e in sorted(eps)} + return list(dedup.values()) class ErrorLoadingPlugin(RuntimeError): diff --git a/src/validate_pyproject/pre_compile/cli.py b/src/validate_pyproject/pre_compile/cli.py index 985ba741..46e538e4 100644 --- a/src/validate_pyproject/pre_compile/cli.py +++ b/src/validate_pyproject/pre_compile/cli.py @@ -10,7 +10,7 @@ from typing import Any, Dict, List, Mapping, NamedTuple, Sequence from .. import cli -from ..plugins import PluginWrapper +from ..plugins import PluginProtocol, PluginWrapper from ..plugins import list_from_entry_points as list_plugins_from_entry_points from ..remote import RemotePlugin, load_store from . 
import pre_compile @@ -85,7 +85,9 @@ class CliParams(NamedTuple): store: str = "" -def parser_spec(plugins: Sequence[PluginWrapper]) -> Dict[str, dict]: +def parser_spec( + plugins: Sequence[PluginProtocol], +) -> Dict[str, dict]: common = ("version", "enable", "disable", "verbose", "very_verbose") cli_spec = cli.__meta__(plugins) meta = {k: v.copy() for k, v in META.items()} @@ -101,7 +103,7 @@ def run(args: Sequence[str] = ()) -> int: prms = cli.parse_args(args, plugins, desc, parser_spec, CliParams) cli.setup_logging(prms.loglevel) - tool_plugins = [RemotePlugin.from_str(t) for t in prms.tool] + tool_plugins: List[PluginProtocol] = [RemotePlugin.from_str(t) for t in prms.tool] if prms.store: tool_plugins.extend(load_store(prms.store)) diff --git a/src/validate_pyproject/project_metadata.schema.json b/src/validate_pyproject/project_metadata.schema.json index 00c3d03e..c04e6d3a 100644 --- a/src/validate_pyproject/project_metadata.schema.json +++ b/src/validate_pyproject/project_metadata.schema.json @@ -326,7 +326,11 @@ "format": "idn-email", "description": "MUST be a valid email address" } - } + }, + "anyOf": [ + { "required": ["name"] }, + { "required": ["email"] } + ] }, "entry-point-group": { "$id": "#/definitions/entry-point-group", diff --git a/src/validate_pyproject/remote.py b/src/validate_pyproject/remote.py index 2194c174..1ccae6e3 100644 --- a/src/validate_pyproject/remote.py +++ b/src/validate_pyproject/remote.py @@ -67,19 +67,26 @@ def load_store(pyproject_url: str) -> Generator[RemotePlugin, None, None]: fragment, contents = load_from_uri(pyproject_url) if fragment: - _logger.error(f"Must not be called with a fragment, got {fragment!r}") + _logger.error( + f"Must not be called with a fragment, got {fragment!r}" + ) # pragma: no cover table = contents["properties"]["tool"]["properties"] for tool, info in table.items(): if tool in {"setuptools", "distutils"}: pass # built-in elif "$ref" in info: _logger.info(f"Loading {tool} from store: 
{pyproject_url}") - yield RemotePlugin.from_url(tool, info["$ref"]) + rp = RemotePlugin.from_url(tool, info["$ref"]) + yield rp + for values in rp.schema["properties"].values(): + url = values.get("$ref", "") + if url.startswith(("https://", "https://")): + yield RemotePlugin.from_url("", url) else: - _logger.warning(f"{tool!r} does not contain $ref") + _logger.warning(f"{tool!r} does not contain $ref") # pragma: no cover if typing.TYPE_CHECKING: from .plugins import PluginProtocol - _: PluginProtocol = typing.cast(RemotePlugin, None) + _: PluginProtocol = typing.cast("RemotePlugin", None) diff --git a/src/validate_pyproject/repo_review.py b/src/validate_pyproject/repo_review.py index 09fc779d..51c93a6c 100644 --- a/src/validate_pyproject/repo_review.py +++ b/src/validate_pyproject/repo_review.py @@ -28,9 +28,9 @@ def repo_review_checks() -> Dict[str, VPP001]: def repo_review_families(pyproject: Dict[str, Any]) -> Dict[str, Dict[str, str]]: has_distutils = "distutils" in pyproject.get("tool", {}) - plugin_names = (ep.name for ep in plugins.iterate_entry_points()) - plugin_list = ( - f"`[tool.{n}]`" for n in plugin_names if n != "distutils" or has_distutils + plugin_list = plugins.list_from_entry_points( + lambda e: e.name != "distutils" or has_distutils ) - descr = f"Checks `[build-system]`, `[project]`, {', '.join(plugin_list)}" + plugin_names = (f"`[tool.{n.tool}]`" for n in plugin_list if n.tool) + descr = f"Checks `[build-system]`, `[project]`, {', '.join(plugin_names)}" return {"validate-pyproject": {"name": "Validate-PyProject", "description": descr}} diff --git a/tests/examples/simple/empty-author.toml b/tests/examples/simple/empty-author.toml new file mode 100644 index 00000000..51e5abdb --- /dev/null +++ b/tests/examples/simple/empty-author.toml @@ -0,0 +1,4 @@ +[project] +name = 'foo' +version = '1.0' +authors = [] diff --git a/tests/invalid-examples/pep621/missing-fields/empty-author.errors.txt 
b/tests/invalid-examples/pep621/missing-fields/empty-author.errors.txt new file mode 100644 index 00000000..6d2d01cf --- /dev/null +++ b/tests/invalid-examples/pep621/missing-fields/empty-author.errors.txt @@ -0,0 +1 @@ +`project.authors[0]` cannot be validated by any definition diff --git a/tests/invalid-examples/pep621/missing-fields/empty-author.toml b/tests/invalid-examples/pep621/missing-fields/empty-author.toml new file mode 100644 index 00000000..770e5d40 --- /dev/null +++ b/tests/invalid-examples/pep621/missing-fields/empty-author.toml @@ -0,0 +1,4 @@ +[project] +name = 'foo' +version = '1.0' +authors = [{}] diff --git a/tests/test_cli.py b/tests/test_cli.py index 7fd30f34..2b15e45d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -198,6 +198,12 @@ def test_bad_url(tmp_path, capsys): ) +def test_bad_extra_url(tmp_path, capsys): + example = write_example(tmp_path, name="valid-pyproject.toml") + with pytest.raises(ValueError, match="URL must start with 'http:' or 'https:'"): + cli.run(["--tool", "=file://json.schemastore.org/poetry.toml", str(example)]) + + @pytest.mark.skipif(sys.version_info[:2] < (3, 11), reason="requires 3.11+") def test_parser_is_tomllib(): """Make sure Python >= 3.11 uses tomllib instead of tomli""" diff --git a/tests/test_formats.py b/tests/test_formats.py index cdff3ee2..5bcea215 100644 --- a/tests/test_formats.py +++ b/tests/test_formats.py @@ -139,9 +139,10 @@ def test_entrypoint_references_with_extras(): assert formats.python_entrypoint_reference(example) is False -@pytest.mark.parametrize("example", ["module" "invalid-module"]) +@pytest.mark.parametrize("example", ["module", "invalid-module"]) def test_invalid_entrypoint_references(example): - assert formats.python_entrypoint_reference(example) is False + result = example == "module" + assert formats.python_entrypoint_reference(example) is result @pytest.mark.parametrize("example", ["λ", "a", "_"]) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 
1f07dec5..26cc0478 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -1,24 +1,32 @@ # The code in this module is mostly borrowed/adapted from PyScaffold and was originally # published under the MIT license # The original PyScaffold license can be found in 'NOTICE.txt' -from importlib.metadata import EntryPoint # pragma: no cover +from __future__ import annotations + +import sys +from collections import defaultdict +from importlib.metadata import EntryPoint +from types import ModuleType +from typing import Callable, TypeVar import pytest from validate_pyproject import plugins -from validate_pyproject.plugins import ENTRYPOINT_GROUP, ErrorLoadingPlugin +from validate_pyproject.plugins import ErrorLoadingPlugin, PluginWrapper, StoredPlugin EXISTING = ( "setuptools", "distutils", ) +T = TypeVar("T", bound=Callable) + def test_load_from_entry_point__error(): # This module does not exist, so Python will have some trouble loading it # EntryPoint(name, value, group) entry = "mypkg.SOOOOO___fake___:activate" - fake = EntryPoint("fake", entry, ENTRYPOINT_GROUP) + fake = EntryPoint("fake", entry, "validate_pyproject.tool_schema") with pytest.raises(ErrorLoadingPlugin): plugins.load_from_entry_point(fake) @@ -28,7 +36,7 @@ def is_entry_point(ep): def test_iterate_entry_points(): - plugin_iter = plugins.iterate_entry_points() + plugin_iter = plugins.iterate_entry_points("validate_pyproject.tool_schema") assert hasattr(plugin_iter, "__iter__") pluging_list = list(plugin_iter) assert all(is_entry_point(e) for e in pluging_list) @@ -68,3 +76,170 @@ def _fn2(_): pw = plugins.PluginWrapper("name", _fn2) assert pw.help_text == "Help for `name`" + + +class TestStoredPlugin: + def test_empty_help_text(self): + def _fn1(_): + return {} + + pw = plugins.StoredPlugin("name", {}, "id1", 0) + assert pw.help_text == "" + + def _fn2(_): + """Help for `${tool}`""" + return {} + + pw = plugins.StoredPlugin("name", {"description": "Help for me"}, "id2", 0) + assert pw.help_text 
== "Help for me" + + +class _FakeEntryPoints: + def __init__( + self, + monkeypatch: pytest.MonkeyPatch, + group: str = "__NOT_SPECIFIED__", + data: defaultdict[str, list[EntryPoint]] | None = None, + ): + self._monkeypatch = monkeypatch + self._group = group + self._data = defaultdict(list) if data is None else data + self.get = self._data.__getitem__ + + def group(self, group: str) -> _FakeEntryPoints: + return _FakeEntryPoints(self._monkeypatch, group, self._data) + + def reverse(self) -> _FakeEntryPoints: + data = defaultdict(list, {k: list(reversed(v)) for k, v in self._data.items()}) + return _FakeEntryPoints(self._monkeypatch, self._group, data) + + def __call__(self, *, name: str, value: str) -> Callable[[T], T]: + def fake_entry_point(impl: T) -> T: + ep = EntryPoint(name=name, value=value, group=self._group) + self._data[ep.group].append(ep) + module, _, func = ep.value.partition(":") + if module not in sys.modules: + self._monkeypatch.setitem(sys.modules, module, ModuleType(module)) + self._monkeypatch.setattr(sys.modules[module], func, impl, raising=False) + return impl + + return fake_entry_point + + +def test_multi_plugins(monkeypatch): + fake_eps = _FakeEntryPoints(monkeypatch, group="validate_pyproject.multi_schema") + fake_eps(name="f", value="test_module:f")( + lambda: { + "tools": {"example#frag": {"$id": "example1"}}, + "schemas": [ + {"$id": "example2"}, + {"$id": "example3"}, + ], + } + ) + monkeypatch.setattr(plugins, "iterate_entry_points", fake_eps.get) + + lst = plugins.list_from_entry_points() + assert len(lst) == 3 + + (fragmented,) = (e for e in lst if e.tool) + assert fragmented.tool == "example" + assert fragmented.fragment == "frag" + assert fragmented.schema == {"$id": "example1"} + + +@pytest.mark.parametrize("epname", ["aaa", "zzz"]) +def test_combined_plugins(monkeypatch, epname): + fake_eps = _FakeEntryPoints(monkeypatch) + multi_eps = fake_eps.group("validate_pyproject.multi_schema") + tool_eps = 
fake_eps.group("validate_pyproject.tool_schema") + multi_eps(name=epname, value="test_module:f")( + lambda: { + "tools": { + "example1": {"$id": "example1"}, + "example2": {"$id": "example2"}, + "example3": {"$id": "example3"}, + } + } + ) + tool_eps(name="example1", value="test_module:f1")(lambda _: {"$id": "ztool1"}) + tool_eps(name="example2", value="test_module:f2")(lambda _: {"$id": "atool2"}) + tool_eps(name="example4", value="test_module:f2")(lambda _: {"$id": "tool4"}) + + monkeypatch.setattr(plugins, "iterate_entry_points", fake_eps.get) + + lst = plugins.list_from_entry_points() + print(lst) + assert len(lst) == 4 + + assert lst[0].tool == "example1" + assert isinstance(lst[0], PluginWrapper) + + assert lst[1].tool == "example2" + assert isinstance(lst[1], PluginWrapper) + + assert lst[2].tool == "example3" + assert isinstance(lst[2], StoredPlugin) + + assert lst[3].tool == "example4" + assert isinstance(lst[3], PluginWrapper) + + +def test_several_multi_plugins(monkeypatch): + fake_eps = _FakeEntryPoints(monkeypatch, "validate_pyproject.multi_schema") + fake_eps(name="zzz", value="test_module:f1")( + lambda: { + "tools": {"example": {"$id": "example1"}}, + } + ) + fake_eps(name="aaa", value="test_module:f2")( + lambda: { + "tools": {"example": {"$id": "example2"}, "other": {"$id": "example3"}} + } + ) + for eps in (fake_eps, fake_eps.reverse()): + monkeypatch.setattr(plugins, "iterate_entry_points", eps.get) + # entry-point names closer to "zzzzzzzz..." 
have priority + (plugin1, plugin2) = plugins.list_from_entry_points() + print(plugin1, plugin2) + assert plugin1.schema["$id"] == "example1" + assert plugin2.schema["$id"] == "example3" + + +def test_custom_priority(monkeypatch): + fake_eps = _FakeEntryPoints(monkeypatch) + tool_eps = fake_eps.group("validate_pyproject.tool_schema") + multi_eps = fake_eps.group("validate_pyproject.multi_schema") + + multi_schema = {"tools": {"example": {"$id": "multi-eps"}}} + multi_eps(name="example", value="test_module:f")(lambda: multi_schema) + + @tool_eps(name="example", value="test_module1:f1") + def tool_schema1(_name): + return {"$id": "tool-eps-1"} + + @tool_eps(name="example", value="test_module2:f1") + def tool_schema2(_name): + return {"$id": "tool-eps-2"} + + monkeypatch.setattr(plugins, "iterate_entry_points", fake_eps.get) + (winner,) = plugins.list_from_entry_points() # default: tool with "higher" ep name + assert winner.schema["$id"] == "tool-eps-2" + + tool_schema1.priority = 1.1 + (winner,) = plugins.list_from_entry_points() # default: tool has priority + assert winner.schema["$id"] == "tool-eps-1" + + tool_schema1.priority = 0.1 + tool_schema2.priority = 0.2 + multi_schema["priority"] = 0.9 + (winner,) = plugins.list_from_entry_points() # custom higher priority wins + assert winner.schema["$id"] == "multi-eps" + + +def test_broken_multi_plugin(monkeypatch): + fake_eps = _FakeEntryPoints(monkeypatch, "validate_pyproject.multi_schema") + fake_eps(name="broken", value="test_module.f")(lambda: {}["no-such-key"]) + monkeypatch.setattr(plugins, "iterate_entry_points", fake_eps.get) + with pytest.raises(ErrorLoadingPlugin): + plugins.list_from_entry_points() diff --git a/tests/test_repo_review.py b/tests/test_repo_review.py index aaf39643..25339f11 100644 --- a/tests/test_repo_review.py +++ b/tests/test_repo_review.py @@ -45,9 +45,9 @@ def test_valid_example(repo_review_processor, name: str) -> None: @pytest.mark.parametrize("name", ["pdm/invalid-version", 
"pdm/redefining-as-dynamic"]) def test_invalid_example(repo_review_processor, name: str) -> None: processed = repo_review_processor.process(INVALID_EXAMPLES / name) - assert any( - not r.result and r.result is not None for r in processed.results - ), f"{processed.results}" + assert any(not r.result and r.result is not None for r in processed.results), ( + f"{processed.results}" + ) def test_no_distutils(repo_review_processor) -> None: diff --git a/tox.ini b/tox.ini index e8a2d577..45af7d02 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,7 @@ # THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! [tox] -minversion = 3.24 +minversion = 4.22 envlist = default isolated_build = True @@ -16,9 +16,8 @@ passenv = HOME SETUPTOOLS_* VALIDATE_PYPROJECT_* -extras = - all - test +dependency_groups = test +extras = all commands = pytest {posargs} @@ -36,14 +35,14 @@ commands = [testenv:typecheck] +base_python = 3.8 description = Invoke mypy to typecheck the source code changedir = {toxinidir} passenv = TERM # ^ ensure colors -extras = - all - typecheck +extras = all +dependency_groups = typecheck commands = python -m mypy {posargs:--pretty --show-error-context src} @@ -78,11 +77,8 @@ setenv = linkcheck: BUILD = linkcheck passenv = SETUPTOOLS_* -extras = - all -deps = - -r {toxinidir}/docs/requirements.txt - # ^ requirements.txt shared with Read The Docs +extras = all +dependency_groups = docs commands = sphinx-build -v -T -j auto --color -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs}