diff --git a/.cirrus.yml b/.cirrus.yml index 9f0c438d..d7d3d172 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -78,7 +78,7 @@ build_task: container: {image: "python:3.11-bullseye"} clone_script: *clone <<: *task-template - install_script: pip install tox + install_script: pip install tox tox-uv build_script: - tox -e clean,lint,typecheck,build - tar czf dist.tar.gz dist @@ -102,7 +102,7 @@ linux_task: container: {image: "python:3.13-rc-bookworm"} allow_failures: true # RC install_script: - - python -m pip install --upgrade pip tox pipx + - python -m pip install --upgrade pip tox tox-uv pipx <<: *test-template alias: base-test @@ -127,7 +127,7 @@ macos_task: freebsd_task: name: test (freebsd - 3.11) - freebsd_instance: {image_family: freebsd-14-0} + freebsd_instance: {image_family: freebsd-14-2} install_script: - pkg remove -y python lang/python - pkg install -y git python311 py311-pip py311-gdbm py311-sqlite3 py311-tox py311-tomli py311-pipx @@ -149,7 +149,7 @@ windows_task: - choco install -y --no-progress python3 --version=3.12.5 --params "/NoLockdown" - choco install -y --no-progress curl - pip install --upgrade certifi - - python -m pip install -U pip tox pipx + - python -m pip install -U pip tox tox-uv pipx <<: *test-template depends_on: [build, base-test] @@ -168,7 +168,7 @@ linkcheck_task: depends_on: [finalize] allow_failures: true <<: *task-template - install_script: pip install tox + install_script: pip install tox tox-uv download_artifact_script: *download-artifact linkcheck_script: tox --installpkg dist/*.whl -e linkcheck -- -q diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9e06eb5e..695032e0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -34,10 +34,15 @@ jobs: with: {fetch-depth: 0} # deep clone for setuptools-scm - uses: actions/setup-python@v5 with: {python-version: "3.10"} + - uses: astral-sh/setup-uv@v5 - name: Run static analysis and format checkers - run: pipx run --python python3.10 tox -e 
lint,typecheck + run: >- + uvx --with tox-uv + tox -e lint,typecheck - name: Build package distribution files - run: pipx run --python python3.10 tox -e clean,build + run: >- + uvx --with tox-uv + tox -e clean,build - name: Record the path of wheel distribution id: wheel-distribution run: echo "path=$(ls dist/*.whl)" >> $GITHUB_OUTPUT @@ -65,8 +70,8 @@ jobs: strategy: matrix: python: - - "3.8" # oldest Python supported by PSF - - "3.12" # newest Python that is stable + - "3.8" # oldest Python supported by validate-pyproject + - "3.x" # newest Python that is stable platform: - ubuntu-latest - macos-13 @@ -77,6 +82,7 @@ jobs: - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} + - uses: astral-sh/setup-uv@v5 - name: Retrieve pre-built distribution files uses: actions/download-artifact@v4 with: {name: python-distribution-files, path: dist/} @@ -87,7 +93,8 @@ jobs: path: ${{ env.VALIDATE_PYPROJECT_CACHE_REMOTE }} - name: Run tests run: >- - pipx run tox + uvx --with tox-uv + tox --installpkg '${{ needs.prepare.outputs.wheel-distribution }}' -- -n 5 -rFEx --durations 10 --color yes - name: Generate coverage report @@ -118,6 +125,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: {python-version: "3.10"} + - uses: astral-sh/setup-uv@v5 - name: Retrieve pre-built distribution files uses: actions/download-artifact@v4 with: {name: python-distribution-files, path: dist/} @@ -127,4 +135,6 @@ jobs: TWINE_REPOSITORY: pypi TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} - run: pipx run tox -e publish + run: >- + uvx --with tox-uv + tox -e publish diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c78f7cd3..03c1d90e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,13 +20,13 @@ repos: args: ['--fix=auto'] # replace 'auto' with 'lf' to enforce Linux/Mac line endings or 'crlf' for Windows - repo: https://github.com/codespell-project/codespell - rev: v2.3.0 + rev: v2.4.1 
hooks: - id: codespell args: [-w, -L, "THIRDPARTY"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.2 # Ruff version + rev: v0.9.10 # Ruff version hooks: - id: ruff args: [--fix, --show-fixes] @@ -63,7 +63,7 @@ repos: - validate-pyproject[all]>=0.13 - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.29.4 + rev: 0.31.3 hooks: - id: check-metaschema files: \.schema\.json$ @@ -71,7 +71,7 @@ repos: - id: check-github-workflows - repo: https://github.com/scientific-python/cookie - rev: 2024.08.19 + rev: 2025.01.22 hooks: - id: sp-repo-review name: Validate Python repository diff --git a/.readthedocs.yml b/.readthedocs.yml index 5ed1344e..4b1a0f4f 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,23 +5,25 @@ version: 2 build: - os: "ubuntu-22.04" + os: ubuntu-lts-latest tools: - python: "3.10" + python: latest + jobs: + pre_create_environment: + - asdf plugin add uv + - asdf install uv latest + - asdf global uv latest + create_environment: + - uv venv $READTHEDOCS_VIRTUALENV_PATH + install: + # Use a cache dir in the same mount to halve the install time + # pip and uv pip will gain support for groups soon + - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv sync --active --cache-dir $READTHEDOCS_VIRTUALENV_PATH/../../uv_cache --group docs --extra all # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py -# Build documentation with MkDocs -#mkdocs: -# configuration: mkdocs.yml - # Optionally build your docs in additional formats such as PDF formats: - pdf - -python: - install: - - requirements: docs/requirements.txt - - {path: ., extra_requirements: [all], method: pip} diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e5ff7709..952a2b44 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,6 +6,30 @@ Changelog Development Version ==================== +Version 0.24 +============ +* Fix integration with ``SchemaStore`` by loading extra/side schemas, #226, #229. 
+* Add support for loading extra schemas, #226. +* Fixed verify author dict is not empty, #232. +* Added support for ``validate_pyproject.multi_schema`` plugins with extra schemas, #231. +* ``validate-pyproject`` no longer communicates test dependencies via the ``tests`` + extra and documentation dependencies via the ``docs/requirements.txt`` file. + Instead :doc:`pypa:dependency-groups` have been adopted to support CI environments, #227. + + As a result, ``uv``'s high level interface also works for developers. You can use the :pypi:`dependency-groups` + package on PyPI if you need to convert to a classic requirements list. + +Contributions by @henryiii. + + +Version 0.23 +============ +* Validate SPDX license expressions by @cdce8p in #217 + +Version 0.22 +============ +* Prevent injecting defaults and modifying input in-place, by @henryiii in #213 + Version 0.21 ============ * Added support PEP 735, #208 diff --git a/docs/dev-guide.rst b/docs/dev-guide.rst index 8591987a..10c366f5 100644 --- a/docs/dev-guide.rst +++ b/docs/dev-guide.rst @@ -122,6 +122,42 @@ Also notice plugins are activated in a specific order, using Python's built-in ``sorted`` function. +Providing multiple schemas +-------------------------- + +A second system is available for providing multiple schemas in a single plugin. +This is useful when a single plugin is responsible for multiple subtables +under the ``tool`` table, or if you need to provide multiple schemas for +a single subtable. + +To use this system, the plugin function, which does not take any arguments, +should return a dictionary with two keys: ``tools``, which is a dictionary of +tool names to schemas, and optionally ``schemas``, which is a list of schemas +that are not associated with any specific tool, but are loaded via refs from +the other tools. + +When using a :pep:`621`-compliant backend, the following can be added to your +``pyproject.toml`` file: + +..
code-block:: toml + + # in pyproject.toml + [project.entry-points."validate_pyproject.multi_schema"] + arbitrary = "your_package.your_module:your_plugin" + +An example of the plugin structure needed for this system is shown below: + +.. code-block:: python + + def your_plugin() -> dict: + return { + "tools": {"my-tool": my_schema}, + "schemas": [my_extra_schema], + } + +Fragments for schemas are also supported with this system; use ``#`` to split +the tool name and fragment path in the dictionary key. .. _entry-point: https://setuptools.pypa.io/en/stable/userguide/entry_point.html#entry-points .. _JSON Schema: https://json-schema.org/ .. _Python package: https://packaging.python.org/ diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index 96aaa4e4..00000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -# Requirements file for ReadTheDocs, check .readthedocs.yml. -# To build the module reference correctly, make sure every external package -# under `install_requires` in `setup.cfg` is also listed here!
-furo>=2023.08.17 -sphinx>=7.2.2 -sphinx-argparse>=0.3.1 -sphinx-copybutton -sphinx-jsonschema>=1.16.11 -sphinxemoji diff --git a/pyproject.toml b/pyproject.toml index e7c901c3..aab9cc35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,32 @@ all = [ "trove-classifiers>=2021.10.20", ] store = ["validate-pyproject-schema-store"] + +[project.scripts] +validate-pyproject = "validate_pyproject.cli:main" + +[project.entry-points."validate_pyproject.tool_schema"] +setuptools = "validate_pyproject.api:load_builtin_plugin" +distutils = "validate_pyproject.api:load_builtin_plugin" + +[project.entry-points."repo_review.checks"] +validate_pyproject = "validate_pyproject.repo_review:repo_review_checks" + +[project.entry-points."repo_review.families"] +validate_pyproject = "validate_pyproject.repo_review:repo_review_families" + +[dependency-groups] +dev = [ + { include-group = "test" }, +] +docs = [ + "furo>=2023.08.17", + "sphinx>=7.2.2", + "sphinx-argparse>=0.3.1", + "sphinx-copybutton", + "sphinx-jsonschema>=1.16.11", + "sphinxemoji", +] test = [ "setuptools", "pytest>=8.3.3", @@ -49,18 +75,13 @@ typecheck = [ "importlib-resources", ] -[project.scripts] -validate-pyproject = "validate_pyproject.cli:main" - -[project.entry-points."validate_pyproject.tool_schema"] -setuptools = "validate_pyproject.api:load_builtin_plugin" -distutils = "validate_pyproject.api:load_builtin_plugin" - -[project.entry-points."repo_review.checks"] -validate_pyproject = "validate_pyproject.repo_review:repo_review_checks" - -[project.entry-points."repo_review.families"] -validate_pyproject = "validate_pyproject.repo_review:repo_review_families" +[tool.uv] +environments = [ + "python_version >= '3.9'", +] +dev-dependencies = [ + "validate_pyproject[all]", +] [tool.setuptools_scm] version_scheme = "no-guess-dev" @@ -76,8 +97,10 @@ addopts = """ """ norecursedirs = ["dist", "build", ".*"] testpaths = ["src", "tests"] +log_cli_level = "info" [tool.mypy] +python_version = "3.8" 
enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] show_traceback = true warn_unreachable = true diff --git a/src/validate_pyproject/api.py b/src/validate_pyproject/api.py index 7fbdb6d5..2122d811 100644 --- a/src/validate_pyproject/api.py +++ b/src/validate_pyproject/api.py @@ -4,6 +4,7 @@ import json import logging +import sys import typing from enum import Enum from functools import partial, reduce @@ -33,15 +34,18 @@ from .plugins import PluginProtocol -try: # pragma: no cover +if sys.version_info >= (3, 9): # pragma: no cover from importlib.resources import files def read_text(package: Union[str, ModuleType], resource: str) -> str: """:meta private:""" return files(package).joinpath(resource).read_text(encoding="utf-8") -except ImportError: # pragma: no cover - from importlib.resources import read_text +else: # pragma: no cover + from importlib.resources import read_text as read_text # noqa: PLC0414 + + +__all__ = ["Validator"] T = TypeVar("T", bound=Mapping) @@ -109,19 +113,25 @@ def __init__(self, plugins: Sequence["PluginProtocol"] = ()): # Add tools using Plugins for plugin in plugins: - allow_overwrite: Optional[str] = None - if plugin.tool in tool_properties: - _logger.warning(f"{plugin.id} overwrites `tool.{plugin.tool}` schema") - allow_overwrite = plugin.schema.get("$id") + if plugin.tool: + allow_overwrite: Optional[str] = None + if plugin.tool in tool_properties: + _logger.warning( + f"{plugin.id} overwrites `tool.{plugin.tool}` schema" + ) + allow_overwrite = plugin.schema.get("$id") + else: + _logger.info(f"{plugin.id} defines `tool.{plugin.tool}` schema") + compatible = self._ensure_compatibility( + plugin.tool, plugin.schema, allow_overwrite + ) + sid = compatible["$id"] + sref = f"{sid}#{plugin.fragment}" if plugin.fragment else sid + tool_properties[plugin.tool] = {"$ref": sref} + self._schemas[sid] = (f"tool.{plugin.tool}", plugin.id, plugin.schema) else: - _logger.info(f"{plugin.id} defines `tool.{plugin.tool}` 
schema") - compatible = self._ensure_compatibility( - plugin.tool, plugin.schema, allow_overwrite - ) - sid = compatible["$id"] - sref = f"{sid}#{plugin.fragment}" if plugin.fragment else sid - tool_properties[plugin.tool] = {"$ref": sref} - self._schemas[sid] = (f"tool.{plugin.tool}", plugin.id, plugin.schema) + _logger.info(f"Extra schema: {plugin.id}") + self._schemas[plugin.id] = (plugin.id, plugin.id, plugin.schema) self._main_id: str = top_level["$id"] main_schema = Schema(top_level) @@ -139,17 +149,22 @@ def main(self) -> str: return self._main_id def _ensure_compatibility( - self, reference: str, schema: Schema, allow_overwrite: Optional[str] = None + self, + reference: str, + schema: Schema, + allow_overwrite: Optional[str] = None, ) -> Schema: if "$id" not in schema or not schema["$id"]: - raise errors.SchemaMissingId(reference) + raise errors.SchemaMissingId(reference or "") sid = schema["$id"] if sid in self._schemas and sid != allow_overwrite: raise errors.SchemaWithDuplicatedId(sid) version = schema.get("$schema") # Support schemas with missing trailing # (incorrect, but required before 0.15) if version and version.rstrip("#") != self.spec_version.rstrip("#"): - raise errors.InvalidSchemaVersion(reference, version, self.spec_version) + raise errors.InvalidSchemaVersion( + reference or sid, version, self.spec_version + ) return schema def __getitem__(self, key: str) -> Schema: diff --git a/src/validate_pyproject/caching.py b/src/validate_pyproject/caching.py index f09e633c..458bcd28 100644 --- a/src/validate_pyproject/caching.py +++ b/src/validate_pyproject/caching.py @@ -33,7 +33,7 @@ def as_file( cache_path.write_text(f.getvalue(), encoding="utf-8") _logger.debug(f"Caching {arg} into {cache_path}") - return open(cache_path, "rb") # noqa: SIM115 -- not relevant + return open(cache_path, "rb") def path_for(arbitrary_id: str, cache: Optional[PathLike] = None) -> Optional[Path]: diff --git a/src/validate_pyproject/cli.py b/src/validate_pyproject/cli.py 
index 37a59713..a356640c 100644 --- a/src/validate_pyproject/cli.py +++ b/src/validate_pyproject/cli.py @@ -30,7 +30,7 @@ from . import _tomllib as tomllib from .api import Validator from .errors import ValidationError -from .plugins import PluginWrapper +from .plugins import PluginProtocol, PluginWrapper from .plugins import list_from_entry_points as list_plugins_from_entry_points from .remote import RemotePlugin, load_store @@ -124,7 +124,7 @@ class CliParams(NamedTuple): dump_json: bool = False -def __meta__(plugins: Sequence[PluginWrapper]) -> Dict[str, dict]: +def __meta__(plugins: Sequence[PluginProtocol]) -> Dict[str, dict]: """'Hyper parameters' to instruct :mod:`argparse` how to create the CLI""" meta = {k: v.copy() for k, v in META.items()} meta["enable"]["choices"] = {p.tool for p in plugins} @@ -135,9 +135,9 @@ def __meta__(plugins: Sequence[PluginWrapper]) -> Dict[str, dict]: @critical_logging() def parse_args( args: Sequence[str], - plugins: Sequence[PluginWrapper], + plugins: Sequence[PluginProtocol], description: str = "Validate a given TOML file", - get_parser_spec: Callable[[Sequence[PluginWrapper]], Dict[str, dict]] = __meta__, + get_parser_spec: Callable[[Sequence[PluginProtocol]], Dict[str, dict]] = __meta__, params_class: Type[T] = CliParams, # type: ignore[assignment] ) -> T: """Parse command line parameters @@ -167,11 +167,14 @@ def parse_args( return params_class(**params) # type: ignore[call-overload, no-any-return] +Plugins = TypeVar("Plugins", bound=PluginProtocol) + + def select_plugins( - plugins: Sequence[PluginWrapper], + plugins: Sequence[Plugins], enabled: Sequence[str] = (), disabled: Sequence[str] = (), -) -> List[PluginWrapper]: +) -> List[Plugins]: available = list(plugins) if enabled: available = [p for p in available if p.tool in enabled] @@ -219,7 +222,7 @@ def run(args: Sequence[str] = ()) -> int: (for example ``["--verbose", "setup.cfg"]``). 
""" args = args or sys.argv[1:] - plugins: List[PluginWrapper] = list_plugins_from_entry_points() + plugins = list_plugins_from_entry_points() params: CliParams = parse_args(args, plugins) setup_logging(params.loglevel) tool_plugins = [RemotePlugin.from_str(t) for t in params.tool] @@ -263,7 +266,7 @@ def _split_lines(self, text: str, width: int) -> List[str]: return list(chain.from_iterable(wrap(x, width) for x in text.splitlines())) -def plugins_help(plugins: Sequence[PluginWrapper]) -> str: +def plugins_help(plugins: Sequence[PluginProtocol]) -> str: return "\n".join(_format_plugin_help(p) for p in plugins) @@ -273,7 +276,7 @@ def _flatten_str(text: str) -> str: return (text[0].lower() + text[1:]).strip() -def _format_plugin_help(plugin: PluginWrapper) -> str: +def _format_plugin_help(plugin: PluginProtocol) -> str: help_text = plugin.help_text help_text = f": {_flatten_str(help_text)}" if help_text else "" return f"* {plugin.tool!r}{help_text}" diff --git a/src/validate_pyproject/extra_validations.py b/src/validate_pyproject/extra_validations.py index 789411d0..b99d9c91 100644 --- a/src/validate_pyproject/extra_validations.py +++ b/src/validate_pyproject/extra_validations.py @@ -19,8 +19,7 @@ class RedefiningStaticFieldAsDynamic(ValidationError): """ __doc__ = _DESC _URL = ( - "https://packaging.python.org/en/latest/specifications/" - "pyproject-toml/#dynamic" + "https://packaging.python.org/en/latest/specifications/pyproject-toml/#dynamic" ) diff --git a/src/validate_pyproject/plugins/__init__.py b/src/validate_pyproject/plugins/__init__.py index 19ca2c14..cb1dd8dd 100644 --- a/src/validate_pyproject/plugins/__init__.py +++ b/src/validate_pyproject/plugins/__init__.py @@ -7,15 +7,24 @@ import typing from importlib.metadata import EntryPoint, entry_points +from itertools import chain from string import Template from textwrap import dedent -from typing import Any, Callable, Iterable, List, Optional, Protocol +from typing import ( + Any, + Callable, + Generator, 
+ Iterable, + List, + NamedTuple, + Optional, + Protocol, + Union, +) from .. import __version__ from ..types import Plugin, Schema -ENTRYPOINT_GROUP = "validate_pyproject.tool_schema" - class PluginProtocol(Protocol): @property @@ -66,34 +75,63 @@ def __repr__(self) -> str: return f"{self.__class__.__name__}({self.tool!r}, {self.id})" +class StoredPlugin: + def __init__(self, tool: str, schema: Schema): + self._tool, _, self._fragment = tool.partition("#") + self._schema = schema + + @property + def id(self) -> str: + return self.schema.get("id", "MISSING ID") + + @property + def tool(self) -> str: + return self._tool + + @property + def schema(self) -> Schema: + return self._schema + + @property + def fragment(self) -> str: + return self._fragment + + @property + def help_text(self) -> str: + return self.schema.get("description", "") + + def __repr__(self) -> str: + args = [repr(self.tool), self.id] + if self.fragment: + args.append(f"fragment={self.fragment!r}") + return f"{self.__class__.__name__}({', '.join(args)}, )" + + if typing.TYPE_CHECKING: _: PluginProtocol = typing.cast(PluginWrapper, None) -def iterate_entry_points(group: str = ENTRYPOINT_GROUP) -> Iterable[EntryPoint]: - """Produces a generator yielding an EntryPoint object for each plugin registered +def iterate_entry_points(group: str) -> Iterable[EntryPoint]: + """Produces an iterable yielding an EntryPoint object for each plugin registered via ``setuptools`` `entry point`_ mechanism. This method can be used in conjunction with :obj:`load_from_entry_point` to filter - the plugins before actually loading them. + the plugins before actually loading them. The entry points are not + deduplicated. 
""" entries = entry_points() if hasattr(entries, "select"): # pragma: no cover # The select method was introduced in importlib_metadata 3.9 (and Python 3.10) # and the previous dict interface was declared deprecated select = typing.cast( - Any, + Callable[..., Iterable[EntryPoint]], getattr(entries, "select"), # noqa: B009 ) # typecheck gymnastics - entries_: Iterable[EntryPoint] = select(group=group) - else: # pragma: no cover - # TODO: Once Python 3.10 becomes the oldest version supported, this fallback and - # conditional statement can be removed. - entries_ = (plugin for plugin in entries.get(group, [])) - deduplicated = { - e.name: e for e in sorted(entries_, key=lambda e: (e.name, e.value)) - } - return list(deduplicated.values()) + return select(group=group) + # pragma: no cover + # TODO: Once Python 3.10 becomes the oldest version supported, this fallback and + # conditional statement can be removed. + return (plugin for plugin in entries.get(group, [])) def load_from_entry_point(entry_point: EntryPoint) -> PluginWrapper: @@ -105,23 +143,64 @@ def load_from_entry_point(entry_point: EntryPoint) -> PluginWrapper: raise ErrorLoadingPlugin(entry_point=entry_point) from ex +def load_from_multi_entry_point( + entry_point: EntryPoint, +) -> Generator[StoredPlugin, None, None]: + """Carefully load the plugin, raising a meaningful message in case of errors""" + try: + fn = entry_point.load() + output = fn() + except Exception as ex: + raise ErrorLoadingPlugin(entry_point=entry_point) from ex + + for tool, schema in output["tools"].items(): + yield StoredPlugin(tool, schema) + for schema in output.get("schemas", []): + yield StoredPlugin("", schema) + + +class _SortablePlugin(NamedTuple): + priority: int + name: str + plugin: Union[PluginWrapper, StoredPlugin] + + def __lt__(self, other: Any) -> bool: + return (self.plugin.tool or self.plugin.id, self.name, self.priority) < ( + other.plugin.tool or other.plugin.id, + other.name, + other.priority, + ) + + def 
list_from_entry_points( - group: str = ENTRYPOINT_GROUP, filtering: Callable[[EntryPoint], bool] = lambda _: True, -) -> List[PluginWrapper]: +) -> List[Union[PluginWrapper, StoredPlugin]]: """Produces a list of plugin objects for each plugin registered via ``setuptools`` `entry point`_ mechanism. Args: - group: name of the setuptools' entry point group where plugins is being - registered filtering: function returning a boolean deciding if the entry point should be loaded and included (or not) in the final list. A ``True`` return means the plugin should be included. """ - return [ - load_from_entry_point(e) for e in iterate_entry_points(group) if filtering(e) - ] + tool_eps = ( + _SortablePlugin(0, e.name, load_from_entry_point(e)) + for e in iterate_entry_points("validate_pyproject.tool_schema") + if filtering(e) + ) + multi_eps = ( + _SortablePlugin(1, e.name, p) + for e in sorted( + iterate_entry_points("validate_pyproject.multi_schema"), + key=lambda e: e.name, + reverse=True, + ) + for p in load_from_multi_entry_point(e) + if filtering(e) + ) + eps = chain(tool_eps, multi_eps) + dedup = {e.plugin.tool or e.plugin.id: e.plugin for e in sorted(eps, reverse=True)} + return list(dedup.values())[::-1] class ErrorLoadingPlugin(RuntimeError): diff --git a/src/validate_pyproject/pre_compile/cli.py b/src/validate_pyproject/pre_compile/cli.py index 985ba741..46e538e4 100644 --- a/src/validate_pyproject/pre_compile/cli.py +++ b/src/validate_pyproject/pre_compile/cli.py @@ -10,7 +10,7 @@ from typing import Any, Dict, List, Mapping, NamedTuple, Sequence from .. import cli -from ..plugins import PluginWrapper +from ..plugins import PluginProtocol, PluginWrapper from ..plugins import list_from_entry_points as list_plugins_from_entry_points from ..remote import RemotePlugin, load_store from . 
import pre_compile @@ -85,7 +85,9 @@ class CliParams(NamedTuple): store: str = "" -def parser_spec(plugins: Sequence[PluginWrapper]) -> Dict[str, dict]: +def parser_spec( + plugins: Sequence[PluginProtocol], +) -> Dict[str, dict]: common = ("version", "enable", "disable", "verbose", "very_verbose") cli_spec = cli.__meta__(plugins) meta = {k: v.copy() for k, v in META.items()} @@ -101,7 +103,7 @@ def run(args: Sequence[str] = ()) -> int: prms = cli.parse_args(args, plugins, desc, parser_spec, CliParams) cli.setup_logging(prms.loglevel) - tool_plugins = [RemotePlugin.from_str(t) for t in prms.tool] + tool_plugins: List[PluginProtocol] = [RemotePlugin.from_str(t) for t in prms.tool] if prms.store: tool_plugins.extend(load_store(prms.store)) diff --git a/src/validate_pyproject/project_metadata.schema.json b/src/validate_pyproject/project_metadata.schema.json index 00c3d03e..c04e6d3a 100644 --- a/src/validate_pyproject/project_metadata.schema.json +++ b/src/validate_pyproject/project_metadata.schema.json @@ -326,7 +326,11 @@ "format": "idn-email", "description": "MUST be a valid email address" } - } + }, + "anyOf": [ + { "required": ["name"] }, + { "required": ["email"] } + ] }, "entry-point-group": { "$id": "#/definitions/entry-point-group", diff --git a/src/validate_pyproject/remote.py b/src/validate_pyproject/remote.py index 2194c174..adf9c82e 100644 --- a/src/validate_pyproject/remote.py +++ b/src/validate_pyproject/remote.py @@ -67,16 +67,23 @@ def load_store(pyproject_url: str) -> Generator[RemotePlugin, None, None]: fragment, contents = load_from_uri(pyproject_url) if fragment: - _logger.error(f"Must not be called with a fragment, got {fragment!r}") + _logger.error( + f"Must not be called with a fragment, got {fragment!r}" + ) # pragma: no cover table = contents["properties"]["tool"]["properties"] for tool, info in table.items(): if tool in {"setuptools", "distutils"}: pass # built-in elif "$ref" in info: _logger.info(f"Loading {tool} from store: 
{pyproject_url}") - yield RemotePlugin.from_url(tool, info["$ref"]) + rp = RemotePlugin.from_url(tool, info["$ref"]) + yield rp + for values in rp.schema["properties"].values(): + url = values.get("$ref", "") + if url.startswith(("https://", "https://")): + yield RemotePlugin.from_url("", url) else: - _logger.warning(f"{tool!r} does not contain $ref") + _logger.warning(f"{tool!r} does not contain $ref") # pragma: no cover if typing.TYPE_CHECKING: diff --git a/src/validate_pyproject/repo_review.py b/src/validate_pyproject/repo_review.py index 09fc779d..51c93a6c 100644 --- a/src/validate_pyproject/repo_review.py +++ b/src/validate_pyproject/repo_review.py @@ -28,9 +28,9 @@ def repo_review_checks() -> Dict[str, VPP001]: def repo_review_families(pyproject: Dict[str, Any]) -> Dict[str, Dict[str, str]]: has_distutils = "distutils" in pyproject.get("tool", {}) - plugin_names = (ep.name for ep in plugins.iterate_entry_points()) - plugin_list = ( - f"`[tool.{n}]`" for n in plugin_names if n != "distutils" or has_distutils + plugin_list = plugins.list_from_entry_points( + lambda e: e.name != "distutils" or has_distutils ) - descr = f"Checks `[build-system]`, `[project]`, {', '.join(plugin_list)}" + plugin_names = (f"`[tool.{n.tool}]`" for n in plugin_list if n.tool) + descr = f"Checks `[build-system]`, `[project]`, {', '.join(plugin_names)}" return {"validate-pyproject": {"name": "Validate-PyProject", "description": descr}} diff --git a/tests/examples/simple/empty-author.toml b/tests/examples/simple/empty-author.toml new file mode 100644 index 00000000..51e5abdb --- /dev/null +++ b/tests/examples/simple/empty-author.toml @@ -0,0 +1,4 @@ +[project] +name = 'foo' +version = '1.0' +authors = [] diff --git a/tests/invalid-examples/pep621/missing-fields/empty-author.errors.txt b/tests/invalid-examples/pep621/missing-fields/empty-author.errors.txt new file mode 100644 index 00000000..6d2d01cf --- /dev/null +++ b/tests/invalid-examples/pep621/missing-fields/empty-author.errors.txt 
@@ -0,0 +1 @@ +`project.authors[0]` cannot be validated by any definition diff --git a/tests/invalid-examples/pep621/missing-fields/empty-author.toml b/tests/invalid-examples/pep621/missing-fields/empty-author.toml new file mode 100644 index 00000000..770e5d40 --- /dev/null +++ b/tests/invalid-examples/pep621/missing-fields/empty-author.toml @@ -0,0 +1,4 @@ +[project] +name = 'foo' +version = '1.0' +authors = [{}] diff --git a/tests/test_cli.py b/tests/test_cli.py index 7fd30f34..2b15e45d 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -198,6 +198,12 @@ def test_bad_url(tmp_path, capsys): ) +def test_bad_extra_url(tmp_path, capsys): + example = write_example(tmp_path, name="valid-pyproject.toml") + with pytest.raises(ValueError, match="URL must start with 'http:' or 'https:'"): + cli.run(["--tool", "=file://json.schemastore.org/poetry.toml", str(example)]) + + @pytest.mark.skipif(sys.version_info[:2] < (3, 11), reason="requires 3.11+") def test_parser_is_tomllib(): """Make sure Python >= 3.11 uses tomllib instead of tomli""" diff --git a/tests/test_formats.py b/tests/test_formats.py index cdff3ee2..5bcea215 100644 --- a/tests/test_formats.py +++ b/tests/test_formats.py @@ -139,9 +139,10 @@ def test_entrypoint_references_with_extras(): assert formats.python_entrypoint_reference(example) is False -@pytest.mark.parametrize("example", ["module" "invalid-module"]) +@pytest.mark.parametrize("example", ["module", "invalid-module"]) def test_invalid_entrypoint_references(example): - assert formats.python_entrypoint_reference(example) is False + result = example == "module" + assert formats.python_entrypoint_reference(example) is result @pytest.mark.parametrize("example", ["λ", "a", "_"]) diff --git a/tests/test_plugins.py b/tests/test_plugins.py index 1f07dec5..89f3c8bd 100644 --- a/tests/test_plugins.py +++ b/tests/test_plugins.py @@ -1,12 +1,16 @@ # The code in this module is mostly borrowed/adapted from PyScaffold and was originally # published under the MIT 
license # The original PyScaffold license can be found in 'NOTICE.txt' -from importlib.metadata import EntryPoint # pragma: no cover +import functools +import importlib.metadata +import sys +from types import ModuleType +from typing import List import pytest from validate_pyproject import plugins -from validate_pyproject.plugins import ENTRYPOINT_GROUP, ErrorLoadingPlugin +from validate_pyproject.plugins import ErrorLoadingPlugin, PluginWrapper, StoredPlugin EXISTING = ( "setuptools", @@ -18,7 +22,9 @@ def test_load_from_entry_point__error(): # This module does not exist, so Python will have some trouble loading it # EntryPoint(name, value, group) entry = "mypkg.SOOOOO___fake___:activate" - fake = EntryPoint("fake", entry, ENTRYPOINT_GROUP) + fake = importlib.metadata.EntryPoint( + "fake", entry, "validate_pyproject.tool_schema" + ) with pytest.raises(ErrorLoadingPlugin): plugins.load_from_entry_point(fake) @@ -28,7 +34,7 @@ def is_entry_point(ep): def test_iterate_entry_points(): - plugin_iter = plugins.iterate_entry_points() + plugin_iter = plugins.iterate_entry_points("validate_pyproject.tool_schema") assert hasattr(plugin_iter, "__iter__") pluging_list = list(plugin_iter) assert all(is_entry_point(e) for e in pluging_list) @@ -68,3 +74,155 @@ def _fn2(_): pw = plugins.PluginWrapper("name", _fn2) assert pw.help_text == "Help for `name`" + + +class TestStoredPlugin: + def test_empty_help_text(self): + def _fn1(_): + return {} + + pw = plugins.StoredPlugin("name", {}) + assert pw.help_text == "" + + def _fn2(_): + """Help for `${tool}`""" + return {} + + pw = plugins.StoredPlugin("name", {"description": "Help for me"}) + assert pw.help_text == "Help for me" + + +def fake_multi_iterate_entry_points(name: str) -> List[importlib.metadata.EntryPoint]: + if name == "validate_pyproject.multi_schema": + return [ + importlib.metadata.EntryPoint( + name="_", value="test_module:f", group="validate_pyproject.multi_schema" + ) + ] + return [] + + +def 
test_multi_plugins(monkeypatch): + s1 = {"id": "example1"} + s2 = {"id": "example2"} + s3 = {"id": "example3"} + sys.modules["test_module"] = ModuleType("test_module") + sys.modules["test_module"].f = lambda: { + "tools": {"example#frag": s1}, + "schemas": [s2, s3], + } # type: ignore[attr-defined] + monkeypatch.setattr( + plugins, "iterate_entry_points", fake_multi_iterate_entry_points + ) + + lst = plugins.list_from_entry_points() + assert len(lst) == 3 + assert all(e.id.startswith("example") for e in lst) + + (fragmented,) = (e for e in lst if e.tool) + assert fragmented.tool == "example" + assert fragmented.fragment == "frag" + assert fragmented.schema == s1 + + +def fake_both_iterate_entry_points(name: str) -> List[importlib.metadata.EntryPoint]: + if name == "validate_pyproject.multi_schema": + return [ + importlib.metadata.EntryPoint( + name="_", value="test_module:f", group="validate_pyproject.multi_schema" + ) + ] + if name == "validate_pyproject.tool_schema": + return [ + importlib.metadata.EntryPoint( + name="example1", + value="test_module:f1", + group="validate_pyproject.tool_schema", + ), + importlib.metadata.EntryPoint( + name="example3", + value="test_module:f3", + group="validate_pyproject.tool_schema", + ), + ] + return [] + + +def test_combined_plugins(monkeypatch): + s1 = {"id": "example1"} + s2 = {"id": "example2"} + sys.modules["test_module"] = ModuleType("test_module") + sys.modules["test_module"].f = lambda: { + "tools": {"example1": s1, "example2": s2}, + } # type: ignore[attr-defined] + sys.modules["test_module"].f1 = lambda _: {"id": "tool1"} # type: ignore[attr-defined] + sys.modules["test_module"].f3 = lambda _: {"id": "tool3"} # type: ignore[attr-defined] + monkeypatch.setattr(plugins, "iterate_entry_points", fake_both_iterate_entry_points) + + lst = plugins.list_from_entry_points() + assert len(lst) == 3 + + assert lst[0].tool == "example1" + assert isinstance(lst[0], StoredPlugin) + + assert lst[1].tool == "example2" + assert 
isinstance(lst[1], StoredPlugin) + + assert lst[2].tool == "example3" + assert isinstance(lst[2], PluginWrapper) + + +def fake_several_entry_points( + name: str, *, reverse: bool +) -> List[importlib.metadata.EntryPoint]: + if name == "validate_pyproject.multi_schema": + items = [ + importlib.metadata.EntryPoint( + name="a", + value="test_module:f1", + group="validate_pyproject.multi_schema", + ), + importlib.metadata.EntryPoint( + name="b", + value="test_module:f2", + group="validate_pyproject.multi_schema", + ), + ] + return items[::-1] if reverse else items + return [] + + +@pytest.mark.parametrize("reverse", [True, False]) +def test_several_multi_plugins(monkeypatch, reverse): + s1 = {"id": "example1"} + s2 = {"id": "example2"} + s3 = {"id": "example3"} + sys.modules["test_module"] = ModuleType("test_module") + sys.modules["test_module"].f1 = lambda: { + "tools": {"example": s1}, + } # type: ignore[attr-defined] + sys.modules["test_module"].f2 = lambda: { + "tools": {"example": s2, "other": s3}, + } # type: ignore[attr-defined] + monkeypatch.setattr( + plugins, + "iterate_entry_points", + functools.partial(fake_several_entry_points, reverse=reverse), + ) + + (plugin1, plugin2) = plugins.list_from_entry_points() + assert plugin1.id == "example1" + assert plugin2.id == "example3" + + +def test_broken_multi_plugin(monkeypatch): + def broken_ep(): + raise RuntimeError("Broken") + + sys.modules["test_module"] = ModuleType("test_module") + sys.modules["test_module"].f = broken_ep + monkeypatch.setattr( + plugins, "iterate_entry_points", fake_multi_iterate_entry_points + ) + with pytest.raises(ErrorLoadingPlugin): + plugins.list_from_entry_points() diff --git a/tests/test_repo_review.py b/tests/test_repo_review.py index aaf39643..25339f11 100644 --- a/tests/test_repo_review.py +++ b/tests/test_repo_review.py @@ -45,9 +45,9 @@ def test_valid_example(repo_review_processor, name: str) -> None: @pytest.mark.parametrize("name", ["pdm/invalid-version", 
"pdm/redefining-as-dynamic"]) def test_invalid_example(repo_review_processor, name: str) -> None: processed = repo_review_processor.process(INVALID_EXAMPLES / name) - assert any( - not r.result and r.result is not None for r in processed.results - ), f"{processed.results}" + assert any(not r.result and r.result is not None for r in processed.results), ( + f"{processed.results}" + ) def test_no_distutils(repo_review_processor) -> None: diff --git a/tox.ini b/tox.ini index e8a2d577..45af7d02 100644 --- a/tox.ini +++ b/tox.ini @@ -3,7 +3,7 @@ # THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! [tox] -minversion = 3.24 +minversion = 4.22 envlist = default isolated_build = True @@ -16,9 +16,8 @@ passenv = HOME SETUPTOOLS_* VALIDATE_PYPROJECT_* -extras = - all - test +dependency_groups = test +extras = all commands = pytest {posargs} @@ -36,14 +35,14 @@ commands = [testenv:typecheck] +base_python = 3.8 description = Invoke mypy to typecheck the source code changedir = {toxinidir} passenv = TERM # ^ ensure colors -extras = - all - typecheck +extras = all +dependency_groups = typecheck commands = python -m mypy {posargs:--pretty --show-error-context src} @@ -78,11 +77,8 @@ setenv = linkcheck: BUILD = linkcheck passenv = SETUPTOOLS_* -extras = - all -deps = - -r {toxinidir}/docs/requirements.txt - # ^ requirements.txt shared with Read The Docs +extras = all +dependency_groups = docs commands = sphinx-build -v -T -j auto --color -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs}