From f59df4df57fff53e1b7441d15371984ddb8032a3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 22 Sep 2022 23:28:36 +0200 Subject: [PATCH 001/230] Fix typo in type hint --- src/graphql/language/parser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 389913a5..ffdf09b7 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -191,7 +191,7 @@ class Parser: """ _lexer: Lexer - _no_Location: bool + _no_location: bool _allow_legacy_fragment_variables: bool def __init__( From 08bfe9ccede5be7e3bd20ceb4b9759cb8fad5bf7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 10:01:30 +0200 Subject: [PATCH 002/230] Better wording in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 910d0144..64f08801 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. -Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Increases in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. Therefore, we recommend something like `=~ 3.2.0` as version specifier when including GraphQL-core as a dependency. +Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Changes in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. 
Therefore, we recommend something like `=~ 3.2.0` as version specifier when including GraphQL-core as a dependency. ## Documentation From 6c5ab134afd119556d79697f9665c41d51c495c8 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 10:58:19 +0200 Subject: [PATCH 003/230] Update version in README and prepare for next minor release --- .bumpversion.cfg | 2 +- README.md | 2 +- docs/conf.py | 4 ++-- pyproject.toml | 2 +- src/graphql/version.py | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b7e42d6d..4bd8fc1a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.2.2 +current_version = 3.3.0a0 commit = False tag = False diff --git a/README.md b/README.md index 64f08801..471c8303 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ a query language for APIs created by Facebook. ![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) -The current version 3.2.2 of GraphQL-core is up-to-date with GraphQL.js version 16.4.0. +The current version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0. An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is diff --git a/docs/conf.py b/docs/conf.py index d5eca20e..8a0fa01f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -59,9 +59,9 @@ # built documents. # # The short X.Y version. -# version = '3.2' +# version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = '3.2.2' +version = release = '3.3.0a0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index 439a4256..fccf9a58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.2.2" +version = "17.0.0a0" description = """ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 424a9851..b2fbc622 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -4,9 +4,9 @@ __all__ = ["version", "version_info", "version_js", "version_info_js"] -version = "3.2.2" +version = "3.3.0a0" -version_js = "16.4.0" +version_js = "17.0.0a0" _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)") From d2d5030dfbc80202e098f45a84d5ab30060af5e4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 11:42:28 +0200 Subject: [PATCH 004/230] Remove support for Python 3.6 Note that Python 3.6 is not officially supported any more since 2022. --- .github/workflows/test.yml | 2 +- docs/conf.py | 2 +- pyproject.toml | 38 +++--------- setup.py | 5 +- src/graphql/language/character_classes.py | 75 ++++++----------------- src/graphql/pyutils/simple_pub_sub.py | 5 +- tests/test_docs.py | 17 +++-- tests/test_user_registry.py | 5 +- tox.ini | 13 ++-- 9 files changed, 48 insertions(+), 114 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 09278c14..02fe50c6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10', 'pypy3'] + python: ['3.7', '3.8', '3.9', '3.10', 'pypy3'] steps: - uses: actions/checkout@v2 diff --git a/docs/conf.py b/docs/conf.py index 8a0fa01f..f42ff15e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -68,7 +68,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/pyproject.toml b/pyproject.toml index fccf9a58..f7f6a7f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,6 @@ classifiers = [ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -43,47 +42,30 @@ packages = [ ] [tool.poetry.dependencies] -python = "^3.6" +python = "^3.7" typing-extensions = [ - { version = "^4.3", python = ">=3.7,<3.8" }, - { version = "^4.1", python = "<3.7" } + { version = "^4.3", python = "<3.8" }, ] [tool.poetry.dev-dependencies] -pytest = "^6.2" -pytest-asyncio = [ - {version=">=0.19,<1", python = ">=3.7" }, - {version=">=0.16,<0.17", python = "<3.7" }, -] +pytest = "^7.1" +pytest-asyncio = ">=0.19,<1" pytest-benchmark = "^3.4" pytest-cov = "^3.0" pytest-describe = "^2.0" pytest-timeout = "^2.1" -black = [ - {version = "22.8.0", python = ">=3.6.2"}, - {version = "20.8b1", python = "<3.6.2"} -] -flake8 = [ - {version = "^5.0", python = ">=3.6.1"}, - {version = "^4.0", python = "<3.6.1"} -] +black = "22.8.0" +flake8 = "^5.0" mypy = "0.971" -sphinx = "^4.3" +sphinx = "^5.1" sphinx_rtd_theme = ">=1,<2" check-manifest = ">=0.48,<1" bump2version = ">=1.0,<2" -tomli = [ - {version="^2", python = ">=3.7"}, - {version="^1.2", python = "<3.7"} -] -tox = [ - {version = "^3.26", python = ">=3.7"}, - {version = "3.25", python = "<3.7"} -] +tox = "^3.26" [tool.black] -target-version = ['py36', 'py37', 'py38', 'py39', 'py310'] +target-version = ['py37', 'py38', 'py39', 'py310'] [build-system] -requires = ["poetry_core>=1,<2", "setuptools>=59,<70"] +requires = ["poetry_core>=1,<2", "setuptools>=65,<70"] build-backend = "poetry.core.masonry.api" diff --git 
a/setup.py b/setup.py index d307a66a..ebac11ae 100644 --- a/setup.py +++ b/setup.py @@ -26,16 +26,15 @@ "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", ], install_requires=[ - "typing-extensions>=4.2,<5; python_version < '3.8'", + "typing-extensions>=4.3,<5; python_version < '3.8'", ], - python_requires=">=3.6,<4", + python_requires=">=3.7,<4", packages=find_packages("src"), package_dir={"": "src"}, # PEP-561: https://www.python.org/dev/peps/pep-0561/ diff --git a/src/graphql/language/character_classes.py b/src/graphql/language/character_classes.py index 0062a1fc..0f8db7de 100644 --- a/src/graphql/language/character_classes.py +++ b/src/graphql/language/character_classes.py @@ -1,68 +1,33 @@ __all__ = ["is_digit", "is_letter", "is_name_start", "is_name_continue"] -try: - "string".isascii() -except AttributeError: # Python < 3.7 - def is_digit(char: str) -> bool: - """Check whether char is a digit +def is_digit(char: str) -> bool: + """Check whether char is a digit - For internal use by the lexer only. - """ - return "0" <= char <= "9" + For internal use by the lexer only. + """ + return char.isascii() and char.isdigit() - def is_letter(char: str) -> bool: - """Check whether char is a plain ASCII letter - For internal use by the lexer only. - """ - return "a" <= char <= "z" or "A" <= char <= "Z" +def is_letter(char: str) -> bool: + """Check whether char is a plain ASCII letter - def is_name_start(char: str) -> bool: - """Check whether char is allowed at the beginning of a GraphQL name + For internal use by the lexer only. + """ + return char.isascii() and char.isalpha() - For internal use by the lexer only. 
- """ - return "a" <= char <= "z" or "A" <= char <= "Z" or char == "_" - def is_name_continue(char: str) -> bool: - """Check whether char is allowed in the continuation of a GraphQL name +def is_name_start(char: str) -> bool: + """Check whether char is allowed at the beginning of a GraphQL name - For internal use by the lexer only. - """ - return ( - "a" <= char <= "z" - or "A" <= char <= "Z" - or "0" <= char <= "9" - or char == "_" - ) + For internal use by the lexer only. + """ + return char.isascii() and (char.isalpha() or char == "_") -else: - def is_digit(char: str) -> bool: - """Check whether char is a digit +def is_name_continue(char: str) -> bool: + """Check whether char is allowed in the continuation of a GraphQL name - For internal use by the lexer only. - """ - return char.isascii() and char.isdigit() - - def is_letter(char: str) -> bool: - """Check whether char is a plain ASCII letter - - For internal use by the lexer only. - """ - return char.isascii() and char.isalpha() - - def is_name_start(char: str) -> bool: - """Check whether char is allowed at the beginning of a GraphQL name - - For internal use by the lexer only. - """ - return char.isascii() and (char.isalpha() or char == "_") - - def is_name_continue(char: str) -> bool: - """Check whether char is allowed in the continuation of a GraphQL name - - For internal use by the lexer only. - """ - return char.isascii() and (char.isalnum() or char == "_") + For internal use by the lexer only. 
+ """ + return char.isascii() and (char.isalnum() or char == "_") diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 6b1ba050..e65f957e 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -2,10 +2,7 @@ from inspect import isawaitable from typing import Any, AsyncIterator, Callable, Optional, Set -try: - from asyncio import get_running_loop -except ImportError: - from asyncio import get_event_loop as get_running_loop # Python < 3.7 +from asyncio import get_running_loop __all__ = ["SimplePubSub", "SimplePubSubIterator"] diff --git a/tests/test_docs.py b/tests/test_docs.py index 7796ec31..e7120aa4 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -135,16 +135,13 @@ def executing_queries(capsys): async_query = queries.pop(0) assert "asyncio" in async_query and "graphql_sync" not in async_query assert "asyncio.run" in async_query - try: # pragma: no cover - from asyncio import run # noqa: F401 - except ImportError: # Python < 3.7 - assert "ExecutionResult" in expected_result(queries) - else: # pragma: no cover - exec(async_query, scope) - out, err = capsys.readouterr() - assert not err - assert "R2-D2" in out - assert out == expected_result(queries) + from asyncio import run # noqa: F401 + + exec(async_query, scope) + out, err = capsys.readouterr() + assert not err + assert "R2-D2" in out + assert out == expected_result(queries) sync_query = queries.pop(0) assert "graphql_sync" in sync_query and "asyncio" not in sync_query diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index d5f2ba95..7e9395e0 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -10,10 +10,7 @@ from inspect import isawaitable from typing import Any, Dict, List, NamedTuple, Optional -try: - from asyncio import create_task -except ImportError: # Python < 3.7 - create_task = None # type: ignore +from asyncio import create_task from pytest import fixture, 
mark diff --git a/tox.ini b/tox.ini index f9c3a5e2..0a3cd9c8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,10 +1,9 @@ [tox] -envlist = py3{6,7,8,9,10}, black, flake8, mypy, docs, manifest +envlist = py3{7,8,9,10}, black, flake8, mypy, docs, manifest isolated_build = true [gh-actions] python = - 3.6: py36 3.7: py37 3.8: py38 3.9: py39 @@ -26,14 +25,14 @@ commands = basepython = python3.9 deps = mypy==0.971 - pytest>=6.2,<7 + pytest>=7.1,<8 commands = mypy src tests [testenv:docs] basepython = python3.9 deps = - sphinx>=4.3,<5 + sphinx>=5.1,<6 sphinx_rtd_theme>=1,<2 commands = sphinx-build -b html -nEW docs docs/_build/html @@ -46,14 +45,12 @@ commands = [testenv] deps = - py37,py38,py39,py310: pytest>=7.1,<8 - py36: pytest>=6.2,<7 - pytest-asyncio>=0.16,<1 + pytest>=7.1,<8 + pytest-asyncio>=0.19,<1 pytest-benchmark>=3.4,<4 pytest-cov>=3,<4 pytest-describe>=2,<3 pytest-timeout>=2,<3 py37: typing-extensions>=4.3,<5 - py36: typing-extensions>=4.1,<5 commands = pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 146984d41d4405a72f667bdba013f717792d5e57 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 12:17:32 +0200 Subject: [PATCH 005/230] Use postponed evaluation of type annotations --- src/graphql/error/syntax_error.py | 4 +- src/graphql/execution/map_async_iterator.py | 4 +- src/graphql/language/ast.py | 10 +++-- src/graphql/language/location.py | 4 +- src/graphql/pyutils/frozen_dict.py | 6 ++- src/graphql/pyutils/frozen_list.py | 6 ++- src/graphql/pyutils/path.py | 4 +- src/graphql/pyutils/simple_pub_sub.py | 6 ++- src/graphql/type/definition.py | 48 +++++++++++---------- src/graphql/type/directives.py | 4 +- src/graphql/type/schema.py | 8 ++-- src/graphql/utilities/type_info.py | 4 +- src/graphql/version.py | 4 +- tests/execution/test_schema.py | 4 +- 14 files changed, 72 insertions(+), 44 deletions(-) diff --git a/src/graphql/error/syntax_error.py b/src/graphql/error/syntax_error.py index 
9ab41c25..5a8e4091 100644 --- a/src/graphql/error/syntax_error.py +++ b/src/graphql/error/syntax_error.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from typing import TYPE_CHECKING from .graphql_error import GraphQLError @@ -11,7 +13,7 @@ class GraphQLSyntaxError(GraphQLError): """A GraphQLError representing a syntax error.""" - def __init__(self, source: "Source", position: int, description: str) -> None: + def __init__(self, source: Source, position: int, description: str) -> None: super().__init__( f"Syntax Error: {description}", source=source, positions=[position] ) diff --git a/src/graphql/execution/map_async_iterator.py b/src/graphql/execution/map_async_iterator.py index 43400fd3..310a73af 100644 --- a/src/graphql/execution/map_async_iterator.py +++ b/src/graphql/execution/map_async_iterator.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from asyncio import CancelledError, Event, Task, ensure_future, wait from concurrent.futures import FIRST_COMPLETED from inspect import isasyncgen, isawaitable @@ -23,7 +25,7 @@ def __init__(self, iterable: AsyncIterable, callback: Callable) -> None: self.callback = callback self._close_event = Event() - def __aiter__(self) -> "MapAsyncIterator": + def __aiter__(self) -> MapAsyncIterator: """Get the iterator object.""" return self diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index a2470560..c3320c55 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from copy import copy, deepcopy from enum import Enum from typing import Any, Dict, List, Tuple, Optional, Union @@ -137,7 +139,7 @@ def __hash__(self) -> int: (self.kind, self.start, self.end, self.line, self.column, self.value) ) - def __copy__(self) -> "Token": + def __copy__(self) -> Token: """Create a shallow copy of the token""" token = self.__class__( self.kind, @@ -150,7 +152,7 @@ def __copy__(self) -> 
"Token": token.prev = self.prev return token - def __deepcopy__(self, memo: Dict) -> "Token": + def __deepcopy__(self, memo: Dict) -> Token: """Allow only shallow copies to avoid recursion.""" return copy(self) @@ -360,11 +362,11 @@ def __setattr__(self, key: str, value: Any) -> None: del self._hash super().__setattr__(key, value) - def __copy__(self) -> "Node": + def __copy__(self) -> Node: """Create a shallow copy of the node.""" return self.__class__(**{key: getattr(self, key) for key in self.keys}) - def __deepcopy__(self, memo: Dict) -> "Node": + def __deepcopy__(self, memo: Dict) -> Node: """Create a deep copy of the node""" # noinspection PyArgumentList return self.__class__( diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 41aec902..ba479009 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from typing import Any, NamedTuple, TYPE_CHECKING try: @@ -37,7 +39,7 @@ def __ne__(self, other: Any) -> bool: return not self == other -def get_location(source: "Source", position: int) -> SourceLocation: +def get_location(source: Source, position: int) -> SourceLocation: """Get the line and column for a character position in the source. 
Takes a Source and a UTF-8 character offset, and returns the corresponding line and diff --git a/src/graphql/pyutils/frozen_dict.py b/src/graphql/pyutils/frozen_dict.py index 93283596..776e7adb 100644 --- a/src/graphql/pyutils/frozen_dict.py +++ b/src/graphql/pyutils/frozen_dict.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from copy import deepcopy from typing import Dict, TypeVar @@ -28,12 +30,12 @@ def __iadd__(self, value): def __hash__(self): return hash(tuple(self.items())) - def __copy__(self) -> "FrozenDict": + def __copy__(self) -> FrozenDict: return FrozenDict(self) copy = __copy__ - def __deepcopy__(self, memo: Dict) -> "FrozenDict": + def __deepcopy__(self, memo: Dict) -> FrozenDict: return FrozenDict({k: deepcopy(v, memo) for k, v in self.items()}) def clear(self): diff --git a/src/graphql/pyutils/frozen_list.py b/src/graphql/pyutils/frozen_list.py index 01ead7c4..a4553097 100644 --- a/src/graphql/pyutils/frozen_list.py +++ b/src/graphql/pyutils/frozen_list.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from copy import deepcopy from typing import Dict, List, TypeVar @@ -39,10 +41,10 @@ def __imul__(self, value): def __hash__(self): return hash(tuple(self)) - def __copy__(self) -> "FrozenList": + def __copy__(self) -> FrozenList: return FrozenList(self) - def __deepcopy__(self, memo: Dict) -> "FrozenList": + def __deepcopy__(self, memo: Dict) -> FrozenList: return FrozenList(deepcopy(value, memo) for value in self) def append(self, x): diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py index c928d296..5ef7b457 100644 --- a/src/graphql/pyutils/path.py +++ b/src/graphql/pyutils/path.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from typing import Any, List, NamedTuple, Optional, Union __all__ = ["Path"] @@ -13,7 +15,7 @@ class Path(NamedTuple): typename: Optional[str] """name of the parent type to avoid path ambiguity""" - def add_key(self, key: Union[str, 
int], typename: Optional[str] = None) -> "Path": + def add_key(self, key: Union[str, int], typename: Optional[str] = None) -> Path: """Return a new Path containing the given key.""" return Path(self, key, typename) diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index e65f957e..b79cd7e3 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from asyncio import Future, Queue, ensure_future, sleep from inspect import isawaitable from typing import Any, AsyncIterator, Callable, Optional, Set @@ -31,7 +33,7 @@ def emit(self, event: Any) -> bool: def get_subscriber( self, transform: Optional[Callable] = None - ) -> "SimplePubSubIterator": + ) -> SimplePubSubIterator: return SimplePubSubIterator(self, transform) @@ -44,7 +46,7 @@ def __init__(self, pubsub: SimplePubSub, transform: Optional[Callable]) -> None: self.listening = True pubsub.subscribers.add(self.push_value) - def __aiter__(self) -> "SimplePubSubIterator": + def __aiter__(self) -> SimplePubSubIterator: return self async def __anext__(self) -> Any: diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index dbe03ada..170e2c98 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from enum import Enum from typing import ( Any, @@ -280,7 +282,7 @@ def to_kwargs(self) -> GraphQLNamedTypeKwargs: extension_ast_nodes=self.extension_ast_nodes, ) - def __copy__(self) -> "GraphQLNamedType": # pragma: no cover + def __copy__(self) -> GraphQLNamedType: # pragma: no cover return self.__class__(**self.to_kwargs()) @@ -451,7 +453,7 @@ def to_kwargs(self) -> GraphQLScalarTypeKwargs: specified_by_url=self.specified_by_url, ) - def __copy__(self) -> "GraphQLScalarType": # pragma: no cover + def __copy__(self) -> GraphQLScalarType: # pragma: no cover return 
self.__class__(**self.to_kwargs()) @@ -469,7 +471,7 @@ def assert_scalar_type(type_: Any) -> GraphQLScalarType: class GraphQLFieldKwargs(TypedDict, total=False): - type_: "GraphQLOutputType" + type_: GraphQLOutputType args: Optional[GraphQLArgumentMap] resolve: Optional["GraphQLFieldResolver"] subscribe: Optional["GraphQLFieldResolver"] @@ -482,7 +484,7 @@ class GraphQLFieldKwargs(TypedDict, total=False): class GraphQLField: """Definition of a GraphQL field""" - type: "GraphQLOutputType" + type: GraphQLOutputType args: GraphQLArgumentMap resolve: Optional["GraphQLFieldResolver"] subscribe: Optional["GraphQLFieldResolver"] @@ -493,7 +495,7 @@ class GraphQLField: def __init__( self, - type_: "GraphQLOutputType", + type_: GraphQLOutputType, args: Optional[GraphQLArgumentMap] = None, resolve: Optional["GraphQLFieldResolver"] = None, subscribe: Optional["GraphQLFieldResolver"] = None, @@ -577,7 +579,7 @@ def to_kwargs(self) -> GraphQLFieldKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLField": # pragma: no cover + def __copy__(self) -> GraphQLField: # pragma: no cover return self.__class__(**self.to_kwargs()) @@ -593,10 +595,10 @@ class GraphQLResolveInfo(NamedTuple): field_name: str field_nodes: List[FieldNode] - return_type: "GraphQLOutputType" - parent_type: "GraphQLObjectType" + return_type: GraphQLOutputType + parent_type: GraphQLObjectType path: Path - schema: "GraphQLSchema" + schema: GraphQLSchema fragments: Dict[str, FragmentDefinitionNode] root_value: Any operation: OperationDefinitionNode @@ -628,7 +630,7 @@ class GraphQLResolveInfo(NamedTuple): class GraphQLArgumentKwargs(TypedDict, total=False): - type_: "GraphQLInputType" + type_: GraphQLInputType default_value: Any description: Optional[str] deprecation_reason: Optional[str] @@ -640,7 +642,7 @@ class GraphQLArgumentKwargs(TypedDict, total=False): class GraphQLArgument: """Definition of a GraphQL argument""" - type: "GraphQLInputType" + type: GraphQLInputType default_value: Any 
description: Optional[str] deprecation_reason: Optional[str] @@ -650,7 +652,7 @@ class GraphQLArgument: def __init__( self, - type_: "GraphQLInputType", + type_: GraphQLInputType, default_value: Any = Undefined, description: Optional[str] = None, deprecation_reason: Optional[str] = None, @@ -706,7 +708,7 @@ def to_kwargs(self) -> GraphQLArgumentKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLArgument": # pragma: no cover + def __copy__(self) -> GraphQLArgument: # pragma: no cover return self.__class__(**self.to_kwargs()) @@ -798,7 +800,7 @@ def to_kwargs(self) -> GraphQLObjectTypeKwargs: is_type_of=self.is_type_of, ) - def __copy__(self) -> "GraphQLObjectType": # pragma: no cover + def __copy__(self) -> GraphQLObjectType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -932,7 +934,7 @@ def to_kwargs(self) -> GraphQLInterfaceTypeKwargs: resolve_type=self.resolve_type, ) - def __copy__(self) -> "GraphQLInterfaceType": # pragma: no cover + def __copy__(self) -> GraphQLInterfaceType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -1063,7 +1065,7 @@ def to_kwargs(self) -> GraphQLUnionTypeKwargs: super().to_kwargs(), types=self.types, resolve_type=self.resolve_type ) - def __copy__(self) -> "GraphQLUnionType": # pragma: no cover + def __copy__(self) -> GraphQLUnionType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -1203,7 +1205,7 @@ def to_kwargs(self) -> GraphQLEnumTypeKwargs: super().to_kwargs(), values=self.values.copy() ) - def __copy__(self) -> "GraphQLEnumType": # pragma: no cover + def __copy__(self) -> GraphQLEnumType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -1352,7 +1354,7 @@ def to_kwargs(self) -> GraphQLEnumValueKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLEnumValue": # pragma: no cover + def __copy__(self) -> GraphQLEnumValue: # pragma: no cover return 
self.__class__(**self.to_kwargs()) @@ -1446,7 +1448,7 @@ def to_kwargs(self) -> GraphQLInputObjectTypeKwargs: else self.out_type, ) - def __copy__(self) -> "GraphQLInputObjectType": # pragma: no cover + def __copy__(self) -> GraphQLInputObjectType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -1491,7 +1493,7 @@ def assert_input_object_type(type_: Any) -> GraphQLInputObjectType: class GraphQLInputFieldKwargs(TypedDict, total=False): - type_: "GraphQLInputType" + type_: GraphQLInputType default_value: Any description: Optional[str] deprecation_reason: Optional[str] @@ -1503,7 +1505,7 @@ class GraphQLInputFieldKwargs(TypedDict, total=False): class GraphQLInputField: """Definition of a GraphQL input field""" - type: "GraphQLInputType" + type: GraphQLInputType default_value: Any description: Optional[str] deprecation_reason: Optional[str] @@ -1513,7 +1515,7 @@ class GraphQLInputField: def __init__( self, - type_: "GraphQLInputType", + type_: GraphQLInputType, default_value: Any = Undefined, description: Optional[str] = None, deprecation_reason: Optional[str] = None, @@ -1569,7 +1571,7 @@ def to_kwargs(self) -> GraphQLInputFieldKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLInputField": # pragma: no cover + def __copy__(self) -> GraphQLInputField: # pragma: no cover return self.__class__(**self.to_kwargs()) diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 310968d1..de4ce5d0 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from typing import Any, Collection, Dict, Optional, Tuple, cast from ..language import ast, DirectiveLocation @@ -143,7 +145,7 @@ def to_kwargs(self) -> GraphQLDirectiveKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLDirective": # pragma: no cover + def __copy__(self) -> GraphQLDirective: # pragma: no cover return self.__class__(**self.to_kwargs()) 
diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 321659c5..124ea771 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from copy import copy, deepcopy from typing import ( Any, @@ -304,10 +306,10 @@ def to_kwargs(self) -> GraphQLSchemaKwargs: assume_valid=self._validation_errors is not None, ) - def __copy__(self) -> "GraphQLSchema": # pragma: no cover + def __copy__(self) -> GraphQLSchema: # pragma: no cover return self.__class__(**self.to_kwargs()) - def __deepcopy__(self, memo_: Dict) -> "GraphQLSchema": + def __deepcopy__(self, memo_: Dict) -> GraphQLSchema: from ..type import ( is_introspection_type, is_specified_scalar_type, @@ -405,7 +407,7 @@ class TypeSet(Dict[GraphQLNamedType, None]): """An ordered set of types that can be collected starting from initial types.""" @classmethod - def with_initial_types(cls, types: Collection[GraphQLType]) -> "TypeSet": + def with_initial_types(cls, types: Collection[GraphQLType]) -> TypeSet: return cast(TypeSet, super().fromkeys(types)) def collect_referenced_types(self, type_: GraphQLType) -> None: diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 80a4ef3d..4067d7a7 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from typing import Any, Callable, List, Optional, Union, cast from ..language import ( @@ -298,7 +300,7 @@ def get_field_def( class TypeInfoVisitor(Visitor): """A visitor which maintains a provided TypeInfo.""" - def __init__(self, type_info: "TypeInfo", visitor: Visitor): + def __init__(self, type_info: TypeInfo, visitor: Visitor): super().__init__() self.type_info = type_info self.visitor = visitor diff --git a/src/graphql/version.py b/src/graphql/version.py index b2fbc622..e4e8cde0 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py 
@@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + import re from typing import NamedTuple @@ -20,7 +22,7 @@ class VersionInfo(NamedTuple): serial: int @classmethod - def from_str(cls, v: str) -> "VersionInfo": + def from_str(cls, v: str) -> VersionInfo: groups = _re_version.match(v).groups() # type: ignore major, minor, micro = map(int, groups[:3]) level = (groups[3] or "")[:1] diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index 150f1681..9f5918f0 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from graphql.execution import execute_sync from graphql.language import parse from graphql.type import ( @@ -90,7 +92,7 @@ def __init__(self, id: int): # noinspection PyPep8Naming,PyMethodMayBeStatic class Author: - def pic(self, info_, width: int, height: int) -> "Pic": + def pic(self, info_, width: int, height: int) -> Pic: return Pic(123, width, height) @property From a758448800b8c5a43c1de3ff5a1ffe80e54b2bd3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 12:29:45 +0200 Subject: [PATCH 006/230] Fix Sphinx warning --- docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/conf.py b/docs/conf.py index f42ff15e..a9007dfb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -134,6 +134,7 @@ enum.Enum traceback types.TracebackType +TypeMap EnterLeaveVisitor FormattedSourceLocation GraphQLAbstractType From 0060f946b8d3b56c09ae0f7993c88eb83b0589b3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 13:42:17 +0200 Subject: [PATCH 007/230] Move configuration to pyproject.toml where possible --- .coveragerc | 26 ------------------ .mypy.ini | 20 -------------- MANIFEST.in | 2 -- pyproject.toml | 74 ++++++++++++++++++++++++++++++++++++++++++++++---- setup.cfg | 14 ---------- 5 files changed, 68 insertions(+), 68 deletions(-) delete mode 100644 .coveragerc delete mode 100644 
.mypy.ini diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 8bdacb69..00000000 --- a/.coveragerc +++ /dev/null @@ -1,26 +0,0 @@ -[run] -branch = True -source = src -omit = - */conftest.py - */test_*_fuzz.py - */assert_valid_name.py - */cached_property.py - */character_classes.py - */is_iterable.py - */subscription/__init__.py - -[report] -exclude_lines = - pragma: no cover - except ImportError: - \# Python < - raise NotImplementedError - raise TypeError\(f?"Unexpected - assert False, - \s+next\($ - if MYPY: - if TYPE_CHECKING: - ^\s+\.\.\.$ - ^\s+pass$ -ignore_errors = True diff --git a/.mypy.ini b/.mypy.ini deleted file mode 100644 index 4b539ae9..00000000 --- a/.mypy.ini +++ /dev/null @@ -1,20 +0,0 @@ -[mypy] -python_version = 3.9 -check_untyped_defs = True -no_implicit_optional = True -strict_optional = True -warn_redundant_casts = True -warn_unused_ignores = True -disallow_untyped_defs = True - -[mypy-graphql.pyutils.frozen_dict] -disallow_untyped_defs = False - -[mypy-graphql.pyutils.frozen_list] -disallow_untyped_defs = False - -[mypy-graphql.type.introspection] -disallow_untyped_defs = False - -[mypy-tests.*] -disallow_untyped_defs = False diff --git a/MANIFEST.in b/MANIFEST.in index b59b72ad..421aa24f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -6,10 +6,8 @@ include README.md include SECURITY.md include .bumpversion.cfg -include .coveragerc include .editorconfig include .flake8 -include .mypy.ini include .readthedocs.yaml include tox.ini diff --git a/pyproject.toml b/pyproject.toml index f7f6a7f5..a0b304b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,9 +5,7 @@ description = """ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" license = "MIT" -authors = [ - "Christoph Zwerschke " -] +authors = ["Christoph Zwerschke "] readme = "README.md" homepage = "https://github.com/graphql-python/graphql-core" repository = "https://github.com/graphql-python/graphql-core" @@ -28,10 +26,8 
@@ packages = [ { include = "tests", format = "sdist" }, { include = "docs", format = "sdist" }, { include = '.bumpversion.cfg', format = "sdist" }, - { include = '.coveragerc', format = "sdist" }, { include = '.editorconfig', format = "sdist" }, { include = '.flake8', format = "sdist" }, - { include = '.mypy.ini', format = "sdist" }, { include = '.readthedocs.yaml', format = "sdist" }, { include = 'poetry.lock', format = "sdist" }, { include = 'tox.ini', format = "sdist" }, @@ -41,10 +37,13 @@ packages = [ { include = 'SECURITY.md', format = "sdist" } ] +[tool.poetry.urls] +Changelog = "https://github.com/graphql-python/graphql-core/releases" + [tool.poetry.dependencies] python = "^3.7" typing-extensions = [ - { version = "^4.3", python = "<3.8" }, + { version = "^4.3", python = "<3.8" } ] [tool.poetry.dev-dependencies] @@ -63,9 +62,72 @@ check-manifest = ">=0.48,<1" bump2version = ">=1.0,<2" tox = "^3.26" + [tool.black] target-version = ['py37', 'py38', 'py39', 'py310'] +[tool.coverage.run] +branch = true +source = ["src"] +omit = [ + "*/conftest.py", + "*/test_*_fuzz.py", + "*/assert_valid_name.py", + "*/cached_property.py", + "*/character_classes.py", + "*/is_iterable.py", + "*/subscription/__init__.py" +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "except ImportError:", + "# Python <", + "raise NotImplementedError", + 'raise TypeError\(f?"Unexpected', + "assert False,", + '\s+next\($', + "if MYPY:", + "if TYPE_CHECKING:", + '^\s+\.\.\.$', + '^\s+pass$' +] +ignore_errors = true + +[tool.mypy] +python_version = 3.9 +check_untyped_defs = true +no_implicit_optional = true +strict_optional = true +warn_redundant_casts = true +warn_unused_ignores = true +disallow_untyped_defs = true + +[[tool.mypy.overrides]] +module = [ + "graphql.pyutils.frozen_dict", + "graphql.pyutils.frozen_list", + "graphql.type.introspection", + "tests.*" +] +disallow_untyped_defs = false + +[tool.pytest.ini_options] +minversion = "7.1" +# Only run benchmarks as 
tests. +# To actually run the benchmarks, use --benchmark-enable on the command line. +# To run the slow tests (fuzzing), add --run-slow on the command line. +addopts = "--benchmark-disable" +# Deactivate default name pattern for test classes (we use pytest_describe). +python_classes = "PyTest*" +# Handle all async fixtures and tests automatically by asyncio +asyncio_mode = "auto" +# Set a timeout in seconds for aborting tests that run too long. +timeout = "100" +# Ignore config options not (yet) available in older Python versions. +filterwarnings = "ignore::pytest.PytestConfigWarning" + [build-system] requires = ["poetry_core>=1,<2", "setuptools>=65,<70"] build-backend = "poetry.core.masonry.api" diff --git a/setup.cfg b/setup.cfg index dee6b2da..7daeda08 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,17 +3,3 @@ python-tag = py3 [aliases] test = pytest - -[tool:pytest] -# Only run benchmarks as tests. -# To actually run the benchmarks, use --benchmark-enable on the command line. -# To run the slow tests (fuzzing), add --run-slow on the command line. -addopts = --benchmark-disable -# Deactivate default name pattern for test classes (we use pytest_describe). -python_classes = PyTest* -# Handle all async fixtures and tests automatically by asyncio -asyncio_mode = auto -# Set a timeout in seconds for aborting tests that run too long. -timeout = 100 -# Ignore config options not (yet) available in older Python versions. 
-filterwarnings = ignore::pytest.PytestConfigWarning From 5f6a1944cf6923f6249d1575f5b3aad87e629c66 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 14:02:49 +0200 Subject: [PATCH 008/230] Use isort to sort the import statements --- .github/workflows/lint.yml | 2 +- pyproject.toml | 7 +- setup.py | 4 +- src/graphql/__init__.py | 587 ++++++++---------- src/graphql/error/__init__.py | 3 +- src/graphql/error/graphql_error.py | 9 +- src/graphql/error/located_error.py | 1 + src/graphql/error/syntax_error.py | 1 + src/graphql/execution/__init__.py | 11 +- src/graphql/execution/collect_fields.py | 1 + src/graphql/execution/execute.py | 19 +- src/graphql/execution/map_async_iterator.py | 3 +- src/graphql/execution/middleware.py | 2 +- src/graphql/execution/subscribe.py | 14 +- src/graphql/execution/values.py | 5 +- src/graphql/graphql.py | 7 +- src/graphql/language/__init__.py | 152 +++-- src/graphql/language/ast.py | 5 +- src/graphql/language/block_string.py | 3 +- src/graphql/language/directive_locations.py | 1 + src/graphql/language/lexer.py | 3 +- src/graphql/language/location.py | 3 +- src/graphql/language/parser.py | 11 +- src/graphql/language/predicates.py | 3 +- src/graphql/language/printer.py | 3 +- src/graphql/language/source.py | 1 + src/graphql/language/token_kind.py | 1 + src/graphql/language/visitor.py | 2 +- src/graphql/pyutils/__init__.py | 15 +- src/graphql/pyutils/awaitable_or_value.py | 1 + src/graphql/pyutils/cached_property.py | 3 +- src/graphql/pyutils/convert_case.py | 1 + src/graphql/pyutils/description.py | 1 + src/graphql/pyutils/did_you_mean.py | 1 + src/graphql/pyutils/frozen_dict.py | 1 + src/graphql/pyutils/frozen_list.py | 1 + src/graphql/pyutils/group_by.py | 1 + src/graphql/pyutils/identity_func.py | 3 +- src/graphql/pyutils/inspect.py | 13 +- src/graphql/pyutils/is_awaitable.py | 3 +- src/graphql/pyutils/is_iterable.py | 11 +- src/graphql/pyutils/merge_kwargs.py | 3 +- src/graphql/pyutils/natural_compare.py | 2 +- 
src/graphql/pyutils/path.py | 1 + src/graphql/pyutils/simple_pub_sub.py | 4 +- src/graphql/pyutils/suggestion_list.py | 3 +- src/graphql/pyutils/undefined.py | 1 + src/graphql/subscription/__init__.py | 3 +- src/graphql/type/__init__.py | 241 ++++--- src/graphql/type/assert_name.py | 3 +- src/graphql/type/definition.py | 9 +- src/graphql/type/directives.py | 3 +- src/graphql/type/introspection.py | 5 +- src/graphql/type/scalars.py | 3 +- src/graphql/type/schema.py | 11 +- src/graphql/type/validate.py | 21 +- src/graphql/utilities/__init__.py | 97 ++- src/graphql/utilities/assert_valid_name.py | 3 +- src/graphql/utilities/ast_from_value.py | 5 +- src/graphql/utilities/build_ast_schema.py | 3 +- src/graphql/utilities/build_client_schema.py | 7 +- src/graphql/utilities/coerce_input_value.py | 8 +- src/graphql/utilities/concat_ast.py | 1 + src/graphql/utilities/extend_schema.py | 11 +- .../utilities/find_breaking_changes.py | 7 +- .../utilities/get_introspection_query.py | 6 +- src/graphql/utilities/get_operation_ast.py | 1 + .../utilities/get_operation_root_type.py | 3 +- .../utilities/introspection_from_schema.py | 5 +- .../utilities/lexicographic_sort_schema.py | 1 + src/graphql/utilities/print_schema.py | 3 +- src/graphql/utilities/separate_operations.py | 1 + src/graphql/utilities/sort_value_node.py | 1 + .../utilities/strip_ignored_characters.py | 3 +- src/graphql/utilities/type_comparators.py | 1 + src/graphql/utilities/type_from_ast.py | 7 +- src/graphql/utilities/type_info.py | 17 +- src/graphql/utilities/value_from_ast.py | 3 +- .../utilities/value_from_ast_untyped.py | 4 +- src/graphql/validation/__init__.py | 45 +- src/graphql/validation/rules/__init__.py | 1 + .../validation/rules/custom/no_deprecated.py | 1 + .../rules/custom/no_schema_introspection.py | 1 + .../rules/executable_definitions.py | 3 +- .../rules/fields_on_correct_type.py | 7 +- .../rules/fragments_on_composite_types.py | 7 +- .../validation/rules/known_argument_names.py | 5 +- 
.../validation/rules/known_directives.py | 5 +- .../validation/rules/known_fragment_names.py | 1 + .../validation/rules/known_type_names.py | 11 +- .../rules/lone_anonymous_operation.py | 1 + .../rules/lone_schema_definition.py | 3 +- .../validation/rules/no_fragment_cycles.py | 3 +- .../rules/no_undefined_variables.py | 1 + .../validation/rules/no_unused_fragments.py | 3 +- .../validation/rules/no_unused_variables.py | 1 + .../rules/overlapping_fields_can_be_merged.py | 1 + .../rules/possible_fragment_spreads.py | 3 +- .../rules/possible_type_extensions.py | 1 + .../rules/provided_required_arguments.py | 5 +- src/graphql/validation/rules/scalar_leafs.py | 1 + .../rules/single_field_subscriptions.py | 1 + .../rules/unique_argument_definition_names.py | 3 +- .../validation/rules/unique_argument_names.py | 1 + .../rules/unique_directive_names.py | 3 +- .../rules/unique_directives_per_location.py | 1 + .../rules/unique_enum_value_names.py | 7 +- .../rules/unique_field_definition_names.py | 5 +- .../validation/rules/unique_fragment_names.py | 3 +- .../rules/unique_input_field_names.py | 1 + .../rules/unique_operation_names.py | 3 +- .../rules/unique_operation_types.py | 5 +- .../validation/rules/unique_type_names.py | 3 +- .../validation/rules/unique_variable_names.py | 1 + .../rules/values_of_correct_type.py | 9 +- .../rules/variables_are_input_types.py | 1 + .../rules/variables_in_allowed_position.py | 3 +- src/graphql/validation/specified_rules.py | 111 ++-- src/graphql/validation/validate.py | 3 +- src/graphql/validation/validation_context.py | 1 + src/graphql/version.py | 1 + tests/benchmarks/test_build_ast_schema.py | 2 +- tests/benchmarks/test_build_client_schema.py | 2 +- tests/benchmarks/test_execution_async.py | 5 +- tests/benchmarks/test_execution_sync.py | 4 +- .../test_introspection_from_schema.py | 2 +- tests/benchmarks/test_parser.py | 2 +- tests/benchmarks/test_visit.py | 2 +- tests/error/test_graphql_error.py | 6 +- tests/error/test_located_error.py | 
2 +- tests/error/test_print_location.py | 2 +- tests/execution/test_abstract.py | 2 +- tests/execution/test_customize.py | 4 +- tests/execution/test_directives.py | 5 +- tests/execution/test_executor.py | 8 +- tests/execution/test_lists.py | 4 +- tests/execution/test_nonnull.py | 3 +- tests/execution/test_parallel.py | 10 +- tests/execution/test_resolve.py | 4 +- tests/execution/test_subscribe.py | 6 +- tests/execution/test_union_interface.py | 2 +- tests/execution/test_variables.py | 4 +- tests/fixtures/__init__.py | 1 + tests/language/test_ast.py | 2 +- tests/language/test_block_string.py | 4 +- tests/language/test_block_string_fuzz.py | 4 +- tests/language/test_character_classes.py | 6 +- tests/language/test_lexer.py | 1 + tests/language/test_parser.py | 13 +- tests/language/test_predicates.py | 13 +- tests/language/test_schema_parser.py | 3 +- tests/language/test_schema_printer.py | 2 +- tests/language/test_source.py | 3 +- tests/language/test_visitor.py | 16 +- tests/pyutils/test_description.py | 14 +- tests/pyutils/test_frozen_dict.py | 2 +- tests/pyutils/test_identity_func.py | 2 +- tests/pyutils/test_inspect.py | 7 +- tests/pyutils/test_merge_kwargs.py | 1 + tests/pyutils/test_natural_compare.py | 1 + tests/star_wars_data.py | 1 + tests/star_wars_schema.py | 1 + tests/test_docs.py | 1 + tests/test_star_wars_validation.py | 2 +- tests/test_user_registry.py | 13 +- tests/test_version.py | 3 +- tests/type/test_assert_name.py | 2 +- tests/type/test_custom_scalars.py | 1 + tests/type/test_definition.py | 11 +- tests/type/test_directives.py | 2 +- tests/type/test_enum.py | 1 + tests/type/test_extensions.py | 1 + tests/type/test_introspection.py | 2 +- tests/type/test_predicate.py | 7 +- tests/type/test_scalars.py | 6 +- tests/type/test_schema.py | 2 +- tests/type/test_validation.py | 27 +- tests/utilities/test_ast_to_dict.py | 2 +- tests/utilities/test_build_ast_schema.py | 22 +- tests/utilities/test_build_client_schema.py | 2 +- 
tests/utilities/test_concat_ast.py | 2 +- tests/utilities/test_extend_schema.py | 8 +- tests/utilities/test_find_breaking_changes.py | 2 +- .../utilities/test_get_introspection_query.py | 2 +- .../utilities/test_get_operation_root_type.py | 2 +- .../test_introspection_from_schema.py | 6 +- .../test_lexicographic_sort_schema.py | 2 +- tests/utilities/test_print_schema.py | 8 +- .../test_strip_ignored_characters.py | 4 +- tests/utilities/test_type_from_ast.py | 2 +- tests/utilities/test_type_info.py | 2 +- tests/utilities/test_value_from_ast.py | 2 +- .../utilities/test_value_from_ast_untyped.py | 2 +- tests/utils/__init__.py | 1 + tests/utils/dedent.py | 1 + tests/utils/gen_fuzz_strings.py | 1 + tests/validation/__init__.py | 1 + tests/validation/harness.py | 3 +- .../validation/test_executable_definitions.py | 1 + .../validation/test_fields_on_correct_type.py | 3 +- .../test_fragments_on_composite_types.py | 1 + tests/validation/test_known_argument_names.py | 3 +- tests/validation/test_known_directives.py | 3 +- tests/validation/test_known_fragment_names.py | 1 + tests/validation/test_known_type_names.py | 3 +- .../test_lone_anonymous_operation.py | 1 + .../validation/test_lone_schema_definition.py | 1 + tests/validation/test_no_fragment_cycles.py | 1 + .../test_no_schema_introspection.py | 1 + .../validation/test_no_undefined_variables.py | 1 + tests/validation/test_no_unused_fragments.py | 1 + tests/validation/test_no_unused_variables.py | 1 + .../test_overlapping_fields_can_be_merged.py | 1 + .../test_possible_fragment_spreads.py | 1 + .../test_possible_type_extensions.py | 1 + .../test_provided_required_arguments.py | 3 +- tests/validation/test_scalar_leafs.py | 1 + .../test_single_field_subscriptions.py | 1 + .../test_unique_argument_definition_names.py | 1 + .../validation/test_unique_argument_names.py | 2 + .../validation/test_unique_directive_names.py | 1 + .../test_unique_directives_per_location.py | 3 +- .../test_unique_enum_value_names.py | 1 + 
.../test_unique_field_definition_names.py | 1 + .../validation/test_unique_fragment_names.py | 1 + .../test_unique_input_field_names.py | 1 + .../validation/test_unique_operation_names.py | 1 + .../validation/test_unique_operation_types.py | 1 + tests/validation/test_unique_type_names.py | 1 + .../validation/test_unique_variable_names.py | 1 + .../validation/test_values_of_correct_type.py | 3 +- .../test_variables_are_input_types.py | 1 + .../test_variables_in_allowed_position.py | 1 + tox.ini | 8 +- 234 files changed, 1078 insertions(+), 1022 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 626f94c2..373eb176 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -22,4 +22,4 @@ jobs: - name: Run code quality tests with tox run: tox env: - TOXENV: black,flake8,mypy,docs,manifest + TOXENV: black,flake8,isort,mypy,docs,manifest diff --git a/pyproject.toml b/pyproject.toml index a0b304b1..805a581e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,7 @@ pytest-describe = "^2.0" pytest-timeout = "^2.1" black = "22.8.0" flake8 = "^5.0" +isort = "^5.10" mypy = "0.971" sphinx = "^5.1" sphinx_rtd_theme = ">=1,<2" @@ -62,7 +63,6 @@ check-manifest = ">=0.48,<1" bump2version = ">=1.0,<2" tox = "^3.26" - [tool.black] target-version = ['py37', 'py38', 'py39', 'py310'] @@ -95,6 +95,11 @@ exclude_lines = [ ] ignore_errors = true +[tool.isort] +profile = "black" +force_single_line = false +lines_after_imports = 2 + [tool.mypy] python_version = 3.9 check_untyped_defs = true diff --git a/setup.py b/setup.py index ebac11ae..c3743a7a 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,7 @@ from re import search -from setuptools import setup, find_packages + +from setuptools import find_packages, setup + with open("src/graphql/version.py") as version_file: version = search('version = "(.*)"', version_file.read()).group(1) diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index f1b21ab3..6ae64f15 100644 --- 
a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -38,11 +38,6 @@ # The GraphQL-core 3 and GraphQL.js version info. -from .version import version, version_info, version_js, version_info_js - -# Utilities for compatibility with the Python language. -from .pyutils import Undefined, UndefinedType - # Create, format, and print GraphQL errors. from .error import ( GraphQLError, @@ -52,333 +47,305 @@ located_error, ) +# Execute GraphQL documents. +from .execution import ExecutionContext # Types; Subscription; Middleware +from .execution import ( + ExecutionResult, + FormattedExecutionResult, + MapAsyncIterator, + Middleware, + MiddlewareManager, + create_source_event_stream, + default_field_resolver, + default_type_resolver, + execute, + execute_sync, + get_argument_values, + get_directive_values, + get_variable_values, + subscribe, +) + +# The primary entry point into fulfilling a GraphQL request. +from .graphql import graphql, graphql_sync + # Parse and operate on GraphQL language source files. 
from .language import ( - Source, - get_location, - # Print source location - print_location, - print_source_location, - # Lex - Lexer, - TokenKind, - # Parse - parse, - parse_value, - parse_const_value, - parse_type, - # Print - print_ast, - # Visit - visit, - ParallelVisitor, - Visitor, - VisitorAction, - VisitorKeyMap, - BREAK, - SKIP, - REMOVE, + BREAK, # Print source location; Lex; Parse; Print; Visit; Predicates; Types; AST nodes; Each kind of AST node +) +from .language import ( IDLE, + REMOVE, + SKIP, + ArgumentNode, + BooleanValueNode, + ConstArgumentNode, + ConstDirectiveNode, + ConstListValueNode, + ConstObjectFieldNode, + ConstObjectValueNode, + ConstValueNode, + DefinitionNode, + DirectiveDefinitionNode, DirectiveLocation, - # Predicates - is_definition_node, - is_executable_definition_node, - is_selection_node, - is_value_node, - is_const_value_node, - is_type_node, - is_type_system_definition_node, - is_type_definition_node, - is_type_system_extension_node, - is_type_extension_node, - # Types - SourceLocation, - Location, - Token, - # AST nodes - Node, - # Each kind of AST node - NameNode, + DirectiveNode, DocumentNode, - DefinitionNode, + EnumTypeDefinitionNode, + EnumTypeExtensionNode, + EnumValueDefinitionNode, + EnumValueNode, ExecutableDefinitionNode, - OperationDefinitionNode, - OperationType, - VariableDefinitionNode, - VariableNode, - SelectionSetNode, - SelectionNode, + FieldDefinitionNode, FieldNode, - ArgumentNode, - ConstArgumentNode, + FloatValueNode, + FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, - FragmentDefinitionNode, - ValueNode, - ConstValueNode, + InputObjectTypeDefinitionNode, + InputObjectTypeExtensionNode, + InputValueDefinitionNode, + InterfaceTypeDefinitionNode, + InterfaceTypeExtensionNode, IntValueNode, - FloatValueNode, - StringValueNode, - BooleanValueNode, - NullValueNode, - EnumValueNode, + Lexer, + ListTypeNode, ListValueNode, - ConstListValueNode, - ObjectValueNode, - ConstObjectValueNode, - 
ObjectFieldNode, - ConstObjectFieldNode, - DirectiveNode, - ConstDirectiveNode, - TypeNode, + Location, NamedTypeNode, - ListTypeNode, + NameNode, + Node, NonNullTypeNode, - TypeSystemDefinitionNode, - SchemaDefinitionNode, + NullValueNode, + ObjectFieldNode, + ObjectTypeDefinitionNode, + ObjectTypeExtensionNode, + ObjectValueNode, + OperationDefinitionNode, + OperationType, OperationTypeDefinitionNode, - TypeDefinitionNode, + ParallelVisitor, ScalarTypeDefinitionNode, - ObjectTypeDefinitionNode, - FieldDefinitionNode, - InputValueDefinitionNode, - InterfaceTypeDefinitionNode, - UnionTypeDefinitionNode, - EnumTypeDefinitionNode, - EnumValueDefinitionNode, - InputObjectTypeDefinitionNode, - DirectiveDefinitionNode, - TypeSystemExtensionNode, + ScalarTypeExtensionNode, + SchemaDefinitionNode, SchemaExtensionNode, + SelectionNode, + SelectionSetNode, + Source, + SourceLocation, + StringValueNode, + Token, + TokenKind, + TypeDefinitionNode, TypeExtensionNode, - ScalarTypeExtensionNode, - ObjectTypeExtensionNode, - InterfaceTypeExtensionNode, + TypeNode, + TypeSystemDefinitionNode, + TypeSystemExtensionNode, + UnionTypeDefinitionNode, UnionTypeExtensionNode, - EnumTypeExtensionNode, - InputObjectTypeExtensionNode, + ValueNode, + VariableDefinitionNode, + VariableNode, + Visitor, + VisitorAction, + VisitorKeyMap, + get_location, + is_const_value_node, + is_definition_node, + is_executable_definition_node, + is_selection_node, + is_type_definition_node, + is_type_extension_node, + is_type_node, + is_type_system_definition_node, + is_type_system_extension_node, + is_value_node, + parse, + parse_const_value, + parse_type, + parse_value, + print_ast, + print_location, + print_source_location, + visit, ) -# Utilities for operating on GraphQL type schema and parsed sources. -from .utilities import ( - # Produce the GraphQL query recommended for a full schema introspection. - # Accepts optional IntrospectionOptions. 
- get_introspection_query, - IntrospectionQuery, - # Get the target Operation from a Document. - get_operation_ast, - # Get the Type for the target Operation AST. - get_operation_root_type, - # Convert a GraphQLSchema to an IntrospectionQuery. - introspection_from_schema, - # Build a GraphQLSchema from an introspection result. - build_client_schema, - # Build a GraphQLSchema from a parsed GraphQL Schema language AST. - build_ast_schema, - # Build a GraphQLSchema from a GraphQL schema language document. - build_schema, - # Extend an existing GraphQLSchema from a parsed GraphQL Schema language AST. - extend_schema, - # Sort a GraphQLSchema. - lexicographic_sort_schema, - # Print a GraphQLSchema to GraphQL Schema language. - print_schema, - # Print a GraphQLType to GraphQL Schema language. - print_type, - # Prints the built-in introspection schema in the Schema Language format. - print_introspection_schema, - # Create a GraphQLType from a GraphQL language AST. - type_from_ast, - # Convert a language AST to a dictionary. - ast_to_dict, - # Create a Python value from a GraphQL language AST with a Type. - value_from_ast, - # Create a Python value from a GraphQL language AST without a Type. - value_from_ast_untyped, - # Create a GraphQL language AST from a Python value. - ast_from_value, - # A helper to use within recursive-descent visitors which need to be aware of the - # GraphQL type system. - TypeInfo, - TypeInfoVisitor, - # Coerce a Python value to a GraphQL type, or produce errors. - coerce_input_value, - # Concatenates multiple ASTs together. - concat_ast, - # Separate an AST into an AST per Operation. - separate_operations, - # Strip characters that are not significant to the validity or execution - # of a GraphQL document. - strip_ignored_characters, - # Comparators for types - is_equal_type, - is_type_sub_type_of, - do_types_overlap, - # Assert a string is a valid GraphQL name. - assert_valid_name, - # Determine if a string is a valid GraphQL name. 
- is_valid_name_error, - # Compare two GraphQLSchemas and detect breaking changes. - BreakingChange, - BreakingChangeType, - DangerousChange, - DangerousChangeType, - find_breaking_changes, - find_dangerous_changes, -) +# Utilities for compatibility with the Python language. +from .pyutils import Undefined, UndefinedType # Create and operate on GraphQL type definitions and schema. from .type import ( - # Definitions - GraphQLSchema, + DEFAULT_DEPRECATION_REASON, # Definitions; Standard GraphQL Scalars; Int boundaries constants; Built-in Directives defined by the Spec; "Enum" of Type Kinds; Constant Deprecation Reason; GraphQL Types for introspection.; Meta-field definitions.; Predicates; Assertions; Un-modifiers; Thunk handling; Validate GraphQL schema.; Uphold the spec rules about naming; Types; Keyword args +) +from .type import ( + GRAPHQL_MAX_INT, + GRAPHQL_MIN_INT, + GraphQLAbstractType, + GraphQLArgument, + GraphQLArgumentKwargs, + GraphQLArgumentMap, + GraphQLBoolean, + GraphQLCompositeType, + GraphQLDeprecatedDirective, GraphQLDirective, - GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, + GraphQLDirectiveKwargs, GraphQLEnumType, + GraphQLEnumTypeKwargs, + GraphQLEnumValue, + GraphQLEnumValueKwargs, + GraphQLEnumValueMap, + GraphQLField, + GraphQLFieldKwargs, + GraphQLFieldMap, + GraphQLFieldResolver, + GraphQLFloat, + GraphQLID, + GraphQLIncludeDirective, + GraphQLInputField, + GraphQLInputFieldKwargs, + GraphQLInputFieldMap, GraphQLInputObjectType, + GraphQLInputObjectTypeKwargs, + GraphQLInputType, + GraphQLInt, + GraphQLInterfaceType, + GraphQLInterfaceTypeKwargs, + GraphQLIsTypeOfFn, + GraphQLLeafType, GraphQLList, + GraphQLNamedInputType, + GraphQLNamedOutputType, + GraphQLNamedType, + GraphQLNamedTypeKwargs, GraphQLNonNull, - # Standard GraphQL Scalars - specified_scalar_types, - GraphQLInt, - GraphQLFloat, - GraphQLString, - GraphQLBoolean, - GraphQLID, - # Int boundaries constants - GRAPHQL_MAX_INT, - 
GRAPHQL_MIN_INT, - # Built-in Directives defined by the Spec - specified_directives, - GraphQLIncludeDirective, + GraphQLNullableType, + GraphQLObjectType, + GraphQLObjectTypeKwargs, + GraphQLOutputType, + GraphQLResolveInfo, + GraphQLScalarLiteralParser, + GraphQLScalarSerializer, + GraphQLScalarType, + GraphQLScalarTypeKwargs, + GraphQLScalarValueParser, + GraphQLSchema, + GraphQLSchemaKwargs, GraphQLSkipDirective, - GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, - # "Enum" of Type Kinds - TypeKind, - # Constant Deprecation Reason - DEFAULT_DEPRECATION_REASON, - # GraphQL Types for introspection. - introspection_types, - # Meta-field definitions. + GraphQLString, + GraphQLType, + GraphQLTypeResolver, + GraphQLUnionType, + GraphQLUnionTypeKwargs, + GraphQLWrappingType, + ResponsePath, SchemaMetaFieldDef, + Thunk, + ThunkCollection, + ThunkMapping, + TypeKind, TypeMetaFieldDef, TypeNameMetaFieldDef, - # Predicates - is_schema, + assert_abstract_type, + assert_composite_type, + assert_directive, + assert_enum_type, + assert_enum_value_name, + assert_input_object_type, + assert_input_type, + assert_interface_type, + assert_leaf_type, + assert_list_type, + assert_name, + assert_named_type, + assert_non_null_type, + assert_nullable_type, + assert_object_type, + assert_output_type, + assert_scalar_type, + assert_schema, + assert_type, + assert_union_type, + assert_valid_schema, + assert_wrapping_type, + get_named_type, + get_nullable_type, + introspection_types, + is_abstract_type, + is_composite_type, is_directive, - is_type, - is_scalar_type, - is_object_type, - is_interface_type, - is_union_type, is_enum_type, is_input_object_type, - is_list_type, - is_non_null_type, is_input_type, - is_output_type, + is_interface_type, + is_introspection_type, is_leaf_type, - is_composite_type, - is_abstract_type, - is_wrapping_type, - is_nullable_type, + is_list_type, is_named_type, + is_non_null_type, + is_nullable_type, + is_object_type, + is_output_type, 
is_required_argument, is_required_input_field, - is_specified_scalar_type, - is_introspection_type, + is_scalar_type, + is_schema, is_specified_directive, - # Assertions - assert_schema, - assert_directive, - assert_type, - assert_scalar_type, - assert_object_type, - assert_interface_type, - assert_union_type, - assert_enum_type, - assert_input_object_type, - assert_list_type, - assert_non_null_type, - assert_input_type, - assert_output_type, - assert_leaf_type, - assert_composite_type, - assert_abstract_type, - assert_wrapping_type, - assert_nullable_type, - assert_named_type, - # Un-modifiers - get_nullable_type, - get_named_type, - # Thunk handling + is_specified_scalar_type, + is_type, + is_union_type, + is_wrapping_type, resolve_thunk, - # Validate GraphQL schema. + specified_directives, + specified_scalar_types, validate_schema, - assert_valid_schema, - # Uphold the spec rules about naming - assert_name, - assert_enum_value_name, - # Types - GraphQLType, - GraphQLInputType, - GraphQLOutputType, - GraphQLLeafType, - GraphQLCompositeType, - GraphQLAbstractType, - GraphQLWrappingType, - GraphQLNullableType, - GraphQLNamedType, - GraphQLNamedInputType, - GraphQLNamedOutputType, - Thunk, - ThunkCollection, - ThunkMapping, - GraphQLArgument, - GraphQLArgumentMap, - GraphQLEnumValue, - GraphQLEnumValueMap, - GraphQLField, - GraphQLFieldMap, - GraphQLFieldResolver, - GraphQLInputField, - GraphQLInputFieldMap, - GraphQLScalarSerializer, - GraphQLScalarValueParser, - GraphQLScalarLiteralParser, - GraphQLIsTypeOfFn, - GraphQLResolveInfo, - ResponsePath, - GraphQLTypeResolver, - # Keyword args - GraphQLArgumentKwargs, - GraphQLDirectiveKwargs, - GraphQLEnumTypeKwargs, - GraphQLEnumValueKwargs, - GraphQLFieldKwargs, - GraphQLInputFieldKwargs, - GraphQLInputObjectTypeKwargs, - GraphQLInterfaceTypeKwargs, - GraphQLNamedTypeKwargs, - GraphQLObjectTypeKwargs, - GraphQLScalarTypeKwargs, - GraphQLSchemaKwargs, - GraphQLUnionTypeKwargs, +) + +# Utilities for operating on GraphQL 
type schema and parsed sources. +from .utilities import ( + BreakingChange, # Produce the GraphQL query recommended for a full schema introspection.; Accepts optional IntrospectionOptions.; Get the target Operation from a Document.; Get the Type for the target Operation AST.; Convert a GraphQLSchema to an IntrospectionQuery.; Build a GraphQLSchema from an introspection result.; Build a GraphQLSchema from a parsed GraphQL Schema language AST.; Build a GraphQLSchema from a GraphQL schema language document.; Extend an existing GraphQLSchema from a parsed GraphQL Schema language AST.; Sort a GraphQLSchema.; Print a GraphQLSchema to GraphQL Schema language.; Print a GraphQLType to GraphQL Schema language.; Prints the built-in introspection schema in the Schema Language format.; Create a GraphQLType from a GraphQL language AST.; Convert a language AST to a dictionary.; Create a Python value from a GraphQL language AST with a Type.; Create a Python value from a GraphQL language AST without a Type.; Create a GraphQL language AST from a Python value.; A helper to use within recursive-descent visitors which need to be aware of the; GraphQL type system.; Coerce a Python value to a GraphQL type, or produce errors.; Concatenates multiple ASTs together.; Separate an AST into an AST per Operation.; Strip characters that are not significant to the validity or execution; of a GraphQL document.; Comparators for types; Assert a string is a valid GraphQL name.; Determine if a string is a valid GraphQL name.; Compare two GraphQLSchemas and detect breaking changes. 
+) +from .utilities import ( + BreakingChangeType, + DangerousChange, + DangerousChangeType, + IntrospectionQuery, + TypeInfo, + TypeInfoVisitor, + assert_valid_name, + ast_from_value, + ast_to_dict, + build_ast_schema, + build_client_schema, + build_schema, + coerce_input_value, + concat_ast, + do_types_overlap, + extend_schema, + find_breaking_changes, + find_dangerous_changes, + get_introspection_query, + get_operation_ast, + get_operation_root_type, + introspection_from_schema, + is_equal_type, + is_type_sub_type_of, + is_valid_name_error, + lexicographic_sort_schema, + print_introspection_schema, + print_schema, + print_type, + separate_operations, + strip_ignored_characters, + type_from_ast, + value_from_ast, + value_from_ast_untyped, ) # Validate GraphQL queries. from .validation import ( - validate, - ValidationContext, - ValidationRule, - ASTValidationRule, - SDLValidationRule, - # All validation rules in the GraphQL Specification. - specified_rules, - # Individual validation rules. 
+ ASTValidationRule, # All validation rules in the GraphQL Specification.; Individual validation rules.; SDL-specific validation rules; Custom validation rules +) +from .validation import ( ExecutableDefinitionsRule, FieldsOnCorrectTypeRule, FragmentsOnCompositeTypesRule, @@ -387,62 +354,42 @@ KnownFragmentNamesRule, KnownTypeNamesRule, LoneAnonymousOperationRule, + LoneSchemaDefinitionRule, + NoDeprecatedCustomRule, NoFragmentCyclesRule, + NoSchemaIntrospectionCustomRule, NoUndefinedVariablesRule, NoUnusedFragmentsRule, NoUnusedVariablesRule, OverlappingFieldsCanBeMergedRule, PossibleFragmentSpreadsRule, + PossibleTypeExtensionsRule, ProvidedRequiredArgumentsRule, ScalarLeafsRule, + SDLValidationRule, SingleFieldSubscriptionsRule, + UniqueArgumentDefinitionNamesRule, UniqueArgumentNamesRule, + UniqueDirectiveNamesRule, UniqueDirectivesPerLocationRule, + UniqueEnumValueNamesRule, + UniqueFieldDefinitionNamesRule, UniqueFragmentNamesRule, UniqueInputFieldNamesRule, UniqueOperationNamesRule, + UniqueOperationTypesRule, + UniqueTypeNamesRule, UniqueVariableNamesRule, + ValidationContext, + ValidationRule, ValuesOfCorrectTypeRule, VariablesAreInputTypesRule, VariablesInAllowedPositionRule, - # SDL-specific validation rules - LoneSchemaDefinitionRule, - UniqueOperationTypesRule, - UniqueTypeNamesRule, - UniqueEnumValueNamesRule, - UniqueFieldDefinitionNamesRule, - UniqueArgumentDefinitionNamesRule, - UniqueDirectiveNamesRule, - PossibleTypeExtensionsRule, - # Custom validation rules - NoDeprecatedCustomRule, - NoSchemaIntrospectionCustomRule, -) - -# Execute GraphQL documents. 
-from .execution import ( - execute, - execute_sync, - default_field_resolver, - default_type_resolver, - get_argument_values, - get_directive_values, - get_variable_values, - # Types - ExecutionContext, - ExecutionResult, - FormattedExecutionResult, - # Subscription - subscribe, - create_source_event_stream, - MapAsyncIterator, - # Middleware - Middleware, - MiddlewareManager, + specified_rules, + validate, ) +from .version import version, version_info, version_info_js, version_js -# The primary entry point into fulfilling a GraphQL request. -from .graphql import graphql, graphql_sync INVALID = Undefined # deprecated alias diff --git a/src/graphql/error/__init__.py b/src/graphql/error/__init__.py index 20f5a7c5..7b91c8e9 100644 --- a/src/graphql/error/__init__.py +++ b/src/graphql/error/__init__.py @@ -5,10 +5,9 @@ """ from .graphql_error import GraphQLError, GraphQLErrorExtensions, GraphQLFormattedError - +from .located_error import located_error from .syntax_error import GraphQLSyntaxError -from .located_error import located_error __all__ = [ "GraphQLError", diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index 75e34b6a..cc203820 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -1,5 +1,6 @@ from sys import exc_info -from typing import Any, Collection, Dict, List, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Union + try: from typing import TypedDict @@ -8,10 +9,8 @@ if TYPE_CHECKING: from ..language.ast import Node # noqa: F401 - from ..language.location import ( - SourceLocation, - FormattedSourceLocation, - ) # noqa: F401 + from ..language.location import FormattedSourceLocation # noqa: F401 + from ..language.location import SourceLocation from ..language.source import Source # noqa: F401 __all__ = ["GraphQLError", "GraphQLErrorExtensions", "GraphQLFormattedError"] diff --git a/src/graphql/error/located_error.py 
b/src/graphql/error/located_error.py index d295163c..cabd737c 100644 --- a/src/graphql/error/located_error.py +++ b/src/graphql/error/located_error.py @@ -3,6 +3,7 @@ from ..pyutils import inspect from .graphql_error import GraphQLError + if TYPE_CHECKING: from ..language.ast import Node # noqa: F401 diff --git a/src/graphql/error/syntax_error.py b/src/graphql/error/syntax_error.py index 5a8e4091..2b24879d 100644 --- a/src/graphql/error/syntax_error.py +++ b/src/graphql/error/syntax_error.py @@ -4,6 +4,7 @@ from .graphql_error import GraphQLError + if TYPE_CHECKING: from ..language.source import Source # noqa: F401 diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 7317fef2..75121038 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -5,20 +5,21 @@ """ from .execute import ( - execute, - execute_sync, - default_field_resolver, - default_type_resolver, ExecutionContext, ExecutionResult, FormattedExecutionResult, Middleware, + default_field_resolver, + default_type_resolver, + execute, + execute_sync, ) from .map_async_iterator import MapAsyncIterator -from .subscribe import subscribe, create_source_event_stream from .middleware import MiddlewareManager +from .subscribe import create_source_event_stream, subscribe from .values import get_argument_values, get_directive_values, get_variable_values + __all__ = [ "create_source_event_stream", "execute", diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 60ae75fa..f782162a 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -18,6 +18,7 @@ from ..utilities.type_from_ast import type_from_ast from .values import get_directive_values + __all__ = ["collect_fields", "collect_sub_fields"] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 01ec288a..9a7b3108 100644 --- a/src/graphql/execution/execute.py +++ 
b/src/graphql/execution/execute.py @@ -10,12 +10,13 @@ Iterable, List, Optional, - Union, Tuple, Type, + Union, cast, ) + try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -29,26 +30,21 @@ OperationDefinitionNode, OperationType, ) -from ..pyutils import ( - inspect, - is_awaitable as default_is_awaitable, - is_iterable, - AwaitableOrValue, - Path, - Undefined, -) +from ..pyutils import AwaitableOrValue, Path, Undefined, inspect +from ..pyutils import is_awaitable as default_is_awaitable +from ..pyutils import is_iterable from ..type import ( GraphQLAbstractType, GraphQLField, + GraphQLFieldResolver, GraphQLLeafType, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLOutputType, + GraphQLResolveInfo, GraphQLSchema, - GraphQLFieldResolver, GraphQLTypeResolver, - GraphQLResolveInfo, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, @@ -63,6 +59,7 @@ from .middleware import MiddlewareManager from .values import get_argument_values, get_variable_values + __all__ = [ "assert_valid_execution_arguments", "default_field_resolver", diff --git a/src/graphql/execution/map_async_iterator.py b/src/graphql/execution/map_async_iterator.py index 310a73af..76ff7bc5 100644 --- a/src/graphql/execution/map_async_iterator.py +++ b/src/graphql/execution/map_async_iterator.py @@ -3,8 +3,9 @@ from asyncio import CancelledError, Event, Task, ensure_future, wait from concurrent.futures import FIRST_COMPLETED from inspect import isasyncgen, isawaitable -from typing import cast, Any, AsyncIterable, Callable, Optional, Set, Type, Union from types import TracebackType +from typing import Any, AsyncIterable, Callable, Optional, Set, Type, Union, cast + __all__ = ["MapAsyncIterator"] diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index 452b9910..1db54f09 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -1,7 +1,7 @@ from functools import partial, reduce from inspect import 
isfunction +from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple -from typing import Callable, Iterator, Dict, List, Tuple, Any, Optional __all__ = ["MiddlewareManager"] diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index 21fe4db3..b40022fe 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -1,21 +1,14 @@ from inspect import isawaitable -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Dict, - Optional, - Union, -) +from typing import Any, AsyncIterable, AsyncIterator, Dict, Optional, Union from ..error import GraphQLError, located_error from ..execution.collect_fields import collect_fields from ..execution.execute import ( + ExecutionContext, + ExecutionResult, assert_valid_execution_arguments, execute, get_field_def, - ExecutionContext, - ExecutionResult, ) from ..execution.values import get_argument_values from ..language import DocumentNode @@ -23,6 +16,7 @@ from ..type import GraphQLFieldResolver, GraphQLSchema from .map_async_iterator import MapAsyncIterator + __all__ = ["subscribe", "create_source_event_stream"] diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index e11733fc..625d2028 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -5,8 +5,8 @@ DirectiveNode, EnumValueDefinitionNode, ExecutableDefinitionNode, - FieldNode, FieldDefinitionNode, + FieldNode, InputValueDefinitionNode, NullValueNode, SchemaDefinitionNode, @@ -17,7 +17,7 @@ VariableNode, print_ast, ) -from ..pyutils import inspect, print_path_list, Undefined +from ..pyutils import Undefined, inspect, print_path_list from ..type import ( GraphQLDirective, GraphQLField, @@ -30,6 +30,7 @@ from ..utilities.type_from_ast import type_from_ast from ..utilities.value_from_ast import value_from_ast + __all__ = ["get_argument_values", "get_directive_values", "get_variable_values"] diff --git a/src/graphql/graphql.py 
b/src/graphql/graphql.py index c2e804cd..a0265ec3 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -1,10 +1,10 @@ from asyncio import ensure_future from inspect import isawaitable -from typing import Any, Awaitable, Callable, Dict, Optional, Union, Type, cast +from typing import Any, Awaitable, Callable, Dict, Optional, Type, Union, cast from .error import GraphQLError -from .execution import execute, ExecutionResult, ExecutionContext, Middleware -from .language import parse, Source +from .execution import ExecutionContext, ExecutionResult, Middleware, execute +from .language import Source, parse from .pyutils import AwaitableOrValue from .type import ( GraphQLFieldResolver, @@ -13,6 +13,7 @@ validate_schema, ) + __all__ = ["graphql", "graphql_sync"] diff --git a/src/graphql/language/__init__.py b/src/graphql/language/__init__.py index 7d3120f5..86422c48 100644 --- a/src/graphql/language/__init__.py +++ b/src/graphql/language/__init__.py @@ -4,110 +4,102 @@ GraphQL language. 
""" -from .source import Source - -from .location import get_location, SourceLocation, FormattedSourceLocation - -from .print_location import print_location, print_source_location - -from .token_kind import TokenKind - -from .lexer import Lexer - -from .parser import parse, parse_type, parse_value, parse_const_value - -from .printer import print_ast - -from .visitor import ( - visit, - Visitor, - ParallelVisitor, - VisitorAction, - VisitorKeyMap, - BREAK, - SKIP, - REMOVE, - IDLE, -) - +from .ast import ArgumentNode # Each kind of AST node from .ast import ( - Location, - Token, - Node, - # Each kind of AST node - NameNode, - DocumentNode, + BooleanValueNode, + ConstArgumentNode, + ConstDirectiveNode, + ConstListValueNode, + ConstObjectFieldNode, + ConstObjectValueNode, + ConstValueNode, DefinitionNode, + DirectiveDefinitionNode, + DirectiveNode, + DocumentNode, + EnumTypeDefinitionNode, + EnumTypeExtensionNode, + EnumValueDefinitionNode, + EnumValueNode, ExecutableDefinitionNode, - OperationDefinitionNode, - OperationType, - VariableDefinitionNode, - VariableNode, - SelectionSetNode, - SelectionNode, + FieldDefinitionNode, FieldNode, - ArgumentNode, - ConstArgumentNode, + FloatValueNode, + FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, - FragmentDefinitionNode, - ValueNode, - ConstValueNode, + InputObjectTypeDefinitionNode, + InputObjectTypeExtensionNode, + InputValueDefinitionNode, + InterfaceTypeDefinitionNode, + InterfaceTypeExtensionNode, IntValueNode, - FloatValueNode, - StringValueNode, - BooleanValueNode, - NullValueNode, - EnumValueNode, + ListTypeNode, ListValueNode, - ConstListValueNode, - ObjectValueNode, - ConstObjectValueNode, - ObjectFieldNode, - ConstObjectFieldNode, - DirectiveNode, - ConstDirectiveNode, - TypeNode, + Location, NamedTypeNode, - ListTypeNode, + NameNode, + Node, NonNullTypeNode, - TypeSystemDefinitionNode, - SchemaDefinitionNode, + NullValueNode, + ObjectFieldNode, + ObjectTypeDefinitionNode, + 
ObjectTypeExtensionNode, + ObjectValueNode, + OperationDefinitionNode, + OperationType, OperationTypeDefinitionNode, - TypeDefinitionNode, ScalarTypeDefinitionNode, - ObjectTypeDefinitionNode, - FieldDefinitionNode, - InputValueDefinitionNode, - InterfaceTypeDefinitionNode, - UnionTypeDefinitionNode, - EnumTypeDefinitionNode, - EnumValueDefinitionNode, - InputObjectTypeDefinitionNode, - DirectiveDefinitionNode, - TypeSystemExtensionNode, + ScalarTypeExtensionNode, + SchemaDefinitionNode, SchemaExtensionNode, + SelectionNode, + SelectionSetNode, + StringValueNode, + Token, + TypeDefinitionNode, TypeExtensionNode, - ScalarTypeExtensionNode, - ObjectTypeExtensionNode, - InterfaceTypeExtensionNode, + TypeNode, + TypeSystemDefinitionNode, + TypeSystemExtensionNode, + UnionTypeDefinitionNode, UnionTypeExtensionNode, - EnumTypeExtensionNode, - InputObjectTypeExtensionNode, + ValueNode, + VariableDefinitionNode, + VariableNode, ) +from .directive_locations import DirectiveLocation +from .lexer import Lexer +from .location import FormattedSourceLocation, SourceLocation, get_location +from .parser import parse, parse_const_value, parse_type, parse_value from .predicates import ( + is_const_value_node, is_definition_node, is_executable_definition_node, is_selection_node, - is_value_node, - is_const_value_node, + is_type_definition_node, + is_type_extension_node, is_type_node, is_type_system_definition_node, - is_type_definition_node, is_type_system_extension_node, - is_type_extension_node, + is_value_node, ) -from .directive_locations import DirectiveLocation +from .print_location import print_location, print_source_location +from .printer import print_ast +from .source import Source +from .token_kind import TokenKind +from .visitor import ( + BREAK, + IDLE, + REMOVE, + SKIP, + ParallelVisitor, + Visitor, + VisitorAction, + VisitorKeyMap, + visit, +) + __all__ = [ "get_location", diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index c3320c55..aa4fe9e7 
100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -2,11 +2,12 @@ from copy import copy, deepcopy from enum import Enum -from typing import Any, Dict, List, Tuple, Optional, Union +from typing import Any, Dict, List, Optional, Tuple, Union +from ..pyutils import camel_to_snake from .source import Source from .token_kind import TokenKind -from ..pyutils import camel_to_snake + __all__ = [ "Location", diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index 9b9e2fb8..296c0b18 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -1,5 +1,6 @@ -from typing import Collection, List from sys import maxsize +from typing import Collection, List + __all__ = [ "dedent_block_string_lines", diff --git a/src/graphql/language/directive_locations.py b/src/graphql/language/directive_locations.py index dfce34d9..3d88382f 100644 --- a/src/graphql/language/directive_locations.py +++ b/src/graphql/language/directive_locations.py @@ -1,5 +1,6 @@ from enum import Enum + __all__ = ["DirectiveLocation"] diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index f41932bf..3e842fc6 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -3,10 +3,11 @@ from ..error import GraphQLSyntaxError from .ast import Token from .block_string import dedent_block_string_lines -from .character_classes import is_digit, is_name_start, is_name_continue +from .character_classes import is_digit, is_name_continue, is_name_start from .source import Source from .token_kind import TokenKind + __all__ = ["Lexer", "is_punctuator_token_kind"] diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index ba479009..897a8595 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -1,6 +1,7 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, NamedTuple, TYPE_CHECKING +from typing import 
TYPE_CHECKING, Any, NamedTuple + try: from typing import TypedDict diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index ffdf09b7..7cd8e8c2 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -1,6 +1,7 @@ -from typing import Callable, Dict, List, Optional, Union, TypeVar, cast from functools import partial +from typing import Callable, Dict, List, Optional, TypeVar, Union, cast +from ..error import GraphQLError, GraphQLSyntaxError from .ast import ( ArgumentNode, BooleanValueNode, @@ -24,14 +25,14 @@ InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, InputValueDefinitionNode, - IntValueNode, InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, + IntValueNode, ListTypeNode, ListValueNode, Location, - NameNode, NamedTypeNode, + NameNode, NonNullTypeNode, NullValueNode, ObjectFieldNode, @@ -48,6 +49,7 @@ SelectionNode, SelectionSetNode, StringValueNode, + Token, TypeNode, TypeSystemExtensionNode, UnionTypeDefinitionNode, @@ -57,11 +59,10 @@ VariableNode, ) from .directive_locations import DirectiveLocation -from .ast import Token from .lexer import Lexer, is_punctuator_token_kind from .source import Source, is_source from .token_kind import TokenKind -from ..error import GraphQLError, GraphQLSyntaxError + __all__ = ["parse", "parse_type", "parse_value", "parse_const_value"] diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index 24d7c7a5..ebd9e5ea 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -1,8 +1,8 @@ from .ast import ( - Node, DefinitionNode, ExecutableDefinitionNode, ListValueNode, + Node, ObjectValueNode, SchemaExtensionNode, SelectionNode, @@ -14,6 +14,7 @@ VariableNode, ) + __all__ = [ "is_definition_node", "is_executable_definition_node", diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 07ed1b15..7382064c 100644 --- a/src/graphql/language/printer.py +++ 
b/src/graphql/language/printer.py @@ -3,7 +3,8 @@ from ..language.ast import Node, OperationType from .block_string import print_block_string from .print_string import print_string -from .visitor import visit, Visitor +from .visitor import Visitor, visit + __all__ = ["print_ast"] diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index 4143c13a..e4ec0e89 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -2,6 +2,7 @@ from .location import SourceLocation + __all__ = ["Source", "is_source"] diff --git a/src/graphql/language/token_kind.py b/src/graphql/language/token_kind.py index 45f6e82a..543ac22f 100644 --- a/src/graphql/language/token_kind.py +++ b/src/graphql/language/token_kind.py @@ -1,5 +1,6 @@ from enum import Enum + __all__ = ["TokenKind"] diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 996c7194..267cb585 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -14,8 +14,8 @@ from ..pyutils import inspect, snake_to_camel from . import ast +from .ast import QUERY_DOCUMENT_KEYS, Node -from .ast import Node, QUERY_DOCUMENT_KEYS __all__ = [ "Visitor", diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index c156de41..9a01433f 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -8,8 +8,9 @@ These functions are not part of the module interface and are subject to change. 
""" -from .convert_case import camel_to_snake, snake_to_camel +from .awaitable_or_value import AwaitableOrValue from .cached_property import cached_property +from .convert_case import camel_to_snake, snake_to_camel from .description import ( Description, is_description, @@ -17,23 +18,23 @@ unregister_description, ) from .did_you_mean import did_you_mean +from .frozen_dict import FrozenDict +from .frozen_error import FrozenError +from .frozen_list import FrozenList from .group_by import group_by from .identity_func import identity_func from .inspect import inspect from .is_awaitable import is_awaitable from .is_iterable import is_collection, is_iterable -from .natural_compare import natural_comparison_key -from .awaitable_or_value import AwaitableOrValue -from .suggestion_list import suggestion_list -from .frozen_error import FrozenError -from .frozen_list import FrozenList -from .frozen_dict import FrozenDict from .merge_kwargs import merge_kwargs +from .natural_compare import natural_comparison_key from .path import Path from .print_path_list import print_path_list from .simple_pub_sub import SimplePubSub, SimplePubSubIterator +from .suggestion_list import suggestion_list from .undefined import Undefined, UndefinedType + __all__ = [ "camel_to_snake", "snake_to_camel", diff --git a/src/graphql/pyutils/awaitable_or_value.py b/src/graphql/pyutils/awaitable_or_value.py index b497a787..071b1fe2 100644 --- a/src/graphql/pyutils/awaitable_or_value.py +++ b/src/graphql/pyutils/awaitable_or_value.py @@ -1,5 +1,6 @@ from typing import Awaitable, TypeVar, Union + __all__ = ["AwaitableOrValue"] diff --git a/src/graphql/pyutils/cached_property.py b/src/graphql/pyutils/cached_property.py index ddb33725..43e34eda 100644 --- a/src/graphql/pyutils/cached_property.py +++ b/src/graphql/pyutils/cached_property.py @@ -1,4 +1,5 @@ -from typing import Any, Callable, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Callable + if TYPE_CHECKING: standard_cached_property = None diff 
--git a/src/graphql/pyutils/convert_case.py b/src/graphql/pyutils/convert_case.py index 1fe0300f..ea09880e 100644 --- a/src/graphql/pyutils/convert_case.py +++ b/src/graphql/pyutils/convert_case.py @@ -2,6 +2,7 @@ import re + __all__ = ["camel_to_snake", "snake_to_camel"] _re_camel_to_snake = re.compile(r"([a-z]|[A-Z0-9]+)(?=[A-Z])") diff --git a/src/graphql/pyutils/description.py b/src/graphql/pyutils/description.py index ccb858ea..c171979c 100644 --- a/src/graphql/pyutils/description.py +++ b/src/graphql/pyutils/description.py @@ -1,5 +1,6 @@ from typing import Any, Tuple, Union + __all__ = [ "Description", "is_description", diff --git a/src/graphql/pyutils/did_you_mean.py b/src/graphql/pyutils/did_you_mean.py index 88ae6201..e5821aa1 100644 --- a/src/graphql/pyutils/did_you_mean.py +++ b/src/graphql/pyutils/did_you_mean.py @@ -1,5 +1,6 @@ from typing import Optional, Sequence + __all__ = ["did_you_mean"] MAX_LENGTH = 5 diff --git a/src/graphql/pyutils/frozen_dict.py b/src/graphql/pyutils/frozen_dict.py index 776e7adb..f466b88a 100644 --- a/src/graphql/pyutils/frozen_dict.py +++ b/src/graphql/pyutils/frozen_dict.py @@ -5,6 +5,7 @@ from .frozen_error import FrozenError + __all__ = ["FrozenDict"] KT = TypeVar("KT") diff --git a/src/graphql/pyutils/frozen_list.py b/src/graphql/pyutils/frozen_list.py index a4553097..52dd355b 100644 --- a/src/graphql/pyutils/frozen_list.py +++ b/src/graphql/pyutils/frozen_list.py @@ -5,6 +5,7 @@ from .frozen_error import FrozenError + __all__ = ["FrozenList"] diff --git a/src/graphql/pyutils/group_by.py b/src/graphql/pyutils/group_by.py index e9cd0ba3..677aa1c1 100644 --- a/src/graphql/pyutils/group_by.py +++ b/src/graphql/pyutils/group_by.py @@ -1,6 +1,7 @@ from collections import defaultdict from typing import Callable, Collection, Dict, List, TypeVar + __all__ = ["group_by"] K = TypeVar("K") diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index 88a96738..d8efdf26 100644 --- 
a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -1,7 +1,8 @@ -from typing import cast, Any, TypeVar +from typing import Any, TypeVar, cast from .undefined import Undefined + __all__ = ["identity_func"] diff --git a/src/graphql/pyutils/inspect.py b/src/graphql/pyutils/inspect.py index 8fc99dce..78c81e7d 100644 --- a/src/graphql/pyutils/inspect.py +++ b/src/graphql/pyutils/inspect.py @@ -1,18 +1,19 @@ from inspect import ( + isasyncgen, + isasyncgenfunction, isclass, - ismethod, + iscoroutine, + iscoroutinefunction, isfunction, - isgeneratorfunction, isgenerator, - iscoroutinefunction, - iscoroutine, - isasyncgenfunction, - isasyncgen, + isgeneratorfunction, + ismethod, ) from typing import Any, List from .undefined import Undefined + __all__ = ["inspect"] max_recursive_depth = 2 diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index 80c3be3f..5427ab65 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -1,6 +1,7 @@ import inspect -from typing import Any from types import CoroutineType, GeneratorType +from typing import Any + __all__ = ["is_awaitable"] diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index cffbc51c..30417cb7 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -1,12 +1,5 @@ -from typing import ( - Any, - ByteString, - Collection, - Iterable, - Mapping, - Text, - ValuesView, -) +from typing import Any, ByteString, Collection, Iterable, Mapping, Text, ValuesView + __all__ = ["is_collection", "is_iterable"] diff --git a/src/graphql/pyutils/merge_kwargs.py b/src/graphql/pyutils/merge_kwargs.py index 5b7ac202..e557bddc 100644 --- a/src/graphql/pyutils/merge_kwargs.py +++ b/src/graphql/pyutils/merge_kwargs.py @@ -1,4 +1,5 @@ -from typing import cast, Any, Dict, TypeVar +from typing import Any, Dict, TypeVar, cast + T = TypeVar("T") diff --git 
a/src/graphql/pyutils/natural_compare.py b/src/graphql/pyutils/natural_compare.py index 6af02038..11525e84 100644 --- a/src/graphql/pyutils/natural_compare.py +++ b/src/graphql/pyutils/natural_compare.py @@ -1,7 +1,7 @@ import re +from itertools import cycle from typing import Tuple -from itertools import cycle __all__ = ["natural_comparison_key"] diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py index 5ef7b457..19dd79ba 100644 --- a/src/graphql/pyutils/path.py +++ b/src/graphql/pyutils/path.py @@ -2,6 +2,7 @@ from typing import Any, List, NamedTuple, Optional, Union + __all__ = ["Path"] diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index b79cd7e3..5a884c6e 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,11 +1,9 @@ from __future__ import annotations # Python < 3.10 -from asyncio import Future, Queue, ensure_future, sleep +from asyncio import Future, Queue, ensure_future, get_running_loop, sleep from inspect import isawaitable from typing import Any, AsyncIterator, Callable, Optional, Set -from asyncio import get_running_loop - __all__ = ["SimplePubSub", "SimplePubSubIterator"] diff --git a/src/graphql/pyutils/suggestion_list.py b/src/graphql/pyutils/suggestion_list.py index 0020b3ce..7270e3e4 100644 --- a/src/graphql/pyutils/suggestion_list.py +++ b/src/graphql/pyutils/suggestion_list.py @@ -1,7 +1,8 @@ -from typing import Collection, Optional, List +from typing import Collection, List, Optional from .natural_compare import natural_comparison_key + __all__ = ["suggestion_list"] diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index 73dc5314..8a078eba 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -1,5 +1,6 @@ from typing import Any + __all__ = ["Undefined", "UndefinedType"] diff --git a/src/graphql/subscription/__init__.py b/src/graphql/subscription/__init__.py index 
f0c90910..22ffd3bb 100644 --- a/src/graphql/subscription/__init__.py +++ b/src/graphql/subscription/__init__.py @@ -11,6 +11,7 @@ package. In v3.3, the :mod:`graphql.subscription` package will be dropped entirely. """ -from ..execution import subscribe, create_source_event_stream, MapAsyncIterator +from ..execution import MapAsyncIterator, create_source_event_stream, subscribe + __all__ = ["subscribe", "create_source_event_stream", "MapAsyncIterator"] diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index 6a86c0f7..c0c2218c 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -5,170 +5,147 @@ from ..pyutils import Path as ResponsePath -from .schema import ( - # Predicate - is_schema, - # Assertion - assert_schema, - # GraphQL Schema definition - GraphQLSchema, - # Keyword Args - GraphQLSchemaKwargs, -) - # Uphold the spec rules about naming. -from .assert_name import assert_name, assert_enum_value_name - +from .assert_name import assert_enum_value_name, assert_name +from .definition import ( + GraphQLAbstractType, # Predicates; Assertions; Un-modifiers; Thunk handling; Definitions; Type Wrappers; Types; Keyword Args; Resolvers +) from .definition import ( - # Predicates - is_type, - is_scalar_type, - is_object_type, - is_interface_type, - is_union_type, - is_enum_type, - is_input_object_type, - is_list_type, - is_non_null_type, - is_input_type, - is_output_type, - is_leaf_type, - is_composite_type, - is_abstract_type, - is_wrapping_type, - is_nullable_type, - is_named_type, - is_required_argument, - is_required_input_field, - # Assertions - assert_type, - assert_scalar_type, - assert_object_type, - assert_interface_type, - assert_union_type, - assert_enum_type, - assert_input_object_type, - assert_list_type, - assert_non_null_type, - assert_input_type, - assert_output_type, - assert_leaf_type, - assert_composite_type, - assert_abstract_type, - assert_wrapping_type, - assert_nullable_type, - assert_named_type, - # 
Un-modifiers - get_nullable_type, - get_named_type, - # Thunk handling - resolve_thunk, - # Definitions - GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, - GraphQLEnumType, - GraphQLInputObjectType, - # Type Wrappers - GraphQLList, - GraphQLNonNull, - # Types - GraphQLType, - GraphQLInputType, - GraphQLOutputType, - GraphQLLeafType, - GraphQLCompositeType, - GraphQLAbstractType, - GraphQLWrappingType, - GraphQLNullableType, - GraphQLNamedType, - GraphQLNamedInputType, - GraphQLNamedOutputType, - Thunk, - ThunkCollection, - ThunkMapping, GraphQLArgument, + GraphQLArgumentKwargs, GraphQLArgumentMap, + GraphQLCompositeType, + GraphQLEnumType, + GraphQLEnumTypeKwargs, GraphQLEnumValue, + GraphQLEnumValueKwargs, GraphQLEnumValueMap, GraphQLField, + GraphQLFieldKwargs, GraphQLFieldMap, + GraphQLFieldResolver, GraphQLInputField, - GraphQLInputFieldMap, - GraphQLScalarSerializer, - GraphQLScalarValueParser, - GraphQLScalarLiteralParser, - # Keyword Args - GraphQLArgumentKwargs, - GraphQLEnumTypeKwargs, - GraphQLEnumValueKwargs, - GraphQLFieldKwargs, GraphQLInputFieldKwargs, + GraphQLInputFieldMap, + GraphQLInputObjectType, GraphQLInputObjectTypeKwargs, + GraphQLInputType, + GraphQLInterfaceType, GraphQLInterfaceTypeKwargs, + GraphQLIsTypeOfFn, + GraphQLLeafType, + GraphQLList, + GraphQLNamedInputType, + GraphQLNamedOutputType, + GraphQLNamedType, GraphQLNamedTypeKwargs, + GraphQLNonNull, + GraphQLNullableType, + GraphQLObjectType, GraphQLObjectTypeKwargs, + GraphQLOutputType, + GraphQLResolveInfo, + GraphQLScalarLiteralParser, + GraphQLScalarSerializer, + GraphQLScalarType, GraphQLScalarTypeKwargs, - GraphQLUnionTypeKwargs, - # Resolvers - GraphQLFieldResolver, + GraphQLScalarValueParser, + GraphQLType, GraphQLTypeResolver, - GraphQLIsTypeOfFn, - GraphQLResolveInfo, + GraphQLUnionType, + GraphQLUnionTypeKwargs, + GraphQLWrappingType, + Thunk, + ThunkCollection, + ThunkMapping, + assert_abstract_type, + assert_composite_type, + 
assert_enum_type, + assert_input_object_type, + assert_input_type, + assert_interface_type, + assert_leaf_type, + assert_list_type, + assert_named_type, + assert_non_null_type, + assert_nullable_type, + assert_object_type, + assert_output_type, + assert_scalar_type, + assert_type, + assert_union_type, + assert_wrapping_type, + get_named_type, + get_nullable_type, + is_abstract_type, + is_composite_type, + is_enum_type, + is_input_object_type, + is_input_type, + is_interface_type, + is_leaf_type, + is_list_type, + is_named_type, + is_non_null_type, + is_nullable_type, + is_object_type, + is_output_type, + is_required_argument, + is_required_input_field, + is_scalar_type, + is_type, + is_union_type, + is_wrapping_type, + resolve_thunk, ) - from .directives import ( - # Predicate - is_directive, - # Assertion - assert_directive, - # Directives Definition + DEFAULT_DEPRECATION_REASON, # Predicate; Assertion; Directives Definition; Built-in Directives defined by the Spec; Keyword Args; Constant Deprecation Reason +) +from .directives import ( + GraphQLDeprecatedDirective, GraphQLDirective, - # Built-in Directives defined by the Spec - is_specified_directive, - specified_directives, + GraphQLDirectiveKwargs, GraphQLIncludeDirective, GraphQLSkipDirective, - GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, - # Keyword Args - GraphQLDirectiveKwargs, - # Constant Deprecation Reason - DEFAULT_DEPRECATION_REASON, + assert_directive, + is_directive, + is_specified_directive, + specified_directives, +) +from .introspection import ( + SchemaMetaFieldDef, # Predicate; GraphQL Types for introspection.; "Enum" of Type Kinds; Meta-field definitions. +) +from .introspection import ( + TypeKind, + TypeMetaFieldDef, + TypeNameMetaFieldDef, + introspection_types, + is_introspection_type, ) # Common built-in scalar instances. 
from .scalars import ( - # Predicate - is_specified_scalar_type, - # Standard GraphQL Scalars - specified_scalar_types, - GraphQLInt, - GraphQLFloat, - GraphQLString, + GRAPHQL_MAX_INT, # Predicate; Standard GraphQL Scalars; Int boundaries constants +) +from .scalars import ( + GRAPHQL_MIN_INT, GraphQLBoolean, + GraphQLFloat, GraphQLID, - # Int boundaries constants - GRAPHQL_MAX_INT, - GRAPHQL_MIN_INT, + GraphQLInt, + GraphQLString, + is_specified_scalar_type, + specified_scalar_types, ) - -from .introspection import ( - # Predicate - is_introspection_type, - # GraphQL Types for introspection. - introspection_types, - # "Enum" of Type Kinds - TypeKind, - # Meta-field definitions. - SchemaMetaFieldDef, - TypeMetaFieldDef, - TypeNameMetaFieldDef, +from .schema import ( + GraphQLSchema, # Predicate; Assertion; GraphQL Schema definition; Keyword Args ) +from .schema import GraphQLSchemaKwargs, assert_schema, is_schema # Validate GraphQL schema. -from .validate import validate_schema, assert_valid_schema +from .validate import assert_valid_schema, validate_schema + __all__ = [ "is_schema", diff --git a/src/graphql/type/assert_name.py b/src/graphql/type/assert_name.py index 0ab6b2e7..d1fe8dd6 100644 --- a/src/graphql/type/assert_name.py +++ b/src/graphql/type/assert_name.py @@ -1,5 +1,6 @@ from ..error import GraphQLError -from ..language.character_classes import is_name_start, is_name_continue +from ..language.character_classes import is_name_continue, is_name_start + __all__ = ["assert_name", "assert_enum_value_name"] diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 170e2c98..0f5895ac 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -2,6 +2,7 @@ from enum import Enum from typing import ( + TYPE_CHECKING, Any, Callable, Collection, @@ -12,7 +13,6 @@ NamedTuple, Optional, Tuple, - TYPE_CHECKING, Type, TypeVar, Union, @@ -23,8 +23,8 @@ from ..error import GraphQLError from ..language import ( 
EnumTypeDefinitionNode, - EnumValueDefinitionNode, EnumTypeExtensionNode, + EnumValueDefinitionNode, EnumValueNode, FieldDefinitionNode, FieldNode, @@ -49,16 +49,17 @@ from ..pyutils import ( AwaitableOrValue, Path, + Undefined, cached_property, did_you_mean, inspect, is_collection, is_description, suggestion_list, - Undefined, ) from ..utilities.value_from_ast_untyped import value_from_ast_untyped -from .assert_name import assert_name, assert_enum_value_name +from .assert_name import assert_enum_value_name, assert_name + try: from typing import TypedDict diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index de4ce5d0..714b5b67 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -2,12 +2,13 @@ from typing import Any, Collection, Dict, Optional, Tuple, cast -from ..language import ast, DirectiveLocation +from ..language import DirectiveLocation, ast from ..pyutils import inspect, is_description from .assert_name import assert_name from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull, is_input_type from .scalars import GraphQLBoolean, GraphQLString + try: from typing import TypedDict except ImportError: # Python < 3.8 diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 123bf8f9..160c582c 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -1,6 +1,8 @@ from enum import Enum from typing import Mapping +from ..language import DirectiveLocation, print_ast +from ..pyutils import inspect from .definition import ( GraphQLArgument, GraphQLEnumType, @@ -20,10 +22,9 @@ is_scalar_type, is_union_type, ) -from ..language import DirectiveLocation, print_ast -from ..pyutils import inspect from .scalars import GraphQLBoolean, GraphQLString + __all__ = [ "SchemaMetaFieldDef", "TypeKind", diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 2609d095..1271e27b 100644 --- a/src/graphql/type/scalars.py +++ 
b/src/graphql/type/scalars.py @@ -2,7 +2,6 @@ from typing import Any, Mapping from ..error import GraphQLError -from ..pyutils import inspect from ..language.ast import ( BooleanValueNode, FloatValueNode, @@ -11,8 +10,10 @@ ValueNode, ) from ..language.printer import print_ast +from ..pyutils import inspect from .definition import GraphQLNamedType, GraphQLScalarType + __all__ = [ "is_specified_scalar_type", "specified_scalar_types", diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 124ea771..25e18d8b 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -15,16 +15,16 @@ ) from ..error import GraphQLError -from ..language import ast, OperationType +from ..language import OperationType, ast from ..pyutils import inspect, is_collection, is_description from .definition import ( GraphQLAbstractType, - GraphQLInterfaceType, GraphQLInputObjectType, + GraphQLInterfaceType, GraphQLNamedType, GraphQLObjectType, - GraphQLUnionType, GraphQLType, + GraphQLUnionType, GraphQLWrappingType, get_named_type, is_input_object_type, @@ -33,9 +33,10 @@ is_union_type, is_wrapping_type, ) -from .directives import GraphQLDirective, specified_directives, is_directive +from .directives import GraphQLDirective, is_directive, specified_directives from .introspection import introspection_types + try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -312,8 +313,8 @@ def __copy__(self) -> GraphQLSchema: # pragma: no cover def __deepcopy__(self, memo_: Dict) -> GraphQLSchema: from ..type import ( is_introspection_type, - is_specified_scalar_type, is_specified_directive, + is_specified_scalar_type, ) type_map: TypeMap = { diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 555bc30d..f49a5123 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -1,18 +1,7 @@ from operator import attrgetter, itemgetter -from typing import ( - Any, - Collection, - Dict, - List, - Optional, - Set, - 
Tuple, - Union, - cast, -) +from typing import Any, Collection, Dict, List, Optional, Set, Tuple, Union, cast from ..error import GraphQLError -from ..pyutils import inspect from ..language import ( DirectiveNode, InputValueDefinitionNode, @@ -22,6 +11,8 @@ SchemaDefinitionNode, SchemaExtensionNode, ) +from ..pyutils import inspect +from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of from .definition import ( GraphQLEnumType, GraphQLInputField, @@ -37,15 +28,15 @@ is_non_null_type, is_object_type, is_output_type, - is_union_type, is_required_argument, is_required_input_field, + is_union_type, ) -from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of -from .directives import is_directive, GraphQLDeprecatedDirective +from .directives import GraphQLDeprecatedDirective, is_directive from .introspection import is_introspection_type from .schema import GraphQLSchema, assert_schema + __all__ = ["validate_schema", "assert_valid_schema"] diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index 1571485b..7255b17c 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -4,62 +4,58 @@ the GraphQL language and type objects. """ -# Produce the GraphQL query recommended for a full schema introspection. -from .get_introspection_query import get_introspection_query, IntrospectionQuery +# Assert that a string is a valid GraphQL name. +from .assert_valid_name import assert_valid_name, is_valid_name_error -# Get the target Operation from a Document. -from .get_operation_ast import get_operation_ast +# Create a GraphQL language AST from a Python value. +from .ast_from_value import ast_from_value -# Get the Type for the target Operation AST. -from .get_operation_root_type import get_operation_root_type +# Convert a language AST to a dictionary. +from .ast_to_dict import ast_to_dict -# Convert a GraphQLSchema to an IntrospectionQuery. 
-from .introspection_from_schema import introspection_from_schema +# Build a GraphQLSchema from GraphQL Schema language. +from .build_ast_schema import build_ast_schema, build_schema # Build a GraphQLSchema from an introspection result. from .build_client_schema import build_client_schema -# Build a GraphQLSchema from GraphQL Schema language. -from .build_ast_schema import build_ast_schema, build_schema +# Coerce a Python value to a GraphQL type, or produce errors. +from .coerce_input_value import coerce_input_value + +# Concatenate multiple ASTs together. +from .concat_ast import concat_ast # Extend an existing GraphQLSchema from a parsed GraphQL Schema language AST. from .extend_schema import extend_schema -# Sort a GraphQLSchema. -from .lexicographic_sort_schema import lexicographic_sort_schema - -# Print a GraphQLSchema to GraphQL Schema language. -from .print_schema import ( - print_introspection_schema, - print_schema, - print_type, - print_value, # deprecated +# Compare two GraphQLSchemas and detect breaking changes. +from .find_breaking_changes import ( + BreakingChange, + BreakingChangeType, + DangerousChange, + DangerousChangeType, + find_breaking_changes, + find_dangerous_changes, ) -# Create a GraphQLType from a GraphQL language AST. -from .type_from_ast import type_from_ast - -# Convert a language AST to a dictionary. -from .ast_to_dict import ast_to_dict - -# Create a Python value from a GraphQL language AST with a type. -from .value_from_ast import value_from_ast +# Produce the GraphQL query recommended for a full schema introspection. +from .get_introspection_query import IntrospectionQuery, get_introspection_query -# Create a Python value from a GraphQL language AST without a type. -from .value_from_ast_untyped import value_from_ast_untyped +# Get the target Operation from a Document. +from .get_operation_ast import get_operation_ast -# Create a GraphQL language AST from a Python value. 
-from .ast_from_value import ast_from_value +# Get the Type for the target Operation AST. +from .get_operation_root_type import get_operation_root_type -# A helper to use within recursive-descent visitors which need to be aware of -# the GraphQL type system -from .type_info import TypeInfo, TypeInfoVisitor +# Convert a GraphQLSchema to an IntrospectionQuery. +from .introspection_from_schema import introspection_from_schema -# Coerce a Python value to a GraphQL type, or produce errors. -from .coerce_input_value import coerce_input_value +# Sort a GraphQLSchema. +from .lexicographic_sort_schema import lexicographic_sort_schema -# Concatenate multiple ASTs together. -from .concat_ast import concat_ast +# Print a GraphQLSchema to GraphQL Schema language. +from .print_schema import print_value # deprecated +from .print_schema import print_introspection_schema, print_schema, print_type # Separate an AST into an AST per Operation. from .separate_operations import separate_operations @@ -69,20 +65,21 @@ from .strip_ignored_characters import strip_ignored_characters # Comparators for types -from .type_comparators import is_equal_type, is_type_sub_type_of, do_types_overlap +from .type_comparators import do_types_overlap, is_equal_type, is_type_sub_type_of -# Assert that a string is a valid GraphQL name. -from .assert_valid_name import assert_valid_name, is_valid_name_error +# Create a GraphQLType from a GraphQL language AST. +from .type_from_ast import type_from_ast + +# A helper to use within recursive-descent visitors which need to be aware of +# the GraphQL type system +from .type_info import TypeInfo, TypeInfoVisitor + +# Create a Python value from a GraphQL language AST with a type. +from .value_from_ast import value_from_ast + +# Create a Python value from a GraphQL language AST without a type. +from .value_from_ast_untyped import value_from_ast_untyped -# Compare two GraphQLSchemas and detect breaking changes. 
-from .find_breaking_changes import ( - BreakingChange, - BreakingChangeType, - DangerousChange, - DangerousChangeType, - find_breaking_changes, - find_dangerous_changes, -) __all__ = [ "BreakingChange", diff --git a/src/graphql/utilities/assert_valid_name.py b/src/graphql/utilities/assert_valid_name.py index e727a482..4019d73e 100644 --- a/src/graphql/utilities/assert_valid_name.py +++ b/src/graphql/utilities/assert_valid_name.py @@ -1,7 +1,8 @@ from typing import Optional -from ..type.assert_name import assert_name from ..error import GraphQLError +from ..type.assert_name import assert_name + __all__ = ["assert_valid_name", "is_valid_name_error"] diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index 208d9d95..f604c1c5 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -15,11 +15,11 @@ StringValueNode, ValueNode, ) -from ..pyutils import inspect, is_iterable, Undefined +from ..pyutils import Undefined, inspect, is_iterable from ..type import ( GraphQLID, - GraphQLInputType, GraphQLInputObjectType, + GraphQLInputType, GraphQLList, GraphQLNonNull, is_enum_type, @@ -29,6 +29,7 @@ is_non_null_type, ) + __all__ = ["ast_from_value"] _re_integer_string = re.compile("^-?(?:0|[1-9][0-9]*)$") diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index 30990f49..d2597e0e 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -1,4 +1,4 @@ -from typing import cast, Union +from typing import Union, cast from ..language import DocumentNode, Source, parse from ..type import ( @@ -9,6 +9,7 @@ ) from .extend_schema import extend_schema_impl + __all__ = [ "build_ast_schema", "build_schema", diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index b9ad32ed..e1c128fb 100644 --- a/src/graphql/utilities/build_client_schema.py +++ 
b/src/graphql/utilities/build_client_schema.py @@ -1,8 +1,8 @@ from itertools import chain -from typing import cast, Callable, Collection, Dict, List, Union +from typing import Callable, Collection, Dict, List, Union, cast from ..language import DirectiveLocation, parse_value -from ..pyutils import inspect, Undefined +from ..pyutils import Undefined, inspect from ..type import ( GraphQLArgument, GraphQLDirective, @@ -35,9 +35,9 @@ IntrospectionDirective, IntrospectionEnumType, IntrospectionField, - IntrospectionInterfaceType, IntrospectionInputObjectType, IntrospectionInputValue, + IntrospectionInterfaceType, IntrospectionObjectType, IntrospectionQuery, IntrospectionScalarType, @@ -47,6 +47,7 @@ ) from .value_from_ast import value_from_ast + __all__ = ["build_client_schema"] diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index 6901c892..8b4cbbd8 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -1,28 +1,28 @@ from typing import Any, Callable, Dict, List, Optional, Union, cast - from ..error import GraphQLError from ..pyutils import ( Path, + Undefined, did_you_mean, inspect, is_iterable, print_path_list, suggestion_list, - Undefined, ) from ..type import ( GraphQLInputObjectType, GraphQLInputType, GraphQLList, + GraphQLNonNull, GraphQLScalarType, - is_leaf_type, is_input_object_type, + is_leaf_type, is_list_type, is_non_null_type, - GraphQLNonNull, ) + __all__ = ["coerce_input_value"] diff --git a/src/graphql/utilities/concat_ast.py b/src/graphql/utilities/concat_ast.py index cd12a74f..6aca4b18 100644 --- a/src/graphql/utilities/concat_ast.py +++ b/src/graphql/utilities/concat_ast.py @@ -3,6 +3,7 @@ from ..language.ast import DocumentNode + __all__ = ["concat_ast"] diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 93c8dce4..37b9bd98 100644 --- a/src/graphql/utilities/extend_schema.py +++ 
b/src/graphql/utilities/extend_schema.py @@ -33,8 +33,8 @@ OperationType, ScalarTypeDefinitionNode, ScalarTypeExtensionNode, - SchemaExtensionNode, SchemaDefinitionNode, + SchemaExtensionNode, TypeDefinitionNode, TypeExtensionNode, TypeNode, @@ -53,9 +53,9 @@ GraphQLField, GraphQLFieldMap, GraphQLInputField, + GraphQLInputFieldMap, GraphQLInputObjectType, GraphQLInputType, - GraphQLInputFieldMap, GraphQLInterfaceType, GraphQLList, GraphQLNamedType, @@ -70,21 +70,22 @@ GraphQLType, GraphQLUnionType, assert_schema, + introspection_types, is_enum_type, is_input_object_type, is_interface_type, + is_introspection_type, is_list_type, is_non_null_type, is_object_type, is_scalar_type, - is_union_type, - is_introspection_type, is_specified_scalar_type, - introspection_types, + is_union_type, specified_scalar_types, ) from .value_from_ast import value_from_ast + __all__ = [ "extend_schema", "extend_schema_impl", diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index 281a2def..bb89de3e 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -2,15 +2,15 @@ from typing import Any, Collection, Dict, List, NamedTuple, Union, cast from ..language import print_ast -from ..pyutils import inspect, Undefined +from ..pyutils import Undefined, inspect from ..type import ( GraphQLEnumType, GraphQLField, + GraphQLInputType, + GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, - GraphQLInputType, - GraphQLInterfaceType, GraphQLObjectType, GraphQLSchema, GraphQLType, @@ -31,6 +31,7 @@ from ..utilities.sort_value_node import sort_value_node from .ast_from_value import ast_from_value + __all__ = [ "BreakingChange", "BreakingChangeType", diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index aed2348a..0c13c32a 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ 
b/src/graphql/utilities/get_introspection_query.py @@ -3,10 +3,12 @@ from ..language import DirectiveLocation + try: - from typing import TypedDict, Literal + from typing import Literal, TypedDict except ImportError: # Python < 3.8 - from typing_extensions import TypedDict, Literal # type: ignore + from typing_extensions import Literal # type: ignore + from typing_extensions import TypedDict __all__ = [ "get_introspection_query", diff --git a/src/graphql/utilities/get_operation_ast.py b/src/graphql/utilities/get_operation_ast.py index b7d79317..08f8bb9a 100644 --- a/src/graphql/utilities/get_operation_ast.py +++ b/src/graphql/utilities/get_operation_ast.py @@ -2,6 +2,7 @@ from ..language import DocumentNode, OperationDefinitionNode + __all__ = ["get_operation_ast"] diff --git a/src/graphql/utilities/get_operation_root_type.py b/src/graphql/utilities/get_operation_root_type.py index be0e5e6e..65f76c9c 100644 --- a/src/graphql/utilities/get_operation_root_type.py +++ b/src/graphql/utilities/get_operation_root_type.py @@ -2,12 +2,13 @@ from ..error import GraphQLError from ..language import ( - OperationType, OperationDefinitionNode, + OperationType, OperationTypeDefinitionNode, ) from ..type import GraphQLObjectType, GraphQLSchema + __all__ = ["get_operation_root_type"] diff --git a/src/graphql/utilities/introspection_from_schema.py b/src/graphql/utilities/introspection_from_schema.py index e0634860..4fb0a65f 100644 --- a/src/graphql/utilities/introspection_from_schema.py +++ b/src/graphql/utilities/introspection_from_schema.py @@ -3,7 +3,8 @@ from ..error import GraphQLError from ..language import parse from ..type import GraphQLSchema -from .get_introspection_query import get_introspection_query, IntrospectionQuery +from .get_introspection_query import IntrospectionQuery, get_introspection_query + __all__ = ["introspection_from_schema"] @@ -34,7 +35,7 @@ def introspection_from_schema( ) ) - from ..execution.execute import execute_sync, ExecutionResult + from 
..execution.execute import ExecutionResult, execute_sync result = execute_sync(schema, document) if not isinstance(result, ExecutionResult): # pragma: no cover diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index de6326cb..f8362489 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -29,6 +29,7 @@ is_union_type, ) + __all__ = ["lexicographic_sort_schema"] diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 55a6a58a..218f3548 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -1,6 +1,6 @@ from typing import Any, Callable, Dict, List, Optional, Union, cast -from ..language import print_ast, StringValueNode +from ..language import StringValueNode, print_ast from ..language.block_string import is_printable_as_block_string from ..pyutils import inspect from ..type import ( @@ -29,6 +29,7 @@ ) from .ast_from_value import ast_from_value + __all__ = ["print_schema", "print_introspection_schema", "print_type", "print_value"] diff --git a/src/graphql/utilities/separate_operations.py b/src/graphql/utilities/separate_operations.py index b40bc686..48ce555e 100644 --- a/src/graphql/utilities/separate_operations.py +++ b/src/graphql/utilities/separate_operations.py @@ -10,6 +10,7 @@ visit, ) + __all__ = ["separate_operations"] diff --git a/src/graphql/utilities/sort_value_node.py b/src/graphql/utilities/sort_value_node.py index 77a8dcb7..5edd0069 100644 --- a/src/graphql/utilities/sort_value_node.py +++ b/src/graphql/utilities/sort_value_node.py @@ -4,6 +4,7 @@ from ..language import ListValueNode, ObjectFieldNode, ObjectValueNode, ValueNode from ..pyutils import natural_comparison_key + __all__ = ["sort_value_node"] diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 
1ed5bbef..7c212733 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -1,9 +1,10 @@ from typing import Union, cast from ..language import Lexer, TokenKind -from ..language.source import Source, is_source from ..language.block_string import print_block_string from ..language.lexer import is_punctuator_token_kind +from ..language.source import Source, is_source + __all__ = ["strip_ignored_characters"] diff --git a/src/graphql/utilities/type_comparators.py b/src/graphql/utilities/type_comparators.py index 62883785..f0360d91 100644 --- a/src/graphql/utilities/type_comparators.py +++ b/src/graphql/utilities/type_comparators.py @@ -15,6 +15,7 @@ is_object_type, ) + __all__ = ["is_equal_type", "is_type_sub_type_of", "do_types_overlap"] diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index d8f2a5be..749cb5bb 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -1,16 +1,17 @@ -from typing import cast, overload, Optional +from typing import Optional, cast, overload from ..language import ListTypeNode, NamedTypeNode, NonNullTypeNode, TypeNode from ..pyutils import inspect from ..type import ( - GraphQLSchema, - GraphQLNamedType, GraphQLList, + GraphQLNamedType, GraphQLNonNull, GraphQLNullableType, + GraphQLSchema, GraphQLType, ) + __all__ = ["type_from_ast"] diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 4067d7a7..11bdea58 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -32,22 +32,23 @@ GraphQLOutputType, GraphQLSchema, GraphQLType, - is_composite_type, - is_input_type, - is_output_type, - get_named_type, SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, - is_object_type, - is_interface_type, + get_named_type, get_nullable_type, - is_list_type, - is_input_object_type, + is_composite_type, is_enum_type, + 
is_input_object_type, + is_input_type, + is_interface_type, + is_list_type, + is_object_type, + is_output_type, ) from .type_from_ast import type_from_ast + __all__ = ["TypeInfo", "TypeInfoVisitor"] diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index 62a506c8..6fac2a8e 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -7,7 +7,7 @@ ValueNode, VariableNode, ) -from ..pyutils import inspect, Undefined +from ..pyutils import Undefined, inspect from ..type import ( GraphQLInputObjectType, GraphQLInputType, @@ -20,6 +20,7 @@ is_non_null_type, ) + __all__ = ["value_from_ast"] diff --git a/src/graphql/utilities/value_from_ast_untyped.py b/src/graphql/utilities/value_from_ast_untyped.py index 795160b5..d5ba819c 100644 --- a/src/graphql/utilities/value_from_ast_untyped.py +++ b/src/graphql/utilities/value_from_ast_untyped.py @@ -2,7 +2,6 @@ from typing import Any, Callable, Dict, Optional, Union from ..language import ( - ValueNode, BooleanValueNode, EnumValueNode, FloatValueNode, @@ -11,10 +10,11 @@ NullValueNode, ObjectValueNode, StringValueNode, + ValueNode, VariableNode, ) +from ..pyutils import Undefined, inspect -from ..pyutils import inspect, Undefined __all__ = ["value_from_ast_untyped"] diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 313073a5..03c91bd2 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -4,18 +4,11 @@ GraphQL result. """ -from .validate import validate +from .rules import ASTValidationRule, SDLValidationRule, ValidationRule -from .validation_context import ( - ASTValidationContext, - SDLValidationContext, - ValidationContext, -) - -from .rules import ValidationRule, ASTValidationRule, SDLValidationRule - -# All validation rules in the GraphQL Specification. 
-from .specified_rules import specified_rules +# Optional rules not defined by the GraphQL Specification +from .rules.custom.no_deprecated import NoDeprecatedCustomRule +from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule @@ -41,6 +34,9 @@ # Spec Section: "Lone Anonymous Operation" from .rules.lone_anonymous_operation import LoneAnonymousOperationRule +# SDL-specific validation rules +from .rules.lone_schema_definition import LoneSchemaDefinitionRule + # Spec Section: "Fragments must not form cycles" from .rules.no_fragment_cycles import NoFragmentCyclesRule @@ -58,6 +54,7 @@ # Spec Section: "Fragment spread is possible" from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule +from .rules.possible_type_extensions import PossibleTypeExtensionsRule # Spec Section: "Argument Optionality" from .rules.provided_required_arguments import ProvidedRequiredArgumentsRule @@ -67,12 +64,16 @@ # Spec Section: "Subscriptions with Single Root Field" from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule +from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule # Spec Section: "Argument Uniqueness" from .rules.unique_argument_names import UniqueArgumentNamesRule +from .rules.unique_directive_names import UniqueDirectiveNamesRule # Spec Section: "Directives Are Unique Per Location" from .rules.unique_directives_per_location import UniqueDirectivesPerLocationRule +from .rules.unique_enum_value_names import UniqueEnumValueNamesRule +from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule # Spec Section: "Fragment Name Uniqueness" from .rules.unique_fragment_names import UniqueFragmentNamesRule @@ -82,6 +83,8 @@ # Spec Section: "Operation Name Uniqueness" from .rules.unique_operation_names import UniqueOperationNamesRule +from .rules.unique_operation_types 
import UniqueOperationTypesRule +from .rules.unique_type_names import UniqueTypeNamesRule # Spec Section: "Variable Uniqueness" from .rules.unique_variable_names import UniqueVariableNamesRule @@ -95,19 +98,15 @@ # Spec Section: "All Variable Usages Are Allowed" from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule -# SDL-specific validation rules -from .rules.lone_schema_definition import LoneSchemaDefinitionRule -from .rules.unique_operation_types import UniqueOperationTypesRule -from .rules.unique_type_names import UniqueTypeNamesRule -from .rules.unique_enum_value_names import UniqueEnumValueNamesRule -from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule -from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule -from .rules.unique_directive_names import UniqueDirectiveNamesRule -from .rules.possible_type_extensions import PossibleTypeExtensionsRule +# All validation rules in the GraphQL Specification. +from .specified_rules import specified_rules +from .validate import validate +from .validation_context import ( + ASTValidationContext, + SDLValidationContext, + ValidationContext, +) -# Optional rules not defined by the GraphQL Specification -from .rules.custom.no_deprecated import NoDeprecatedCustomRule -from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule __all__ = [ "validate", diff --git a/src/graphql/validation/rules/__init__.py b/src/graphql/validation/rules/__init__.py index 1b0c5d57..d4e13617 100644 --- a/src/graphql/validation/rules/__init__.py +++ b/src/graphql/validation/rules/__init__.py @@ -8,6 +8,7 @@ ValidationContext, ) + __all__ = ["ASTValidationRule", "SDLValidationRule", "ValidationRule"] diff --git a/src/graphql/validation/rules/custom/no_deprecated.py b/src/graphql/validation/rules/custom/no_deprecated.py index e4b897ff..14e9d0e0 100644 --- a/src/graphql/validation/rules/custom/no_deprecated.py +++ 
b/src/graphql/validation/rules/custom/no_deprecated.py @@ -5,6 +5,7 @@ from ....type import GraphQLInputObjectType, get_named_type, is_input_object_type from .. import ValidationRule + __all__ = ["NoDeprecatedCustomRule"] diff --git a/src/graphql/validation/rules/custom/no_schema_introspection.py b/src/graphql/validation/rules/custom/no_schema_introspection.py index 76f15ea7..97fb4346 100644 --- a/src/graphql/validation/rules/custom/no_schema_introspection.py +++ b/src/graphql/validation/rules/custom/no_schema_introspection.py @@ -5,6 +5,7 @@ from ....type import get_named_type, is_introspection_type from .. import ValidationRule + __all__ = ["NoSchemaIntrospectionCustomRule"] diff --git a/src/graphql/validation/rules/executable_definitions.py b/src/graphql/validation/rules/executable_definitions.py index eead0fae..01c38fd6 100644 --- a/src/graphql/validation/rules/executable_definitions.py +++ b/src/graphql/validation/rules/executable_definitions.py @@ -2,6 +2,7 @@ from ...error import GraphQLError from ...language import ( + SKIP, DirectiveDefinitionNode, DocumentNode, ExecutableDefinitionNode, @@ -9,10 +10,10 @@ SchemaExtensionNode, TypeDefinitionNode, VisitorAction, - SKIP, ) from . 
import ASTValidationRule + __all__ = ["ExecutableDefinitionsRule"] diff --git a/src/graphql/validation/rules/fields_on_correct_type.py b/src/graphql/validation/rules/fields_on_correct_type.py index d5622f76..a333a901 100644 --- a/src/graphql/validation/rules/fields_on_correct_type.py +++ b/src/graphql/validation/rules/fields_on_correct_type.py @@ -2,6 +2,9 @@ from functools import cmp_to_key from typing import Any, Dict, List, Union, cast +from ...error import GraphQLError +from ...language import FieldNode +from ...pyutils import did_you_mean, natural_comparison_key, suggestion_list from ...type import ( GraphQLAbstractType, GraphQLInterfaceType, @@ -12,11 +15,9 @@ is_interface_type, is_object_type, ) -from ...error import GraphQLError -from ...language import FieldNode -from ...pyutils import did_you_mean, natural_comparison_key, suggestion_list from . import ValidationRule + __all__ = ["FieldsOnCorrectTypeRule"] diff --git a/src/graphql/validation/rules/fragments_on_composite_types.py b/src/graphql/validation/rules/fragments_on_composite_types.py index c0c9cb5f..06737f4f 100644 --- a/src/graphql/validation/rules/fragments_on_composite_types.py +++ b/src/graphql/validation/rules/fragments_on_composite_types.py @@ -1,15 +1,12 @@ from typing import Any from ...error import GraphQLError -from ...language import ( - FragmentDefinitionNode, - InlineFragmentNode, - print_ast, -) +from ...language import FragmentDefinitionNode, InlineFragmentNode, print_ast from ...type import is_composite_type from ...utilities import type_from_ast from . 
import ValidationRule + __all__ = ["FragmentsOnCompositeTypesRule"] diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index d0eb6b9f..e66cc078 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -1,17 +1,18 @@ -from typing import cast, Any, Dict, List, Union +from typing import Any, Dict, List, Union, cast from ...error import GraphQLError from ...language import ( + SKIP, ArgumentNode, DirectiveDefinitionNode, DirectiveNode, - SKIP, VisitorAction, ) from ...pyutils import did_you_mean, suggestion_list from ...type import specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext + __all__ = ["KnownArgumentNamesRule", "KnownArgumentNamesOnDirectivesRule"] diff --git a/src/graphql/validation/rules/known_directives.py b/src/graphql/validation/rules/known_directives.py index 26c5c75b..f0cad1d8 100644 --- a/src/graphql/validation/rules/known_directives.py +++ b/src/graphql/validation/rules/known_directives.py @@ -1,9 +1,9 @@ -from typing import cast, Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union, cast from ...error import GraphQLError from ...language import ( - DirectiveLocation, DirectiveDefinitionNode, + DirectiveLocation, DirectiveNode, Node, OperationDefinitionNode, @@ -11,6 +11,7 @@ from ...type import specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext + __all__ = ["KnownDirectivesRule"] diff --git a/src/graphql/validation/rules/known_fragment_names.py b/src/graphql/validation/rules/known_fragment_names.py index bb21ea77..40e5173e 100644 --- a/src/graphql/validation/rules/known_fragment_names.py +++ b/src/graphql/validation/rules/known_fragment_names.py @@ -4,6 +4,7 @@ from ...language import FragmentSpreadNode from . 
import ValidationRule + __all__ = ["KnownFragmentNamesRule"] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index 68e10454..eb601203 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -2,16 +2,17 @@ from ...error import GraphQLError from ...language import ( + NamedTypeNode, + Node, + TypeDefinitionNode, is_type_definition_node, is_type_system_definition_node, is_type_system_extension_node, - Node, - NamedTypeNode, - TypeDefinitionNode, ) -from ...type import introspection_types, specified_scalar_types from ...pyutils import did_you_mean, suggestion_list -from . import ASTValidationRule, ValidationContext, SDLValidationContext +from ...type import introspection_types, specified_scalar_types +from . import ASTValidationRule, SDLValidationContext, ValidationContext + __all__ = ["KnownTypeNamesRule"] diff --git a/src/graphql/validation/rules/lone_anonymous_operation.py b/src/graphql/validation/rules/lone_anonymous_operation.py index f88b5c85..aa9e9052 100644 --- a/src/graphql/validation/rules/lone_anonymous_operation.py +++ b/src/graphql/validation/rules/lone_anonymous_operation.py @@ -4,6 +4,7 @@ from ...language import DocumentNode, OperationDefinitionNode from . import ASTValidationContext, ASTValidationRule + __all__ = ["LoneAnonymousOperationRule"] diff --git a/src/graphql/validation/rules/lone_schema_definition.py b/src/graphql/validation/rules/lone_schema_definition.py index 2d33cb86..cee76405 100644 --- a/src/graphql/validation/rules/lone_schema_definition.py +++ b/src/graphql/validation/rules/lone_schema_definition.py @@ -2,7 +2,8 @@ from ...error import GraphQLError from ...language import SchemaDefinitionNode -from . import SDLValidationRule, SDLValidationContext +from . 
import SDLValidationContext, SDLValidationRule + __all__ = ["LoneSchemaDefinitionRule"] diff --git a/src/graphql/validation/rules/no_fragment_cycles.py b/src/graphql/validation/rules/no_fragment_cycles.py index cf2b7e37..971d42ee 100644 --- a/src/graphql/validation/rules/no_fragment_cycles.py +++ b/src/graphql/validation/rules/no_fragment_cycles.py @@ -1,9 +1,10 @@ from typing import Any, Dict, List, Set from ...error import GraphQLError -from ...language import FragmentDefinitionNode, FragmentSpreadNode, VisitorAction, SKIP +from ...language import SKIP, FragmentDefinitionNode, FragmentSpreadNode, VisitorAction from . import ASTValidationContext, ASTValidationRule + __all__ = ["NoFragmentCyclesRule"] diff --git a/src/graphql/validation/rules/no_undefined_variables.py b/src/graphql/validation/rules/no_undefined_variables.py index a890473f..73dec8b3 100644 --- a/src/graphql/validation/rules/no_undefined_variables.py +++ b/src/graphql/validation/rules/no_undefined_variables.py @@ -4,6 +4,7 @@ from ...language import OperationDefinitionNode, VariableDefinitionNode from . import ValidationContext, ValidationRule + __all__ = ["NoUndefinedVariablesRule"] diff --git a/src/graphql/validation/rules/no_unused_fragments.py b/src/graphql/validation/rules/no_unused_fragments.py index c2b46cf0..a5edb2d8 100644 --- a/src/graphql/validation/rules/no_unused_fragments.py +++ b/src/graphql/validation/rules/no_unused_fragments.py @@ -2,13 +2,14 @@ from ...error import GraphQLError from ...language import ( + SKIP, FragmentDefinitionNode, OperationDefinitionNode, VisitorAction, - SKIP, ) from . 
import ASTValidationContext, ASTValidationRule + __all__ = ["NoUnusedFragmentsRule"] diff --git a/src/graphql/validation/rules/no_unused_variables.py b/src/graphql/validation/rules/no_unused_variables.py index b8770944..d402b7b6 100644 --- a/src/graphql/validation/rules/no_unused_variables.py +++ b/src/graphql/validation/rules/no_unused_variables.py @@ -4,6 +4,7 @@ from ...language import OperationDefinitionNode, VariableDefinitionNode from . import ValidationContext, ValidationRule + __all__ = ["NoUnusedVariablesRule"] diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 33c1b09e..8eda47a2 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -30,6 +30,7 @@ from ...utilities.sort_value_node import sort_value_node from . import ValidationContext, ValidationRule + MYPY = False __all__ = ["OverlappingFieldsCanBeMergedRule"] diff --git a/src/graphql/validation/rules/possible_fragment_spreads.py b/src/graphql/validation/rules/possible_fragment_spreads.py index 27c2ad9b..944c1d31 100644 --- a/src/graphql/validation/rules/possible_fragment_spreads.py +++ b/src/graphql/validation/rules/possible_fragment_spreads.py @@ -1,4 +1,4 @@ -from typing import cast, Any, Optional +from typing import Any, Optional, cast from ...error import GraphQLError from ...language import FragmentSpreadNode, InlineFragmentNode @@ -6,6 +6,7 @@ from ...utilities import do_types_overlap, type_from_ast from . import ValidationRule + __all__ = ["PossibleFragmentSpreadsRule"] diff --git a/src/graphql/validation/rules/possible_type_extensions.py b/src/graphql/validation/rules/possible_type_extensions.py index 63c90dbc..6ad06723 100644 --- a/src/graphql/validation/rules/possible_type_extensions.py +++ b/src/graphql/validation/rules/possible_type_extensions.py @@ -15,6 +15,7 @@ ) from . 
import SDLValidationContext, SDLValidationRule + __all__ = ["PossibleTypeExtensionsRule"] diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index 99e23cea..2b822e17 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -1,7 +1,8 @@ -from typing import cast, Any, Dict, List, Union +from typing import Any, Dict, List, Union, cast from ...error import GraphQLError from ...language import ( + SKIP, DirectiveDefinitionNode, DirectiveNode, FieldNode, @@ -9,12 +10,12 @@ NonNullTypeNode, TypeNode, VisitorAction, - SKIP, print_ast, ) from ...type import GraphQLArgument, is_required_argument, is_type, specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext + __all__ = ["ProvidedRequiredArgumentsRule", "ProvidedRequiredArgumentsOnDirectivesRule"] diff --git a/src/graphql/validation/rules/scalar_leafs.py b/src/graphql/validation/rules/scalar_leafs.py index 174df4e4..9a6c07c4 100644 --- a/src/graphql/validation/rules/scalar_leafs.py +++ b/src/graphql/validation/rules/scalar_leafs.py @@ -5,6 +5,7 @@ from ...type import get_named_type, is_leaf_type from . import ValidationRule + __all__ = ["ScalarLeafsRule"] diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 53a7218f..9b96813b 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -10,6 +10,7 @@ ) from . 
import ValidationRule + __all__ = ["SingleFieldSubscriptionsRule"] diff --git a/src/graphql/validation/rules/unique_argument_definition_names.py b/src/graphql/validation/rules/unique_argument_definition_names.py index ba0e13ac..6664373b 100644 --- a/src/graphql/validation/rules/unique_argument_definition_names.py +++ b/src/graphql/validation/rules/unique_argument_definition_names.py @@ -3,6 +3,7 @@ from ...error import GraphQLError from ...language import ( + SKIP, DirectiveDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode, @@ -12,11 +13,11 @@ ObjectTypeDefinitionNode, ObjectTypeExtensionNode, VisitorAction, - SKIP, ) from ...pyutils import group_by from . import SDLValidationRule + __all__ = ["UniqueArgumentDefinitionNamesRule"] diff --git a/src/graphql/validation/rules/unique_argument_names.py b/src/graphql/validation/rules/unique_argument_names.py index 9afe1c09..027b0788 100644 --- a/src/graphql/validation/rules/unique_argument_names.py +++ b/src/graphql/validation/rules/unique_argument_names.py @@ -6,6 +6,7 @@ from ...pyutils import group_by from . import ASTValidationRule + __all__ = ["UniqueArgumentNamesRule"] diff --git a/src/graphql/validation/rules/unique_directive_names.py b/src/graphql/validation/rules/unique_directive_names.py index b25dafe1..989ed366 100644 --- a/src/graphql/validation/rules/unique_directive_names.py +++ b/src/graphql/validation/rules/unique_directive_names.py @@ -1,9 +1,10 @@ from typing import Any, Dict from ...error import GraphQLError -from ...language import DirectiveDefinitionNode, NameNode, VisitorAction, SKIP +from ...language import SKIP, DirectiveDefinitionNode, NameNode, VisitorAction from . 
import SDLValidationContext, SDLValidationRule + __all__ = ["UniqueDirectiveNamesRule"] diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index 98e592ac..0baeac4b 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -16,6 +16,7 @@ from ...type import specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext + __all__ = ["UniqueDirectivesPerLocationRule"] diff --git a/src/graphql/validation/rules/unique_enum_value_names.py b/src/graphql/validation/rules/unique_enum_value_names.py index 9be41e8c..cf2e42bb 100644 --- a/src/graphql/validation/rules/unique_enum_value_names.py +++ b/src/graphql/validation/rules/unique_enum_value_names.py @@ -1,11 +1,12 @@ from collections import defaultdict -from typing import cast, Any, Dict +from typing import Any, Dict, cast from ...error import GraphQLError -from ...language import NameNode, EnumTypeDefinitionNode, VisitorAction, SKIP -from ...type import is_enum_type, GraphQLEnumType +from ...language import SKIP, EnumTypeDefinitionNode, NameNode, VisitorAction +from ...type import GraphQLEnumType, is_enum_type from . 
import SDLValidationContext, SDLValidationRule + __all__ = ["UniqueEnumValueNamesRule"] diff --git a/src/graphql/validation/rules/unique_field_definition_names.py b/src/graphql/validation/rules/unique_field_definition_names.py index fe29d6d5..28f16711 100644 --- a/src/graphql/validation/rules/unique_field_definition_names.py +++ b/src/graphql/validation/rules/unique_field_definition_names.py @@ -2,10 +2,11 @@ from typing import Any, Dict from ...error import GraphQLError -from ...language import NameNode, ObjectTypeDefinitionNode, VisitorAction, SKIP -from ...type import is_object_type, is_interface_type, is_input_object_type +from ...language import SKIP, NameNode, ObjectTypeDefinitionNode, VisitorAction +from ...type import is_input_object_type, is_interface_type, is_object_type from . import SDLValidationContext, SDLValidationRule + __all__ = ["UniqueFieldDefinitionNamesRule"] diff --git a/src/graphql/validation/rules/unique_fragment_names.py b/src/graphql/validation/rules/unique_fragment_names.py index c951fc75..82e1bf60 100644 --- a/src/graphql/validation/rules/unique_fragment_names.py +++ b/src/graphql/validation/rules/unique_fragment_names.py @@ -1,9 +1,10 @@ from typing import Any, Dict from ...error import GraphQLError -from ...language import NameNode, FragmentDefinitionNode, VisitorAction, SKIP +from ...language import SKIP, FragmentDefinitionNode, NameNode, VisitorAction from . import ASTValidationContext, ASTValidationRule + __all__ = ["UniqueFragmentNamesRule"] diff --git a/src/graphql/validation/rules/unique_input_field_names.py b/src/graphql/validation/rules/unique_input_field_names.py index 76739a75..0b7d49d2 100644 --- a/src/graphql/validation/rules/unique_input_field_names.py +++ b/src/graphql/validation/rules/unique_input_field_names.py @@ -4,6 +4,7 @@ from ...language import NameNode, ObjectFieldNode from . 
import ASTValidationContext, ASTValidationRule + __all__ = ["UniqueInputFieldNamesRule"] diff --git a/src/graphql/validation/rules/unique_operation_names.py b/src/graphql/validation/rules/unique_operation_names.py index 346ebf91..f151d0d8 100644 --- a/src/graphql/validation/rules/unique_operation_names.py +++ b/src/graphql/validation/rules/unique_operation_names.py @@ -1,9 +1,10 @@ from typing import Any, Dict from ...error import GraphQLError -from ...language import NameNode, OperationDefinitionNode, VisitorAction, SKIP +from ...language import SKIP, NameNode, OperationDefinitionNode, VisitorAction from . import ASTValidationContext, ASTValidationRule + __all__ = ["UniqueOperationNamesRule"] diff --git a/src/graphql/validation/rules/unique_operation_types.py b/src/graphql/validation/rules/unique_operation_types.py index 190963f7..f8170fc7 100644 --- a/src/graphql/validation/rules/unique_operation_types.py +++ b/src/graphql/validation/rules/unique_operation_types.py @@ -2,16 +2,17 @@ from ...error import GraphQLError from ...language import ( - OperationTypeDefinitionNode, + SKIP, OperationType, + OperationTypeDefinitionNode, SchemaDefinitionNode, SchemaExtensionNode, VisitorAction, - SKIP, ) from ...type import GraphQLObjectType from . import SDLValidationContext, SDLValidationRule + __all__ = ["UniqueOperationTypesRule"] diff --git a/src/graphql/validation/rules/unique_type_names.py b/src/graphql/validation/rules/unique_type_names.py index 7082a0fb..723fa7df 100644 --- a/src/graphql/validation/rules/unique_type_names.py +++ b/src/graphql/validation/rules/unique_type_names.py @@ -1,9 +1,10 @@ from typing import Any, Dict from ...error import GraphQLError -from ...language import NameNode, TypeDefinitionNode, VisitorAction, SKIP +from ...language import SKIP, NameNode, TypeDefinitionNode, VisitorAction from . 
import SDLValidationContext, SDLValidationRule + __all__ = ["UniqueTypeNamesRule"] diff --git a/src/graphql/validation/rules/unique_variable_names.py b/src/graphql/validation/rules/unique_variable_names.py index 4111cb32..8e547382 100644 --- a/src/graphql/validation/rules/unique_variable_names.py +++ b/src/graphql/validation/rules/unique_variable_names.py @@ -6,6 +6,7 @@ from ...pyutils import group_by from . import ASTValidationRule + __all__ = ["UniqueVariableNamesRule"] diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 44dc6a0d..982b87ed 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -1,22 +1,22 @@ -from typing import cast, Any +from typing import Any, cast from ...error import GraphQLError from ...language import ( + SKIP, BooleanValueNode, EnumValueNode, FloatValueNode, IntValueNode, - NullValueNode, ListValueNode, + NullValueNode, ObjectFieldNode, ObjectValueNode, StringValueNode, ValueNode, VisitorAction, - SKIP, print_ast, ) -from ...pyutils import did_you_mean, suggestion_list, Undefined +from ...pyutils import Undefined, did_you_mean, suggestion_list from ...type import ( GraphQLInputObjectType, GraphQLScalarType, @@ -30,6 +30,7 @@ ) from . import ValidationRule + __all__ = ["ValuesOfCorrectTypeRule"] diff --git a/src/graphql/validation/rules/variables_are_input_types.py b/src/graphql/validation/rules/variables_are_input_types.py index 449ad290..30ef713c 100644 --- a/src/graphql/validation/rules/variables_are_input_types.py +++ b/src/graphql/validation/rules/variables_are_input_types.py @@ -6,6 +6,7 @@ from ...utilities import type_from_ast from . 
import ValidationRule + __all__ = ["VariablesAreInputTypesRule"] diff --git a/src/graphql/validation/rules/variables_in_allowed_position.py b/src/graphql/validation/rules/variables_in_allowed_position.py index 49d3b416..c2d488b6 100644 --- a/src/graphql/validation/rules/variables_in_allowed_position.py +++ b/src/graphql/validation/rules/variables_in_allowed_position.py @@ -9,9 +9,10 @@ ) from ...pyutils import Undefined from ...type import GraphQLNonNull, GraphQLSchema, GraphQLType, is_non_null_type -from ...utilities import type_from_ast, is_type_sub_type_of +from ...utilities import is_type_sub_type_of, type_from_ast from . import ValidationContext, ValidationRule + __all__ = ["VariablesInAllowedPositionRule"] diff --git a/src/graphql/validation/specified_rules.py b/src/graphql/validation/specified_rules.py index db990aeb..8df6977f 100644 --- a/src/graphql/validation/specified_rules.py +++ b/src/graphql/validation/specified_rules.py @@ -5,92 +5,97 @@ # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule -# Spec Section: "Operation Name Uniqueness" -from .rules.unique_operation_names import UniqueOperationNamesRule - -# Spec Section: "Lone Anonymous Operation" -from .rules.lone_anonymous_operation import LoneAnonymousOperationRule - -# Spec Section: "Subscriptions with Single Root Field" -from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule - -# Spec Section: "Fragment Spread Type Existence" -from .rules.known_type_names import KnownTypeNamesRule +# Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" +from .rules.fields_on_correct_type import FieldsOnCorrectTypeRule # Spec Section: "Fragments on Composite Types" from .rules.fragments_on_composite_types import FragmentsOnCompositeTypesRule -# Spec Section: "Variables are Input Types" -from .rules.variables_are_input_types import VariablesAreInputTypesRule - -# Spec Section: "Leaf Field Selections" -from 
.rules.scalar_leafs import ScalarLeafsRule - -# Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" -from .rules.fields_on_correct_type import FieldsOnCorrectTypeRule +# Spec Section: "Argument Names" +from .rules.known_argument_names import ( + KnownArgumentNamesOnDirectivesRule, + KnownArgumentNamesRule, +) -# Spec Section: "Fragment Name Uniqueness" -from .rules.unique_fragment_names import UniqueFragmentNamesRule +# Spec Section: "Directives Are Defined" +from .rules.known_directives import KnownDirectivesRule # Spec Section: "Fragment spread target defined" from .rules.known_fragment_names import KnownFragmentNamesRule -# Spec Section: "Fragments must be used" -from .rules.no_unused_fragments import NoUnusedFragmentsRule +# Spec Section: "Fragment Spread Type Existence" +from .rules.known_type_names import KnownTypeNamesRule -# Spec Section: "Fragment spread is possible" -from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule +# Spec Section: "Lone Anonymous Operation" +from .rules.lone_anonymous_operation import LoneAnonymousOperationRule + +# Schema definition language: +from .rules.lone_schema_definition import LoneSchemaDefinitionRule # Spec Section: "Fragments must not form cycles" from .rules.no_fragment_cycles import NoFragmentCyclesRule -# Spec Section: "Variable Uniqueness" -from .rules.unique_variable_names import UniqueVariableNamesRule - # Spec Section: "All Variable Used Defined" from .rules.no_undefined_variables import NoUndefinedVariablesRule +# Spec Section: "Fragments must be used" +from .rules.no_unused_fragments import NoUnusedFragmentsRule + # Spec Section: "All Variables Used" from .rules.no_unused_variables import NoUnusedVariablesRule -# Spec Section: "Directives Are Defined" -from .rules.known_directives import KnownDirectivesRule +# Spec Section: "Field Selection Merging" +from .rules.overlapping_fields_can_be_merged import OverlappingFieldsCanBeMergedRule -# Spec Section: "Directives Are Unique 
Per Location" -from .rules.unique_directives_per_location import UniqueDirectivesPerLocationRule +# Spec Section: "Fragment spread is possible" +from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule +from .rules.possible_type_extensions import PossibleTypeExtensionsRule -# Spec Section: "Argument Names" -from .rules.known_argument_names import KnownArgumentNamesRule -from .rules.known_argument_names import KnownArgumentNamesOnDirectivesRule +# Spec Section: "Argument Optionality" +from .rules.provided_required_arguments import ( + ProvidedRequiredArgumentsOnDirectivesRule, + ProvidedRequiredArgumentsRule, +) -# Spec Section: "Argument Uniqueness" -from .rules.unique_argument_names import UniqueArgumentNamesRule +# Spec Section: "Leaf Field Selections" +from .rules.scalar_leafs import ScalarLeafsRule -# Spec Section: "Value Type Correctness" -from .rules.values_of_correct_type import ValuesOfCorrectTypeRule +# Spec Section: "Subscriptions with Single Root Field" +from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule +from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule -# Spec Section: "Argument Optionality" -from .rules.provided_required_arguments import ProvidedRequiredArgumentsRule -from .rules.provided_required_arguments import ProvidedRequiredArgumentsOnDirectivesRule +# Spec Section: "Argument Uniqueness" +from .rules.unique_argument_names import UniqueArgumentNamesRule +from .rules.unique_directive_names import UniqueDirectiveNamesRule -# Spec Section: "All Variable Usages Are Allowed" -from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule +# Spec Section: "Directives Are Unique Per Location" +from .rules.unique_directives_per_location import UniqueDirectivesPerLocationRule +from .rules.unique_enum_value_names import UniqueEnumValueNamesRule +from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule -# Spec Section: "Field Selection 
Merging" -from .rules.overlapping_fields_can_be_merged import OverlappingFieldsCanBeMergedRule +# Spec Section: "Fragment Name Uniqueness" +from .rules.unique_fragment_names import UniqueFragmentNamesRule # Spec Section: "Input Object Field Uniqueness" from .rules.unique_input_field_names import UniqueInputFieldNamesRule -# Schema definition language: -from .rules.lone_schema_definition import LoneSchemaDefinitionRule +# Spec Section: "Operation Name Uniqueness" +from .rules.unique_operation_names import UniqueOperationNamesRule from .rules.unique_operation_types import UniqueOperationTypesRule from .rules.unique_type_names import UniqueTypeNamesRule -from .rules.unique_enum_value_names import UniqueEnumValueNamesRule -from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule -from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule -from .rules.unique_directive_names import UniqueDirectiveNamesRule -from .rules.possible_type_extensions import PossibleTypeExtensionsRule + +# Spec Section: "Variable Uniqueness" +from .rules.unique_variable_names import UniqueVariableNamesRule + +# Spec Section: "Value Type Correctness" +from .rules.values_of_correct_type import ValuesOfCorrectTypeRule + +# Spec Section: "Variables are Input Types" +from .rules.variables_are_input_types import VariablesAreInputTypesRule + +# Spec Section: "All Variable Usages Are Allowed" +from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule + __all__ = ["specified_rules", "specified_sdl_rules"] diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 13dc5243..8f301396 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -2,13 +2,14 @@ from ..error import GraphQLError from ..language import DocumentNode, ParallelVisitor, visit -from ..type import GraphQLSchema, assert_valid_schema from ..pyutils import inspect, is_collection +from ..type import 
GraphQLSchema, assert_valid_schema from ..utilities import TypeInfo, TypeInfoVisitor from .rules import ASTValidationRule from .specified_rules import specified_rules, specified_sdl_rules from .validation_context import SDLValidationContext, ValidationContext + __all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index 931a19d9..b3dadc3f 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -24,6 +24,7 @@ ) from ..utilities import TypeInfo, TypeInfoVisitor + __all__ = [ "ASTValidationContext", "SDLValidationContext", diff --git a/src/graphql/version.py b/src/graphql/version.py index e4e8cde0..88726e13 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -3,6 +3,7 @@ import re from typing import NamedTuple + __all__ = ["version", "version_info", "version_js", "version_info_js"] diff --git a/tests/benchmarks/test_build_ast_schema.py b/tests/benchmarks/test_build_ast_schema.py index b41626fc..cf201dec 100644 --- a/tests/benchmarks/test_build_ast_schema.py +++ b/tests/benchmarks/test_build_ast_schema.py @@ -1,4 +1,4 @@ -from graphql import parse, build_ast_schema, GraphQLSchema +from graphql import GraphQLSchema, build_ast_schema, parse from ..fixtures import big_schema_sdl # noqa: F401 diff --git a/tests/benchmarks/test_build_client_schema.py b/tests/benchmarks/test_build_client_schema.py index a8627f8d..6c4b6e40 100644 --- a/tests/benchmarks/test_build_client_schema.py +++ b/tests/benchmarks/test_build_client_schema.py @@ -1,4 +1,4 @@ -from graphql import build_client_schema, GraphQLSchema +from graphql import GraphQLSchema, build_client_schema from ..fixtures import big_schema_introspection_result # noqa: F401 diff --git a/tests/benchmarks/test_execution_async.py b/tests/benchmarks/test_execution_async.py index de7de2e5..1db3a157 100644 --- 
a/tests/benchmarks/test_execution_async.py +++ b/tests/benchmarks/test_execution_async.py @@ -1,8 +1,9 @@ import asyncio + from graphql import ( - GraphQLSchema, - GraphQLObjectType, GraphQLField, + GraphQLObjectType, + GraphQLSchema, GraphQLString, graphql, ) diff --git a/tests/benchmarks/test_execution_sync.py b/tests/benchmarks/test_execution_sync.py index bfdb7cc2..5d7d9135 100644 --- a/tests/benchmarks/test_execution_sync.py +++ b/tests/benchmarks/test_execution_sync.py @@ -1,7 +1,7 @@ from graphql import ( - GraphQLSchema, - GraphQLObjectType, GraphQLField, + GraphQLObjectType, + GraphQLSchema, GraphQLString, graphql_sync, ) diff --git a/tests/benchmarks/test_introspection_from_schema.py b/tests/benchmarks/test_introspection_from_schema.py index f67a1f58..4c30d965 100644 --- a/tests/benchmarks/test_introspection_from_schema.py +++ b/tests/benchmarks/test_introspection_from_schema.py @@ -1,4 +1,4 @@ -from graphql import build_schema, parse, execute_sync +from graphql import build_schema, execute_sync, parse from graphql.utilities import get_introspection_query from ..fixtures import big_schema_sdl # noqa: F401 diff --git a/tests/benchmarks/test_parser.py b/tests/benchmarks/test_parser.py index 7d059f2b..8a99a760 100644 --- a/tests/benchmarks/test_parser.py +++ b/tests/benchmarks/test_parser.py @@ -1,4 +1,4 @@ -from graphql import parse, DocumentNode +from graphql import DocumentNode, parse from ..fixtures import kitchen_sink_query # noqa: F401 diff --git a/tests/benchmarks/test_visit.py b/tests/benchmarks/test_visit.py index 4a91f0c0..53bfc98e 100644 --- a/tests/benchmarks/test_visit.py +++ b/tests/benchmarks/test_visit.py @@ -1,5 +1,5 @@ from graphql import parse -from graphql.language import visit, Visitor, ParallelVisitor +from graphql.language import ParallelVisitor, Visitor, visit from ..fixtures import big_schema_sdl # noqa: F401 diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index 6c4689da..d331bae3 100644 --- 
a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -1,14 +1,14 @@ -from typing import cast, List, Union +from typing import List, Union, cast from pytest import raises from graphql.error import GraphQLError from graphql.language import ( - parse, Node, - OperationDefinitionNode, ObjectTypeDefinitionNode, + OperationDefinitionNode, Source, + parse, ) from ..utils import dedent diff --git a/tests/error/test_located_error.py b/tests/error/test_located_error.py index 4fd9acd9..593b24ad 100644 --- a/tests/error/test_located_error.py +++ b/tests/error/test_located_error.py @@ -1,4 +1,4 @@ -from typing import cast, Any +from typing import Any, cast from graphql.error import GraphQLError, located_error diff --git a/tests/error/test_print_location.py b/tests/error/test_print_location.py index 69f6143e..46b3497d 100644 --- a/tests/error/test_print_location.py +++ b/tests/error/test_print_location.py @@ -1,4 +1,4 @@ -from graphql.language import print_source_location, Source, SourceLocation +from graphql.language import Source, SourceLocation, print_source_location from ..utils import dedent diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index efc130f1..0ea10d47 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -3,7 +3,7 @@ from pytest import mark -from graphql.execution import execute, execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.type import ( GraphQLBoolean, diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index f5b6554a..23a1e9d7 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,6 +1,6 @@ -from graphql.execution import execute, ExecutionContext +from graphql.execution import ExecutionContext, execute from graphql.language import parse -from graphql.type import GraphQLSchema, GraphQLObjectType, 
GraphQLString, GraphQLField +from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString def describe_customize_execution(): diff --git a/tests/execution/test_directives.py b/tests/execution/test_directives.py index 2beadc77..80abb8ff 100644 --- a/tests/execution/test_directives.py +++ b/tests/execution/test_directives.py @@ -1,6 +1,7 @@ -from graphql.execution import execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute_sync from graphql.language import parse -from graphql.type import GraphQLObjectType, GraphQLField, GraphQLSchema, GraphQLString +from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString + schema = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 7cd2260a..d93abf21 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -1,12 +1,12 @@ import asyncio -from typing import cast, Any, Awaitable, Optional +from typing import Any, Awaitable, Optional, cast from pytest import mark, raises from graphql.error import GraphQLError from graphql.execution import execute, execute_sync -from graphql.language import parse, FieldNode, OperationDefinitionNode -from graphql.pyutils import inspect, Undefined +from graphql.language import FieldNode, OperationDefinitionNode, parse +from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLArgument, GraphQLBoolean, @@ -17,8 +17,8 @@ GraphQLNonNull, GraphQLObjectType, GraphQLResolveInfo, - GraphQLSchema, GraphQLScalarType, + GraphQLSchema, GraphQLString, GraphQLUnionType, ResponsePath, diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 729c1191..5a3b5ad6 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,8 +1,8 @@ -from typing import cast, Any, Awaitable +from typing import Any, Awaitable, cast from pytest import mark -from graphql.execution import 
execute, execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable from graphql.utilities import build_schema diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index da15e1b6..cee46c0c 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -3,7 +3,7 @@ from pytest import mark -from graphql.execution import execute, execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import AwaitableOrValue from graphql.type import ( @@ -16,6 +16,7 @@ ) from graphql.utilities import build_schema + sync_error = RuntimeError("sync") sync_non_null_error = RuntimeError("syncNonNull") promise_error = RuntimeError("promise") diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index aeb2a142..e5841d56 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -6,13 +6,13 @@ from graphql.execution import execute from graphql.language import parse from graphql.type import ( - GraphQLSchema, - GraphQLObjectType, - GraphQLField, - GraphQLList, - GraphQLInterfaceType, GraphQLBoolean, + GraphQLField, GraphQLInt, + GraphQLInterfaceType, + GraphQLList, + GraphQLObjectType, + GraphQLSchema, GraphQLString, ) diff --git a/tests/execution/test_resolve.py b/tests/execution/test_resolve.py index f945e6c3..cccb6105 100644 --- a/tests/execution/test_resolve.py +++ b/tests/execution/test_resolve.py @@ -2,8 +2,8 @@ from typing import Any from graphql.error import GraphQLError -from graphql.execution import execute_sync, ExecutionResult -from graphql.language import parse, SourceLocation +from graphql.execution import ExecutionResult, execute_sync +from graphql.language import SourceLocation, parse from graphql.type import ( GraphQLArgument, GraphQLField, diff --git 
a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 8b1fe639..2f1ae39c 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,9 +1,9 @@ import asyncio - -from typing import Any, Dict, List, Callable +from typing import Any, Callable, Dict, List from pytest import mark, raises +from graphql.execution import MapAsyncIterator, create_source_event_stream, subscribe from graphql.language import parse from graphql.pyutils import SimplePubSub from graphql.type import ( @@ -16,7 +16,7 @@ GraphQLSchema, GraphQLString, ) -from graphql.execution import create_source_event_stream, subscribe, MapAsyncIterator + try: anext diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py index e6858e43..0a1cb299 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -1,4 +1,4 @@ -from typing import Optional, Union, List +from typing import List, Optional, Union from graphql.execution import execute_sync from graphql.language import parse diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py index dde674c7..88cf180e 100644 --- a/tests/execution/test_variables.py +++ b/tests/execution/test_variables.py @@ -1,9 +1,9 @@ from math import nan from typing import Any, Dict, Optional -from graphql.execution import execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute_sync from graphql.execution.values import get_variable_values -from graphql.language import parse, OperationDefinitionNode, StringValueNode, ValueNode +from graphql.language import OperationDefinitionNode, StringValueNode, ValueNode, parse from graphql.pyutils import Undefined from graphql.type import ( GraphQLArgument, diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index fe63e16b..2d216f5c 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -4,6 +4,7 @@ from pytest import fixture + 
__all__ = [ "kitchen_sink_query", "kitchen_sink_sdl", diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py index bbf4440c..d61502b9 100644 --- a/tests/language/test_ast.py +++ b/tests/language/test_ast.py @@ -1,5 +1,5 @@ -from copy import copy, deepcopy import weakref +from copy import copy, deepcopy from graphql.language import Location, Node, Source, Token, TokenKind from graphql.pyutils import inspect diff --git a/tests/language/test_block_string.py b/tests/language/test_block_string.py index d617de27..73e31d1b 100644 --- a/tests/language/test_block_string.py +++ b/tests/language/test_block_string.py @@ -1,8 +1,8 @@ -from typing import cast, Collection, Optional +from typing import Collection, Optional, cast from graphql.language.block_string import ( - is_printable_as_block_string, dedent_block_string_lines, + is_printable_as_block_string, print_block_string, ) diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index b780ce9f..8de96b22 100644 --- a/tests/language/test_block_string_fuzz.py +++ b/tests/language/test_block_string_fuzz.py @@ -1,9 +1,9 @@ from pytest import mark -from graphql.language import Source, Lexer, TokenKind +from graphql.language import Lexer, Source, TokenKind from graphql.language.block_string import ( - print_block_string, is_printable_as_block_string, + print_block_string, ) from ..utils import dedent, gen_fuzz_strings diff --git a/tests/language/test_character_classes.py b/tests/language/test_character_classes.py index 37cddf5d..c682b76a 100644 --- a/tests/language/test_character_classes.py +++ b/tests/language/test_character_classes.py @@ -1,12 +1,14 @@ -from string import ascii_letters as letters, digits, punctuation +from string import ascii_letters as letters +from string import digits, punctuation from graphql.language.character_classes import ( is_digit, is_letter, - is_name_start, is_name_continue, + is_name_start, ) + non_ascii = "¯_±¹²³½£ºµÄäÖöØø×〇᧐〸αΑωΩ" diff --git 
a/tests/language/test_lexer.py b/tests/language/test_lexer.py index a1ac5abf..81759768 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -9,6 +9,7 @@ from ..utils import dedent + Location = Optional[Tuple[int, int]] diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 027a605b..a98d4d69 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -1,4 +1,4 @@ -from typing import cast, Optional, Tuple +from typing import Optional, Tuple, cast from pytest import raises @@ -11,8 +11,8 @@ IntValueNode, ListTypeNode, ListValueNode, - NameNode, NamedTypeNode, + NameNode, NonNullTypeNode, NullValueNode, ObjectFieldNode, @@ -20,22 +20,23 @@ OperationDefinitionNode, OperationType, SelectionSetNode, + Source, StringValueNode, - ValueNode, - VariableNode, Token, TokenKind, + ValueNode, + VariableNode, parse, + parse_const_value, parse_type, parse_value, - parse_const_value, - Source, ) from graphql.pyutils import inspect from ..fixtures import kitchen_sink_query # noqa: F401 from ..utils import dedent + Location = Optional[Tuple[int, int]] diff --git a/tests/language/test_predicates.py b/tests/language/test_predicates.py index 06369b0a..419dd35a 100644 --- a/tests/language/test_predicates.py +++ b/tests/language/test_predicates.py @@ -2,21 +2,22 @@ from typing import Callable from graphql.language import ( - ast, Node, - parse_value, + ast, + is_const_value_node, is_definition_node, is_executable_definition_node, is_selection_node, - is_value_node, - is_const_value_node, + is_type_definition_node, + is_type_extension_node, is_type_node, is_type_system_definition_node, - is_type_definition_node, is_type_system_extension_node, - is_type_extension_node, + is_value_node, + parse_value, ) + all_ast_nodes = sorted( [ node_type() diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index 7eef1978..673f1554 100644 --- a/tests/language/test_schema_parser.py +++ 
b/tests/language/test_schema_parser.py @@ -18,8 +18,8 @@ InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, ListTypeNode, - NameNode, NamedTypeNode, + NameNode, NonNullTypeNode, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, @@ -37,6 +37,7 @@ from ..fixtures import kitchen_sink_sdl # noqa: F401 + Location = Optional[Tuple[int, int]] diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index cd097a80..6ede54b6 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -2,7 +2,7 @@ from pytest import raises -from graphql.language import ScalarTypeDefinitionNode, NameNode, print_ast, parse +from graphql.language import NameNode, ScalarTypeDefinitionNode, parse, print_ast from ..fixtures import kitchen_sink_sdl # noqa: F401 from ..utils import dedent diff --git a/tests/language/test_source.py b/tests/language/test_source.py index 1b74aa1b..ca5592ea 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -1,6 +1,5 @@ import weakref - -from typing import cast, Tuple +from typing import Tuple, cast from pytest import raises diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index d16fe88d..21567a7d 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -1,23 +1,23 @@ from copy import copy from functools import partial -from typing import cast, List, Optional +from typing import List, Optional, cast from pytest import mark, raises from graphql.language import ( - Node, - FieldNode, - NameNode, - SelectionNode, - SelectionSetNode, - parse, - visit, BREAK, REMOVE, SKIP, + FieldNode, + NameNode, + Node, ParallelVisitor, + SelectionNode, + SelectionSetNode, Visitor, VisitorKeyMap, + parse, + visit, ) from ..fixtures import kitchen_sink_query # noqa: F401 diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index af87ccb6..d217f72d 100644 --- a/tests/pyutils/test_description.py +++ 
b/tests/pyutils/test_description.py @@ -4,23 +4,23 @@ from pytest import raises from graphql import graphql_sync +from graphql.pyutils import ( + Description, + is_description, + register_description, + unregister_description, +) from graphql.type import ( GraphQLArgument, GraphQLDirective, GraphQLEnumValue, GraphQLField, GraphQLInputField, - GraphQLObjectType, GraphQLNamedType, + GraphQLObjectType, GraphQLSchema, GraphQLString, ) -from graphql.pyutils import ( - Description, - is_description, - register_description, - unregister_description, -) from graphql.utilities import get_introspection_query, print_schema from ..utils import dedent diff --git a/tests/pyutils/test_frozen_dict.py b/tests/pyutils/test_frozen_dict.py index 594f4826..27160980 100644 --- a/tests/pyutils/test_frozen_dict.py +++ b/tests/pyutils/test_frozen_dict.py @@ -2,7 +2,7 @@ from pytest import raises -from graphql.pyutils import FrozenError, FrozenDict +from graphql.pyutils import FrozenDict, FrozenError def describe_frozen_list(): diff --git a/tests/pyutils/test_identity_func.py b/tests/pyutils/test_identity_func.py index b3ca5143..6c4c33db 100644 --- a/tests/pyutils/test_identity_func.py +++ b/tests/pyutils/test_identity_func.py @@ -1,4 +1,4 @@ -from graphql.pyutils import identity_func, Undefined +from graphql.pyutils import Undefined, identity_func def describe_identity_func(): diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index 6ace1fcd..272044c5 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -1,21 +1,22 @@ -from math import nan, inf from contextlib import contextmanager from importlib import import_module +from math import inf, nan from typing import Any, Dict, FrozenSet, List, Set, Tuple from pytest import mark -from graphql.pyutils import inspect, Undefined +from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLDirective, GraphQLField, GraphQLInt, GraphQLList, - GraphQLObjectType, GraphQLNonNull, 
+ GraphQLObjectType, GraphQLString, ) + inspect_module = import_module(inspect.__module__) diff --git a/tests/pyutils/test_merge_kwargs.py b/tests/pyutils/test_merge_kwargs.py index 05ef249a..f60bf562 100644 --- a/tests/pyutils/test_merge_kwargs.py +++ b/tests/pyutils/test_merge_kwargs.py @@ -1,5 +1,6 @@ from graphql.pyutils import merge_kwargs + try: from typing import TypedDict except ImportError: # Python < 3.8 diff --git a/tests/pyutils/test_natural_compare.py b/tests/pyutils/test_natural_compare.py index dd3bb8d8..f3c2584c 100644 --- a/tests/pyutils/test_natural_compare.py +++ b/tests/pyutils/test_natural_compare.py @@ -1,5 +1,6 @@ from graphql.pyutils import natural_comparison_key + key = natural_comparison_key diff --git a/tests/star_wars_data.py b/tests/star_wars_data.py index 167d2a32..55951692 100644 --- a/tests/star_wars_data.py +++ b/tests/star_wars_data.py @@ -7,6 +7,7 @@ from typing import Awaitable, Collection, Dict, Iterator, Optional + __all__ = ["get_droid", "get_friends", "get_hero", "get_human", "get_secret_backstory"] # These are classes which correspond to the schema. diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py index 575bf482..65eeeebe 100644 --- a/tests/star_wars_schema.py +++ b/tests/star_wars_schema.py @@ -62,6 +62,7 @@ get_secret_backstory, ) + __all__ = ["star_wars_schema"] # We begin by setting up our schema. 
diff --git a/tests/test_docs.py b/tests/test_docs.py index e7120aa4..a85ea6aa 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -5,6 +5,7 @@ from .utils import dedent + Scope = Dict[str, Any] diff --git a/tests/test_star_wars_validation.py b/tests/test_star_wars_validation.py index 798a28ea..2c469b5f 100644 --- a/tests/test_star_wars_validation.py +++ b/tests/test_star_wars_validation.py @@ -1,7 +1,7 @@ from typing import List from graphql.error import GraphQLError -from graphql.language import parse, Source +from graphql.language import Source, parse from graphql.validation import validate from .star_wars_schema import star_wars_schema diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 7e9395e0..670f5d0f 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -4,20 +4,15 @@ operations on a simulated user registry database backend. """ -from asyncio import sleep, wait +from asyncio import create_task, sleep, wait from collections import defaultdict from enum import Enum from inspect import isawaitable from typing import Any, Dict, List, NamedTuple, Optional -from asyncio import create_task - from pytest import fixture, mark from graphql import ( - graphql, - parse, - subscribe, GraphQLArgument, GraphQLBoolean, GraphQLEnumType, @@ -30,10 +25,12 @@ GraphQLObjectType, GraphQLSchema, GraphQLString, + graphql, + parse, + subscribe, ) - -from graphql.pyutils import SimplePubSub, SimplePubSubIterator from graphql.execution.map_async_iterator import MapAsyncIterator +from graphql.pyutils import SimplePubSub, SimplePubSubIterator class User(NamedTuple): diff --git a/tests/test_version.py b/tests/test_version.py index fe575f04..2a4ba509 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -5,10 +5,11 @@ VersionInfo, version, version_info, - version_js, version_info_js, + version_js, ) + _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(?:(a|b|r?c)(\d+))?$") diff --git a/tests/type/test_assert_name.py 
b/tests/type/test_assert_name.py index 0d535d42..a26a7ea0 100644 --- a/tests/type/test_assert_name.py +++ b/tests/type/test_assert_name.py @@ -1,7 +1,7 @@ from pytest import mark, raises from graphql.error import GraphQLError -from graphql.type import assert_name, assert_enum_value_name +from graphql.type import assert_enum_value_name, assert_name def describe_assert_name(): diff --git a/tests/type/test_custom_scalars.py b/tests/type/test_custom_scalars.py index a4dac893..bf63c0b5 100644 --- a/tests/type/test_custom_scalars.py +++ b/tests/type/test_custom_scalars.py @@ -15,6 +15,7 @@ ) from graphql.utilities import value_from_ast_untyped + # this test is not (yet) part of GraphQL.js, see # https://github.com/graphql/graphql-js/issues/2657 diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 1c5b5bfc..8515de89 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1,21 +1,20 @@ from enum import Enum from math import isnan, nan -from typing import cast, Dict +from typing import Dict, cast from pytest import mark, raises from graphql.error import GraphQLError from graphql.language import ( - parse_value, EnumTypeDefinitionNode, EnumTypeExtensionNode, EnumValueNode, - Node, InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, InputValueDefinitionNode, InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, + Node, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, ScalarTypeDefinitionNode, @@ -23,15 +22,16 @@ StringValueNode, TypeDefinitionNode, TypeExtensionNode, - ValueNode, UnionTypeDefinitionNode, UnionTypeExtensionNode, + ValueNode, + parse_value, ) from graphql.pyutils import Undefined from graphql.type import ( GraphQLArgument, - GraphQLEnumValue, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLInputObjectType, @@ -45,6 +45,7 @@ GraphQLUnionType, ) + ScalarType = GraphQLScalarType("Scalar") ObjectType = GraphQLObjectType("Object", {}) InterfaceType = 
GraphQLInterfaceType("Interface", {}) diff --git a/tests/type/test_directives.py b/tests/type/test_directives.py index 278d1d05..7398e108 100644 --- a/tests/type/test_directives.py +++ b/tests/type/test_directives.py @@ -1,7 +1,7 @@ from pytest import raises from graphql.error import GraphQLError -from graphql.language import DirectiveLocation, DirectiveDefinitionNode, Node +from graphql.language import DirectiveDefinitionNode, DirectiveLocation, Node from graphql.type import GraphQLArgument, GraphQLDirective, GraphQLInt, GraphQLString diff --git a/tests/type/test_enum.py b/tests/type/test_enum.py index f85a466b..0de836b3 100644 --- a/tests/type/test_enum.py +++ b/tests/type/test_enum.py @@ -16,6 +16,7 @@ ) from graphql.utilities import introspection_from_schema + ColorType = GraphQLEnumType("Color", values={"RED": 0, "GREEN": 1, "BLUE": 2}) diff --git a/tests/type/test_extensions.py b/tests/type/test_extensions.py index 8c062458..19eefd71 100644 --- a/tests/type/test_extensions.py +++ b/tests/type/test_extensions.py @@ -17,6 +17,7 @@ GraphQLUnionType, ) + dummy_type = GraphQLScalarType("DummyScalar") bad_extensions = [param([], id="list"), param({1: "ext"}, id="non_string_key")] diff --git a/tests/type/test_introspection.py b/tests/type/test_introspection.py index f51d8f06..20f26c20 100644 --- a/tests/type/test_introspection.py +++ b/tests/type/test_introspection.py @@ -1,5 +1,5 @@ from graphql import graphql_sync -from graphql.utilities import get_introspection_query, build_schema +from graphql.utilities import build_schema, get_introspection_query def describe_introspection(): diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index 60a23877..c2ed29ef 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -5,8 +5,8 @@ from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, - GraphQLDeprecatedDirective, GraphQLBoolean, + GraphQLDeprecatedDirective, GraphQLDirective, GraphQLEnumType, 
GraphQLFloat, @@ -53,12 +53,12 @@ is_leaf_type, is_list_type, is_named_type, - is_required_argument, - is_required_input_field, is_non_null_type, is_nullable_type, is_object_type, is_output_type, + is_required_argument, + is_required_input_field, is_scalar_type, is_specified_directive, is_specified_scalar_type, @@ -67,6 +67,7 @@ is_wrapping_type, ) + ObjectType = GraphQLObjectType("Object", {}) InterfaceType = GraphQLInterfaceType("Interface", {}) UnionType = GraphQLUnionType("Union", types=[ObjectType]) diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py index e5dd7c6f..f9d04f7a 100644 --- a/tests/type/test_scalars.py +++ b/tests/type/test_scalars.py @@ -7,11 +7,11 @@ from graphql.language import parse_value as parse_value_to_ast from graphql.pyutils import Undefined from graphql.type import ( - GraphQLInt, - GraphQLFloat, - GraphQLString, GraphQLBoolean, + GraphQLFloat, GraphQLID, + GraphQLInt, + GraphQLString, ) diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index ef483b54..8dfc2c48 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -15,8 +15,8 @@ GraphQLDirective, GraphQLField, GraphQLFieldMap, - GraphQLInputObjectType, GraphQLInputField, + GraphQLInputObjectType, GraphQLInt, GraphQLInterfaceType, GraphQLList, diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 875d8902..20b8f4eb 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -3,27 +3,16 @@ from pytest import mark, raises -from graphql.language import parse, DirectiveLocation +from graphql.language import DirectiveLocation, parse from graphql.pyutils import inspect from graphql.type import ( - assert_directive, - assert_enum_type, - assert_input_object_type, - assert_interface_type, - assert_object_type, - assert_scalar_type, - assert_union_type, - assert_valid_schema, - is_input_type, - is_output_type, - validate_schema, GraphQLArgument, GraphQLDirective, GraphQLEnumType, GraphQLField, 
GraphQLInputField, - GraphQLInputType, GraphQLInputObjectType, + GraphQLInputType, GraphQLInt, GraphQLInterfaceType, GraphQLList, @@ -34,11 +23,23 @@ GraphQLSchema, GraphQLString, GraphQLUnionType, + assert_directive, + assert_enum_type, + assert_input_object_type, + assert_interface_type, + assert_object_type, + assert_scalar_type, + assert_union_type, + assert_valid_schema, + is_input_type, + is_output_type, + validate_schema, ) from graphql.utilities import build_schema, extend_schema from ..utils import dedent + SomeSchema = build_schema( """ scalar SomeScalar diff --git a/tests/utilities/test_ast_to_dict.py b/tests/utilities/test_ast_to_dict.py index d0fa1b24..a755e8aa 100644 --- a/tests/utilities/test_ast_to_dict.py +++ b/tests/utilities/test_ast_to_dict.py @@ -1,4 +1,4 @@ -from graphql.language import parse, FieldNode, NameNode, OperationType, SelectionSetNode +from graphql.language import FieldNode, NameNode, OperationType, SelectionSetNode, parse from graphql.utilities import ast_to_dict diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index ba9c6050..1ae7ffb5 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -4,24 +4,24 @@ from pytest import mark, raises from graphql import graphql_sync -from graphql.language import parse, print_ast, DocumentNode, InterfaceTypeDefinitionNode +from graphql.language import DocumentNode, InterfaceTypeDefinitionNode, parse, print_ast from graphql.type import ( - GraphQLDeprecatedDirective, - GraphQLIncludeDirective, - GraphQLSchema, - GraphQLSkipDirective, - GraphQLSpecifiedByDirective, - GraphQLBoolean, - GraphQLFloat, - GraphQLID, - GraphQLInt, - GraphQLString, GraphQLArgument, + GraphQLBoolean, + GraphQLDeprecatedDirective, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLFloat, + GraphQLID, + GraphQLIncludeDirective, GraphQLInputField, + GraphQLInt, GraphQLNamedType, + GraphQLSchema, + GraphQLSkipDirective, + 
GraphQLSpecifiedByDirective, + GraphQLString, assert_directive, assert_enum_type, assert_input_object_type, diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index b566ba1b..07123178 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -18,8 +18,8 @@ assert_enum_type, ) from graphql.utilities import ( - build_schema, build_client_schema, + build_schema, introspection_from_schema, print_schema, ) diff --git a/tests/utilities/test_concat_ast.py b/tests/utilities/test_concat_ast.py index 7d25d0b3..4d764562 100644 --- a/tests/utilities/test_concat_ast.py +++ b/tests/utilities/test_concat_ast.py @@ -1,4 +1,4 @@ -from graphql.language import parse, print_ast, Source +from graphql.language import Source, parse, print_ast from graphql.utilities import concat_ast from ..utils import dedent diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 8a844abd..84391714 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -25,15 +25,11 @@ assert_union_type, validate_schema, ) -from graphql.utilities import ( - build_schema, - concat_ast, - extend_schema, - print_schema, -) +from graphql.utilities import build_schema, concat_ast, extend_schema, print_schema from ..utils import dedent + TypeWithAstNode = Union[ GraphQLArgument, GraphQLEnumValue, diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index f0ad2ab7..72dde601 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -1,7 +1,7 @@ from graphql.type import ( - GraphQLSchema, GraphQLDeprecatedDirective, GraphQLIncludeDirective, + GraphQLSchema, GraphQLSkipDirective, GraphQLSpecifiedByDirective, ) diff --git a/tests/utilities/test_get_introspection_query.py b/tests/utilities/test_get_introspection_query.py index 
279112bb..57e37145 100644 --- a/tests/utilities/test_get_introspection_query.py +++ b/tests/utilities/test_get_introspection_query.py @@ -1,11 +1,11 @@ import re - from typing import Pattern from graphql.language import parse from graphql.utilities import build_schema, get_introspection_query from graphql.validation import validate + dummy_schema = build_schema( """ type Query { diff --git a/tests/utilities/test_get_operation_root_type.py b/tests/utilities/test_get_operation_root_type.py index f0e8a4e3..6d6d7f76 100644 --- a/tests/utilities/test_get_operation_root_type.py +++ b/tests/utilities/test_get_operation_root_type.py @@ -2,11 +2,11 @@ from graphql.error import GraphQLError from graphql.language import ( - parse, DocumentNode, OperationDefinitionNode, OperationTypeDefinitionNode, SchemaDefinitionNode, + parse, ) from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import get_operation_root_type diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index d06788c0..96ec968f 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -1,9 +1,9 @@ -from graphql.type import GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLString +from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import ( + IntrospectionQuery, build_client_schema, - print_schema, introspection_from_schema, - IntrospectionQuery, + print_schema, ) from ..utils import dedent diff --git a/tests/utilities/test_lexicographic_sort_schema.py b/tests/utilities/test_lexicographic_sort_schema.py index e93b680b..43740178 100644 --- a/tests/utilities/test_lexicographic_sort_schema.py +++ b/tests/utilities/test_lexicographic_sort_schema.py @@ -1,4 +1,4 @@ -from graphql.utilities import build_schema, print_schema, lexicographic_sort_schema +from graphql.utilities import 
build_schema, lexicographic_sort_schema, print_schema from ..utils import dedent diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 37337b23..4bc5a266 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -1,12 +1,14 @@ -from typing import cast, Any, Dict +from typing import Any, Dict, cast from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, GraphQLBoolean, + GraphQLDirective, GraphQLEnumType, GraphQLField, GraphQLFloat, + GraphQLInputField, GraphQLInputObjectType, GraphQLInt, GraphQLInterfaceType, @@ -17,13 +19,11 @@ GraphQLSchema, GraphQLString, GraphQLUnionType, - GraphQLInputField, - GraphQLDirective, ) from graphql.utilities import ( build_schema, - print_schema, print_introspection_schema, + print_schema, print_value, ) diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 67e5b4e1..272a3fdc 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -7,9 +7,11 @@ from graphql.language import Lexer, Source, TokenKind, parse from graphql.utilities import strip_ignored_characters -from ..fixtures import kitchen_sink_query, kitchen_sink_sdl # noqa: F401 +from ..fixtures import kitchen_sink_query # noqa: F401 +from ..fixtures import kitchen_sink_sdl from ..utils import dedent + ignored_tokens = [ # UnicodeBOM "\uFEFF", # Byte Order Mark (U+FEFF) diff --git a/tests/utilities/test_type_from_ast.py b/tests/utilities/test_type_from_ast.py index 9842a31f..dd5cd6e9 100644 --- a/tests/utilities/test_type_from_ast.py +++ b/tests/utilities/test_type_from_ast.py @@ -1,6 +1,6 @@ from pytest import raises -from graphql.language import parse_type, TypeNode +from graphql.language import TypeNode, parse_type from graphql.type import GraphQLList, GraphQLNonNull, GraphQLObjectType from graphql.utilities import type_from_ast diff --git 
a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index 650697f4..28445257 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -4,11 +4,11 @@ Node, OperationDefinitionNode, SelectionSetNode, + Visitor, parse, parse_value, print_ast, visit, - Visitor, ) from graphql.type import GraphQLSchema, get_named_type, is_composite_type from graphql.utilities import TypeInfo, TypeInfoVisitor, build_schema diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py index cd72e278..6e969f93 100644 --- a/tests/utilities/test_value_from_ast.py +++ b/tests/utilities/test_value_from_ast.py @@ -1,7 +1,7 @@ from math import isnan, nan from typing import Any, Dict, Optional -from graphql.language import parse_value, ValueNode +from graphql.language import ValueNode, parse_value from graphql.pyutils import Undefined from graphql.type import ( GraphQLBoolean, diff --git a/tests/utilities/test_value_from_ast_untyped.py b/tests/utilities/test_value_from_ast_untyped.py index 4b82138d..78c4edeb 100644 --- a/tests/utilities/test_value_from_ast_untyped.py +++ b/tests/utilities/test_value_from_ast_untyped.py @@ -1,7 +1,7 @@ from math import nan from typing import Any, Dict, Optional -from graphql.language import parse_value, FloatValueNode, IntValueNode +from graphql.language import FloatValueNode, IntValueNode, parse_value from graphql.pyutils import Undefined from graphql.utilities import value_from_ast_untyped diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index a6e55a48..d6392286 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -3,4 +3,5 @@ from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings + __all__ = ["dedent", "gen_fuzz_strings"] diff --git a/tests/utils/dedent.py b/tests/utils/dedent.py index a65c2d96..f22ae40d 100644 --- a/tests/utils/dedent.py +++ b/tests/utils/dedent.py @@ -1,5 +1,6 @@ from textwrap import dedent as _dedent + __all__ 
= ["dedent"] diff --git a/tests/utils/gen_fuzz_strings.py b/tests/utils/gen_fuzz_strings.py index 306984b7..50f71589 100644 --- a/tests/utils/gen_fuzz_strings.py +++ b/tests/utils/gen_fuzz_strings.py @@ -1,6 +1,7 @@ from itertools import product from typing import Generator + __all__ = ["gen_fuzz_strings"] diff --git a/tests/validation/__init__.py b/tests/validation/__init__.py index 5449639d..235d2846 100644 --- a/tests/validation/__init__.py +++ b/tests/validation/__init__.py @@ -2,4 +2,5 @@ from pytest import register_assert_rewrite + register_assert_rewrite("tests.validation.harness") diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 214a91bc..01bb7641 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -4,9 +4,10 @@ from graphql.language import parse from graphql.type import GraphQLSchema from graphql.utilities import build_schema -from graphql.validation import ValidationRule, SDLValidationRule +from graphql.validation import SDLValidationRule, ValidationRule from graphql.validation.validate import validate, validate_sdl + __all__ = [ "test_schema", "assert_validation_errors", diff --git a/tests/validation/test_executable_definitions.py b/tests/validation/test_executable_definitions.py index 4a21c63b..bcfb8538 100644 --- a/tests/validation/test_executable_definitions.py +++ b/tests/validation/test_executable_definitions.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, ExecutableDefinitionsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_fields_on_correct_type.py b/tests/validation/test_fields_on_correct_type.py index 949e2c28..ca279070 100644 --- a/tests/validation/test_fields_on_correct_type.py +++ b/tests/validation/test_fields_on_correct_type.py @@ -3,10 +3,11 @@ from graphql.language import parse from graphql.type import GraphQLSchema from graphql.utilities import build_schema -from 
graphql.validation import validate, FieldsOnCorrectTypeRule +from graphql.validation import FieldsOnCorrectTypeRule, validate from .harness import assert_validation_errors + test_schema = build_schema( """ interface Pet { diff --git a/tests/validation/test_fragments_on_composite_types.py b/tests/validation/test_fragments_on_composite_types.py index a4957668..1f9be282 100644 --- a/tests/validation/test_fragments_on_composite_types.py +++ b/tests/validation/test_fragments_on_composite_types.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, FragmentsOnCompositeTypesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_known_argument_names.py b/tests/validation/test_known_argument_names.py index 3291b7c9..03055459 100644 --- a/tests/validation/test_known_argument_names.py +++ b/tests/validation/test_known_argument_names.py @@ -6,7 +6,8 @@ KnownArgumentNamesOnDirectivesRule, ) -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors + assert_errors = partial(assert_validation_errors, KnownArgumentNamesRule) diff --git a/tests/validation/test_known_directives.py b/tests/validation/test_known_directives.py index 3c837ba0..8c451926 100644 --- a/tests/validation/test_known_directives.py +++ b/tests/validation/test_known_directives.py @@ -3,7 +3,8 @@ from graphql.utilities import build_schema from graphql.validation import KnownDirectivesRule -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors + schema_with_directives = build_schema( """ diff --git a/tests/validation/test_known_fragment_names.py b/tests/validation/test_known_fragment_names.py index 8a9b864b..1f95c70d 100644 --- a/tests/validation/test_known_fragment_names.py +++ b/tests/validation/test_known_fragment_names.py @@ 
-4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, KnownFragmentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_known_type_names.py b/tests/validation/test_known_type_names.py index 4b4683ae..e4ef19c3 100644 --- a/tests/validation/test_known_type_names.py +++ b/tests/validation/test_known_type_names.py @@ -3,7 +3,8 @@ from graphql.utilities import build_schema from graphql.validation import KnownTypeNamesRule -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors + assert_errors = partial(assert_validation_errors, KnownTypeNamesRule) diff --git a/tests/validation/test_lone_anonymous_operation.py b/tests/validation/test_lone_anonymous_operation.py index 83e431bf..d3930d64 100644 --- a/tests/validation/test_lone_anonymous_operation.py +++ b/tests/validation/test_lone_anonymous_operation.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, LoneAnonymousOperationRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_lone_schema_definition.py b/tests/validation/test_lone_schema_definition.py index 9b6d88fa..66080597 100644 --- a/tests/validation/test_lone_schema_definition.py +++ b/tests/validation/test_lone_schema_definition.py @@ -5,6 +5,7 @@ from .harness import assert_sdl_validation_errors + assert_sdl_errors = partial(assert_sdl_validation_errors, LoneSchemaDefinitionRule) assert_sdl_valid = partial(assert_sdl_errors, errors=[]) diff --git a/tests/validation/test_no_fragment_cycles.py b/tests/validation/test_no_fragment_cycles.py index 3bc60a0a..2eeca95a 100644 --- a/tests/validation/test_no_fragment_cycles.py +++ b/tests/validation/test_no_fragment_cycles.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = 
partial(assert_validation_errors, NoFragmentCyclesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_no_schema_introspection.py b/tests/validation/test_no_schema_introspection.py index 03f1ddc8..5e95e621 100644 --- a/tests/validation/test_no_schema_introspection.py +++ b/tests/validation/test_no_schema_introspection.py @@ -5,6 +5,7 @@ from .harness import assert_validation_errors + schema = build_schema( """ type Query { diff --git a/tests/validation/test_no_undefined_variables.py b/tests/validation/test_no_undefined_variables.py index f9537234..890b629a 100644 --- a/tests/validation/test_no_undefined_variables.py +++ b/tests/validation/test_no_undefined_variables.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, NoUndefinedVariablesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_no_unused_fragments.py b/tests/validation/test_no_unused_fragments.py index f317d1f5..8c25956b 100644 --- a/tests/validation/test_no_unused_fragments.py +++ b/tests/validation/test_no_unused_fragments.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, NoUnusedFragmentsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_no_unused_variables.py b/tests/validation/test_no_unused_variables.py index 7366ba55..4ed8b4b1 100644 --- a/tests/validation/test_no_unused_variables.py +++ b/tests/validation/test_no_unused_variables.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, NoUnusedVariablesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py index 5f7800f7..6a190485 100644 --- a/tests/validation/test_overlapping_fields_can_be_merged.py +++ 
b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -5,6 +5,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, OverlappingFieldsCanBeMergedRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_possible_fragment_spreads.py b/tests/validation/test_possible_fragment_spreads.py index 791edfc1..a450f890 100644 --- a/tests/validation/test_possible_fragment_spreads.py +++ b/tests/validation/test_possible_fragment_spreads.py @@ -5,6 +5,7 @@ from .harness import assert_validation_errors + test_schema = build_schema( """ interface Being { diff --git a/tests/validation/test_possible_type_extensions.py b/tests/validation/test_possible_type_extensions.py index 473e0c88..f6bfbd44 100644 --- a/tests/validation/test_possible_type_extensions.py +++ b/tests/validation/test_possible_type_extensions.py @@ -5,6 +5,7 @@ from .harness import assert_sdl_validation_errors + assert_errors = partial(assert_sdl_validation_errors, PossibleTypeExtensionsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_provided_required_arguments.py b/tests/validation/test_provided_required_arguments.py index 86bb5233..7a7922ae 100644 --- a/tests/validation/test_provided_required_arguments.py +++ b/tests/validation/test_provided_required_arguments.py @@ -6,7 +6,8 @@ ProvidedRequiredArgumentsOnDirectivesRule, ) -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors + assert_errors = partial(assert_validation_errors, ProvidedRequiredArgumentsRule) diff --git a/tests/validation/test_scalar_leafs.py b/tests/validation/test_scalar_leafs.py index 5d7b83d1..364df493 100644 --- a/tests/validation/test_scalar_leafs.py +++ b/tests/validation/test_scalar_leafs.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, 
ScalarLeafsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_single_field_subscriptions.py b/tests/validation/test_single_field_subscriptions.py index c6278f77..580f3e18 100644 --- a/tests/validation/test_single_field_subscriptions.py +++ b/tests/validation/test_single_field_subscriptions.py @@ -5,6 +5,7 @@ from .harness import assert_validation_errors + schema = build_schema( """ type Message { diff --git a/tests/validation/test_unique_argument_definition_names.py b/tests/validation/test_unique_argument_definition_names.py index 9d6625a7..5c1e8a1b 100644 --- a/tests/validation/test_unique_argument_definition_names.py +++ b/tests/validation/test_unique_argument_definition_names.py @@ -6,6 +6,7 @@ from .harness import assert_sdl_validation_errors + assert_sdl_errors = partial( assert_sdl_validation_errors, UniqueArgumentDefinitionNamesRule ) diff --git a/tests/validation/test_unique_argument_names.py b/tests/validation/test_unique_argument_names.py index ef82f67e..e7d5ab16 100644 --- a/tests/validation/test_unique_argument_names.py +++ b/tests/validation/test_unique_argument_names.py @@ -1,8 +1,10 @@ from functools import partial + from graphql.validation import UniqueArgumentNamesRule from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, UniqueArgumentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_directive_names.py b/tests/validation/test_unique_directive_names.py index 1cb9dc58..54d35638 100644 --- a/tests/validation/test_unique_directive_names.py +++ b/tests/validation/test_unique_directive_names.py @@ -5,6 +5,7 @@ from .harness import assert_sdl_validation_errors + assert_errors = partial(assert_sdl_validation_errors, UniqueDirectiveNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_directives_per_location.py b/tests/validation/test_unique_directives_per_location.py index 
29842087..ea6993d0 100644 --- a/tests/validation/test_unique_directives_per_location.py +++ b/tests/validation/test_unique_directives_per_location.py @@ -4,7 +4,8 @@ from graphql.utilities import extend_schema from graphql.validation import UniqueDirectivesPerLocationRule -from .harness import assert_validation_errors, assert_sdl_validation_errors, test_schema +from .harness import assert_sdl_validation_errors, assert_validation_errors, test_schema + extension_sdl = """ directive @directive on FIELD | FRAGMENT_DEFINITION diff --git a/tests/validation/test_unique_enum_value_names.py b/tests/validation/test_unique_enum_value_names.py index 5611b45c..560bf97e 100644 --- a/tests/validation/test_unique_enum_value_names.py +++ b/tests/validation/test_unique_enum_value_names.py @@ -5,6 +5,7 @@ from .harness import assert_sdl_validation_errors + assert_errors = partial(assert_sdl_validation_errors, UniqueEnumValueNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_field_definition_names.py b/tests/validation/test_unique_field_definition_names.py index 5e9b1c21..eefa0f3f 100644 --- a/tests/validation/test_unique_field_definition_names.py +++ b/tests/validation/test_unique_field_definition_names.py @@ -7,6 +7,7 @@ from .harness import assert_sdl_validation_errors + assert_errors = partial(assert_sdl_validation_errors, UniqueFieldDefinitionNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_fragment_names.py b/tests/validation/test_unique_fragment_names.py index 37370c9d..b2270260 100644 --- a/tests/validation/test_unique_fragment_names.py +++ b/tests/validation/test_unique_fragment_names.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, UniqueFragmentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_input_field_names.py 
b/tests/validation/test_unique_input_field_names.py index 857118da..0c9e2b48 100644 --- a/tests/validation/test_unique_input_field_names.py +++ b/tests/validation/test_unique_input_field_names.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, UniqueInputFieldNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_operation_names.py b/tests/validation/test_unique_operation_names.py index f2ba8a23..e80dd611 100644 --- a/tests/validation/test_unique_operation_names.py +++ b/tests/validation/test_unique_operation_names.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, UniqueOperationNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_operation_types.py b/tests/validation/test_unique_operation_types.py index c341e84c..fd1028ca 100644 --- a/tests/validation/test_unique_operation_types.py +++ b/tests/validation/test_unique_operation_types.py @@ -5,6 +5,7 @@ from .harness import assert_sdl_validation_errors + assert_errors = partial(assert_sdl_validation_errors, UniqueOperationTypesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_type_names.py b/tests/validation/test_unique_type_names.py index 1ff03b8f..0d809f81 100644 --- a/tests/validation/test_unique_type_names.py +++ b/tests/validation/test_unique_type_names.py @@ -5,6 +5,7 @@ from .harness import assert_sdl_validation_errors + assert_errors = partial(assert_sdl_validation_errors, UniqueTypeNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_variable_names.py b/tests/validation/test_unique_variable_names.py index 9b5c10e6..7c50ccda 100644 --- a/tests/validation/test_unique_variable_names.py +++ b/tests/validation/test_unique_variable_names.py @@ -4,6 +4,7 @@ from .harness import 
assert_validation_errors + assert_errors = partial(assert_validation_errors, UniqueVariableNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_values_of_correct_type.py b/tests/validation/test_values_of_correct_type.py index a41cd9f6..7208ddca 100644 --- a/tests/validation/test_values_of_correct_type.py +++ b/tests/validation/test_values_of_correct_type.py @@ -5,14 +5,15 @@ GraphQLArgument, GraphQLField, GraphQLObjectType, - GraphQLSchema, GraphQLScalarType, + GraphQLSchema, GraphQLString, ) from graphql.validation import ValuesOfCorrectTypeRule from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, ValuesOfCorrectTypeRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_variables_are_input_types.py b/tests/validation/test_variables_are_input_types.py index 9440f1b2..86ee3d80 100644 --- a/tests/validation/test_variables_are_input_types.py +++ b/tests/validation/test_variables_are_input_types.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, VariablesAreInputTypesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_variables_in_allowed_position.py b/tests/validation/test_variables_in_allowed_position.py index 03fbbcbb..429c781f 100644 --- a/tests/validation/test_variables_in_allowed_position.py +++ b/tests/validation/test_variables_in_allowed_position.py @@ -4,6 +4,7 @@ from .harness import assert_validation_errors + assert_errors = partial(assert_validation_errors, VariablesInAllowedPositionRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tox.ini b/tox.ini index 0a3cd9c8..7887ed5c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{7,8,9,10}, black, flake8, mypy, docs, manifest +envlist = py3{7,8,9,10}, black, flake8, isort, mypy, docs, manifest isolated_build = true [gh-actions] @@ -21,6 +21,12 @@ deps = 
flake8>=5,<6 commands = flake8 src tests setup.py +[testenv:isort] +basepython = python3.9 +deps = isort>=5.10,<6 +commands = + isort src tests setup.py --check-only + [testenv:mypy] basepython = python3.9 deps = From 50ace3ad99c68d6acb668617d8d661eade5f295d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 15:14:52 +0200 Subject: [PATCH 009/230] Revert isort on package init files These init files are manually sorted for better documentation and also sometimes to avoid circular imports. --- pyproject.toml | 4 +- src/graphql/__init__.py | 587 ++++++++++-------- src/graphql/error/__init__.py | 3 +- src/graphql/error/graphql_error.py | 6 +- src/graphql/execution/__init__.py | 11 +- src/graphql/language/__init__.py | 152 ++--- src/graphql/language/visitor.py | 1 + src/graphql/pyutils/__init__.py | 15 +- src/graphql/subscription/__init__.py | 3 +- src/graphql/type/__init__.py | 241 +++---- src/graphql/utilities/__init__.py | 97 +-- .../utilities/get_introspection_query.py | 3 +- src/graphql/validation/__init__.py | 45 +- src/graphql/validation/rules/__init__.py | 1 - 14 files changed, 629 insertions(+), 540 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 805a581e..4d47bb5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ target-version = ['py37', 'py38', 'py39', 'py310'] [tool.coverage.run] branch = true -source = ["src"] +source = ["src", "tests"] omit = [ "*/conftest.py", "*/test_*_fuzz.py", @@ -96,6 +96,8 @@ exclude_lines = [ ignore_errors = true [tool.isort] +src_paths = ["src", "tests"] +skip_glob = ["src/**/__init__.py"] profile = "black" force_single_line = false lines_after_imports = 2 diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 6ae64f15..f1b21ab3 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -38,6 +38,11 @@ # The GraphQL-core 3 and GraphQL.js version info. 
+from .version import version, version_info, version_js, version_info_js + +# Utilities for compatibility with the Python language. +from .pyutils import Undefined, UndefinedType + # Create, format, and print GraphQL errors. from .error import ( GraphQLError, @@ -47,305 +52,333 @@ located_error, ) -# Execute GraphQL documents. -from .execution import ExecutionContext # Types; Subscription; Middleware -from .execution import ( - ExecutionResult, - FormattedExecutionResult, - MapAsyncIterator, - Middleware, - MiddlewareManager, - create_source_event_stream, - default_field_resolver, - default_type_resolver, - execute, - execute_sync, - get_argument_values, - get_directive_values, - get_variable_values, - subscribe, -) - -# The primary entry point into fulfilling a GraphQL request. -from .graphql import graphql, graphql_sync - # Parse and operate on GraphQL language source files. from .language import ( - BREAK, # Print source location; Lex; Parse; Print; Visit; Predicates; Types; AST nodes; Each kind of AST node -) -from .language import ( - IDLE, - REMOVE, + Source, + get_location, + # Print source location + print_location, + print_source_location, + # Lex + Lexer, + TokenKind, + # Parse + parse, + parse_value, + parse_const_value, + parse_type, + # Print + print_ast, + # Visit + visit, + ParallelVisitor, + Visitor, + VisitorAction, + VisitorKeyMap, + BREAK, SKIP, - ArgumentNode, - BooleanValueNode, - ConstArgumentNode, - ConstDirectiveNode, - ConstListValueNode, - ConstObjectFieldNode, - ConstObjectValueNode, - ConstValueNode, - DefinitionNode, - DirectiveDefinitionNode, + REMOVE, + IDLE, DirectiveLocation, - DirectiveNode, + # Predicates + is_definition_node, + is_executable_definition_node, + is_selection_node, + is_value_node, + is_const_value_node, + is_type_node, + is_type_system_definition_node, + is_type_definition_node, + is_type_system_extension_node, + is_type_extension_node, + # Types + SourceLocation, + Location, + Token, + # AST nodes + Node, + # Each 
kind of AST node + NameNode, DocumentNode, - EnumTypeDefinitionNode, - EnumTypeExtensionNode, - EnumValueDefinitionNode, - EnumValueNode, + DefinitionNode, ExecutableDefinitionNode, - FieldDefinitionNode, + OperationDefinitionNode, + OperationType, + VariableDefinitionNode, + VariableNode, + SelectionSetNode, + SelectionNode, FieldNode, - FloatValueNode, - FragmentDefinitionNode, + ArgumentNode, + ConstArgumentNode, FragmentSpreadNode, InlineFragmentNode, - InputObjectTypeDefinitionNode, - InputObjectTypeExtensionNode, - InputValueDefinitionNode, - InterfaceTypeDefinitionNode, - InterfaceTypeExtensionNode, + FragmentDefinitionNode, + ValueNode, + ConstValueNode, IntValueNode, - Lexer, - ListTypeNode, + FloatValueNode, + StringValueNode, + BooleanValueNode, + NullValueNode, + EnumValueNode, ListValueNode, - Location, + ConstListValueNode, + ObjectValueNode, + ConstObjectValueNode, + ObjectFieldNode, + ConstObjectFieldNode, + DirectiveNode, + ConstDirectiveNode, + TypeNode, NamedTypeNode, - NameNode, - Node, + ListTypeNode, NonNullTypeNode, - NullValueNode, - ObjectFieldNode, - ObjectTypeDefinitionNode, - ObjectTypeExtensionNode, - ObjectValueNode, - OperationDefinitionNode, - OperationType, + TypeSystemDefinitionNode, + SchemaDefinitionNode, OperationTypeDefinitionNode, - ParallelVisitor, + TypeDefinitionNode, ScalarTypeDefinitionNode, - ScalarTypeExtensionNode, - SchemaDefinitionNode, + ObjectTypeDefinitionNode, + FieldDefinitionNode, + InputValueDefinitionNode, + InterfaceTypeDefinitionNode, + UnionTypeDefinitionNode, + EnumTypeDefinitionNode, + EnumValueDefinitionNode, + InputObjectTypeDefinitionNode, + DirectiveDefinitionNode, + TypeSystemExtensionNode, SchemaExtensionNode, - SelectionNode, - SelectionSetNode, - Source, - SourceLocation, - StringValueNode, - Token, - TokenKind, - TypeDefinitionNode, TypeExtensionNode, - TypeNode, - TypeSystemDefinitionNode, - TypeSystemExtensionNode, - UnionTypeDefinitionNode, + ScalarTypeExtensionNode, + 
ObjectTypeExtensionNode, + InterfaceTypeExtensionNode, UnionTypeExtensionNode, - ValueNode, - VariableDefinitionNode, - VariableNode, - Visitor, - VisitorAction, - VisitorKeyMap, - get_location, - is_const_value_node, - is_definition_node, - is_executable_definition_node, - is_selection_node, - is_type_definition_node, - is_type_extension_node, - is_type_node, - is_type_system_definition_node, - is_type_system_extension_node, - is_value_node, - parse, - parse_const_value, - parse_type, - parse_value, - print_ast, - print_location, - print_source_location, - visit, + EnumTypeExtensionNode, + InputObjectTypeExtensionNode, ) -# Utilities for compatibility with the Python language. -from .pyutils import Undefined, UndefinedType +# Utilities for operating on GraphQL type schema and parsed sources. +from .utilities import ( + # Produce the GraphQL query recommended for a full schema introspection. + # Accepts optional IntrospectionOptions. + get_introspection_query, + IntrospectionQuery, + # Get the target Operation from a Document. + get_operation_ast, + # Get the Type for the target Operation AST. + get_operation_root_type, + # Convert a GraphQLSchema to an IntrospectionQuery. + introspection_from_schema, + # Build a GraphQLSchema from an introspection result. + build_client_schema, + # Build a GraphQLSchema from a parsed GraphQL Schema language AST. + build_ast_schema, + # Build a GraphQLSchema from a GraphQL schema language document. + build_schema, + # Extend an existing GraphQLSchema from a parsed GraphQL Schema language AST. + extend_schema, + # Sort a GraphQLSchema. + lexicographic_sort_schema, + # Print a GraphQLSchema to GraphQL Schema language. + print_schema, + # Print a GraphQLType to GraphQL Schema language. + print_type, + # Prints the built-in introspection schema in the Schema Language format. + print_introspection_schema, + # Create a GraphQLType from a GraphQL language AST. + type_from_ast, + # Convert a language AST to a dictionary. 
+ ast_to_dict, + # Create a Python value from a GraphQL language AST with a Type. + value_from_ast, + # Create a Python value from a GraphQL language AST without a Type. + value_from_ast_untyped, + # Create a GraphQL language AST from a Python value. + ast_from_value, + # A helper to use within recursive-descent visitors which need to be aware of the + # GraphQL type system. + TypeInfo, + TypeInfoVisitor, + # Coerce a Python value to a GraphQL type, or produce errors. + coerce_input_value, + # Concatenates multiple ASTs together. + concat_ast, + # Separate an AST into an AST per Operation. + separate_operations, + # Strip characters that are not significant to the validity or execution + # of a GraphQL document. + strip_ignored_characters, + # Comparators for types + is_equal_type, + is_type_sub_type_of, + do_types_overlap, + # Assert a string is a valid GraphQL name. + assert_valid_name, + # Determine if a string is a valid GraphQL name. + is_valid_name_error, + # Compare two GraphQLSchemas and detect breaking changes. + BreakingChange, + BreakingChangeType, + DangerousChange, + DangerousChangeType, + find_breaking_changes, + find_dangerous_changes, +) # Create and operate on GraphQL type definitions and schema. 
from .type import ( - DEFAULT_DEPRECATION_REASON, # Definitions; Standard GraphQL Scalars; Int boundaries constants; Built-in Directives defined by the Spec; "Enum" of Type Kinds; Constant Deprecation Reason; GraphQL Types for introspection.; Meta-field definitions.; Predicates; Assertions; Un-modifiers; Thunk handling; Validate GraphQL schema.; Uphold the spec rules about naming; Types; Keyword args -) -from .type import ( - GRAPHQL_MAX_INT, - GRAPHQL_MIN_INT, - GraphQLAbstractType, - GraphQLArgument, - GraphQLArgumentKwargs, - GraphQLArgumentMap, - GraphQLBoolean, - GraphQLCompositeType, - GraphQLDeprecatedDirective, + # Definitions + GraphQLSchema, GraphQLDirective, - GraphQLDirectiveKwargs, + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, GraphQLEnumType, - GraphQLEnumTypeKwargs, - GraphQLEnumValue, - GraphQLEnumValueKwargs, - GraphQLEnumValueMap, - GraphQLField, - GraphQLFieldKwargs, - GraphQLFieldMap, - GraphQLFieldResolver, - GraphQLFloat, - GraphQLID, - GraphQLIncludeDirective, - GraphQLInputField, - GraphQLInputFieldKwargs, - GraphQLInputFieldMap, GraphQLInputObjectType, - GraphQLInputObjectTypeKwargs, - GraphQLInputType, - GraphQLInt, - GraphQLInterfaceType, - GraphQLInterfaceTypeKwargs, - GraphQLIsTypeOfFn, - GraphQLLeafType, GraphQLList, - GraphQLNamedInputType, - GraphQLNamedOutputType, - GraphQLNamedType, - GraphQLNamedTypeKwargs, GraphQLNonNull, - GraphQLNullableType, - GraphQLObjectType, - GraphQLObjectTypeKwargs, - GraphQLOutputType, - GraphQLResolveInfo, - GraphQLScalarLiteralParser, - GraphQLScalarSerializer, - GraphQLScalarType, - GraphQLScalarTypeKwargs, - GraphQLScalarValueParser, - GraphQLSchema, - GraphQLSchemaKwargs, + # Standard GraphQL Scalars + specified_scalar_types, + GraphQLInt, + GraphQLFloat, + GraphQLString, + GraphQLBoolean, + GraphQLID, + # Int boundaries constants + GRAPHQL_MAX_INT, + GRAPHQL_MIN_INT, + # Built-in Directives defined by the Spec + specified_directives, + 
GraphQLIncludeDirective, GraphQLSkipDirective, + GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, - GraphQLString, - GraphQLType, - GraphQLTypeResolver, - GraphQLUnionType, - GraphQLUnionTypeKwargs, - GraphQLWrappingType, - ResponsePath, - SchemaMetaFieldDef, - Thunk, - ThunkCollection, - ThunkMapping, + # "Enum" of Type Kinds TypeKind, + # Constant Deprecation Reason + DEFAULT_DEPRECATION_REASON, + # GraphQL Types for introspection. + introspection_types, + # Meta-field definitions. + SchemaMetaFieldDef, TypeMetaFieldDef, TypeNameMetaFieldDef, - assert_abstract_type, - assert_composite_type, - assert_directive, - assert_enum_type, - assert_enum_value_name, - assert_input_object_type, - assert_input_type, - assert_interface_type, - assert_leaf_type, - assert_list_type, - assert_name, - assert_named_type, - assert_non_null_type, - assert_nullable_type, - assert_object_type, - assert_output_type, - assert_scalar_type, - assert_schema, - assert_type, - assert_union_type, - assert_valid_schema, - assert_wrapping_type, - get_named_type, - get_nullable_type, - introspection_types, - is_abstract_type, - is_composite_type, + # Predicates + is_schema, is_directive, + is_type, + is_scalar_type, + is_object_type, + is_interface_type, + is_union_type, is_enum_type, is_input_object_type, - is_input_type, - is_interface_type, - is_introspection_type, - is_leaf_type, is_list_type, - is_named_type, is_non_null_type, - is_nullable_type, - is_object_type, + is_input_type, is_output_type, + is_leaf_type, + is_composite_type, + is_abstract_type, + is_wrapping_type, + is_nullable_type, + is_named_type, is_required_argument, is_required_input_field, - is_scalar_type, - is_schema, - is_specified_directive, is_specified_scalar_type, - is_type, - is_union_type, - is_wrapping_type, + is_introspection_type, + is_specified_directive, + # Assertions + assert_schema, + assert_directive, + assert_type, + assert_scalar_type, + assert_object_type, + assert_interface_type, + 
assert_union_type, + assert_enum_type, + assert_input_object_type, + assert_list_type, + assert_non_null_type, + assert_input_type, + assert_output_type, + assert_leaf_type, + assert_composite_type, + assert_abstract_type, + assert_wrapping_type, + assert_nullable_type, + assert_named_type, + # Un-modifiers + get_nullable_type, + get_named_type, + # Thunk handling resolve_thunk, - specified_directives, - specified_scalar_types, + # Validate GraphQL schema. validate_schema, -) - -# Utilities for operating on GraphQL type schema and parsed sources. -from .utilities import ( - BreakingChange, # Produce the GraphQL query recommended for a full schema introspection.; Accepts optional IntrospectionOptions.; Get the target Operation from a Document.; Get the Type for the target Operation AST.; Convert a GraphQLSchema to an IntrospectionQuery.; Build a GraphQLSchema from an introspection result.; Build a GraphQLSchema from a parsed GraphQL Schema language AST.; Build a GraphQLSchema from a GraphQL schema language document.; Extend an existing GraphQLSchema from a parsed GraphQL Schema language AST.; Sort a GraphQLSchema.; Print a GraphQLSchema to GraphQL Schema language.; Print a GraphQLType to GraphQL Schema language.; Prints the built-in introspection schema in the Schema Language format.; Create a GraphQLType from a GraphQL language AST.; Convert a language AST to a dictionary.; Create a Python value from a GraphQL language AST with a Type.; Create a Python value from a GraphQL language AST without a Type.; Create a GraphQL language AST from a Python value.; A helper to use within recursive-descent visitors which need to be aware of the; GraphQL type system.; Coerce a Python value to a GraphQL type, or produce errors.; Concatenates multiple ASTs together.; Separate an AST into an AST per Operation.; Strip characters that are not significant to the validity or execution; of a GraphQL document.; Comparators for types; Assert a string is a valid GraphQL name.; Determine if 
a string is a valid GraphQL name.; Compare two GraphQLSchemas and detect breaking changes. -) -from .utilities import ( - BreakingChangeType, - DangerousChange, - DangerousChangeType, - IntrospectionQuery, - TypeInfo, - TypeInfoVisitor, - assert_valid_name, - ast_from_value, - ast_to_dict, - build_ast_schema, - build_client_schema, - build_schema, - coerce_input_value, - concat_ast, - do_types_overlap, - extend_schema, - find_breaking_changes, - find_dangerous_changes, - get_introspection_query, - get_operation_ast, - get_operation_root_type, - introspection_from_schema, - is_equal_type, - is_type_sub_type_of, - is_valid_name_error, - lexicographic_sort_schema, - print_introspection_schema, - print_schema, - print_type, - separate_operations, - strip_ignored_characters, - type_from_ast, - value_from_ast, - value_from_ast_untyped, + assert_valid_schema, + # Uphold the spec rules about naming + assert_name, + assert_enum_value_name, + # Types + GraphQLType, + GraphQLInputType, + GraphQLOutputType, + GraphQLLeafType, + GraphQLCompositeType, + GraphQLAbstractType, + GraphQLWrappingType, + GraphQLNullableType, + GraphQLNamedType, + GraphQLNamedInputType, + GraphQLNamedOutputType, + Thunk, + ThunkCollection, + ThunkMapping, + GraphQLArgument, + GraphQLArgumentMap, + GraphQLEnumValue, + GraphQLEnumValueMap, + GraphQLField, + GraphQLFieldMap, + GraphQLFieldResolver, + GraphQLInputField, + GraphQLInputFieldMap, + GraphQLScalarSerializer, + GraphQLScalarValueParser, + GraphQLScalarLiteralParser, + GraphQLIsTypeOfFn, + GraphQLResolveInfo, + ResponsePath, + GraphQLTypeResolver, + # Keyword args + GraphQLArgumentKwargs, + GraphQLDirectiveKwargs, + GraphQLEnumTypeKwargs, + GraphQLEnumValueKwargs, + GraphQLFieldKwargs, + GraphQLInputFieldKwargs, + GraphQLInputObjectTypeKwargs, + GraphQLInterfaceTypeKwargs, + GraphQLNamedTypeKwargs, + GraphQLObjectTypeKwargs, + GraphQLScalarTypeKwargs, + GraphQLSchemaKwargs, + GraphQLUnionTypeKwargs, ) # Validate GraphQL queries. 
from .validation import ( - ASTValidationRule, # All validation rules in the GraphQL Specification.; Individual validation rules.; SDL-specific validation rules; Custom validation rules -) -from .validation import ( + validate, + ValidationContext, + ValidationRule, + ASTValidationRule, + SDLValidationRule, + # All validation rules in the GraphQL Specification. + specified_rules, + # Individual validation rules. ExecutableDefinitionsRule, FieldsOnCorrectTypeRule, FragmentsOnCompositeTypesRule, @@ -354,42 +387,62 @@ KnownFragmentNamesRule, KnownTypeNamesRule, LoneAnonymousOperationRule, - LoneSchemaDefinitionRule, - NoDeprecatedCustomRule, NoFragmentCyclesRule, - NoSchemaIntrospectionCustomRule, NoUndefinedVariablesRule, NoUnusedFragmentsRule, NoUnusedVariablesRule, OverlappingFieldsCanBeMergedRule, PossibleFragmentSpreadsRule, - PossibleTypeExtensionsRule, ProvidedRequiredArgumentsRule, ScalarLeafsRule, - SDLValidationRule, SingleFieldSubscriptionsRule, - UniqueArgumentDefinitionNamesRule, UniqueArgumentNamesRule, - UniqueDirectiveNamesRule, UniqueDirectivesPerLocationRule, - UniqueEnumValueNamesRule, - UniqueFieldDefinitionNamesRule, UniqueFragmentNamesRule, UniqueInputFieldNamesRule, UniqueOperationNamesRule, - UniqueOperationTypesRule, - UniqueTypeNamesRule, UniqueVariableNamesRule, - ValidationContext, - ValidationRule, ValuesOfCorrectTypeRule, VariablesAreInputTypesRule, VariablesInAllowedPositionRule, - specified_rules, - validate, + # SDL-specific validation rules + LoneSchemaDefinitionRule, + UniqueOperationTypesRule, + UniqueTypeNamesRule, + UniqueEnumValueNamesRule, + UniqueFieldDefinitionNamesRule, + UniqueArgumentDefinitionNamesRule, + UniqueDirectiveNamesRule, + PossibleTypeExtensionsRule, + # Custom validation rules + NoDeprecatedCustomRule, + NoSchemaIntrospectionCustomRule, +) + +# Execute GraphQL documents. 
+from .execution import ( + execute, + execute_sync, + default_field_resolver, + default_type_resolver, + get_argument_values, + get_directive_values, + get_variable_values, + # Types + ExecutionContext, + ExecutionResult, + FormattedExecutionResult, + # Subscription + subscribe, + create_source_event_stream, + MapAsyncIterator, + # Middleware + Middleware, + MiddlewareManager, ) -from .version import version, version_info, version_info_js, version_js +# The primary entry point into fulfilling a GraphQL request. +from .graphql import graphql, graphql_sync INVALID = Undefined # deprecated alias diff --git a/src/graphql/error/__init__.py b/src/graphql/error/__init__.py index 7b91c8e9..20f5a7c5 100644 --- a/src/graphql/error/__init__.py +++ b/src/graphql/error/__init__.py @@ -5,9 +5,10 @@ """ from .graphql_error import GraphQLError, GraphQLErrorExtensions, GraphQLFormattedError -from .located_error import located_error + from .syntax_error import GraphQLSyntaxError +from .located_error import located_error __all__ = [ "GraphQLError", diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index cc203820..9f515606 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -9,8 +9,10 @@ if TYPE_CHECKING: from ..language.ast import Node # noqa: F401 - from ..language.location import FormattedSourceLocation # noqa: F401 - from ..language.location import SourceLocation + from ..language.location import ( # noqa: F401 + FormattedSourceLocation, + SourceLocation, + ) from ..language.source import Source # noqa: F401 __all__ = ["GraphQLError", "GraphQLErrorExtensions", "GraphQLFormattedError"] diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 75121038..7317fef2 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -5,21 +5,20 @@ """ from .execute import ( + execute, + execute_sync, + default_field_resolver, + default_type_resolver, 
ExecutionContext, ExecutionResult, FormattedExecutionResult, Middleware, - default_field_resolver, - default_type_resolver, - execute, - execute_sync, ) from .map_async_iterator import MapAsyncIterator +from .subscribe import subscribe, create_source_event_stream from .middleware import MiddlewareManager -from .subscribe import create_source_event_stream, subscribe from .values import get_argument_values, get_directive_values, get_variable_values - __all__ = [ "create_source_event_stream", "execute", diff --git a/src/graphql/language/__init__.py b/src/graphql/language/__init__.py index 86422c48..7d3120f5 100644 --- a/src/graphql/language/__init__.py +++ b/src/graphql/language/__init__.py @@ -4,102 +4,110 @@ GraphQL language. """ -from .ast import ArgumentNode # Each kind of AST node +from .source import Source + +from .location import get_location, SourceLocation, FormattedSourceLocation + +from .print_location import print_location, print_source_location + +from .token_kind import TokenKind + +from .lexer import Lexer + +from .parser import parse, parse_type, parse_value, parse_const_value + +from .printer import print_ast + +from .visitor import ( + visit, + Visitor, + ParallelVisitor, + VisitorAction, + VisitorKeyMap, + BREAK, + SKIP, + REMOVE, + IDLE, +) + from .ast import ( - BooleanValueNode, - ConstArgumentNode, - ConstDirectiveNode, - ConstListValueNode, - ConstObjectFieldNode, - ConstObjectValueNode, - ConstValueNode, - DefinitionNode, - DirectiveDefinitionNode, - DirectiveNode, + Location, + Token, + Node, + # Each kind of AST node + NameNode, DocumentNode, - EnumTypeDefinitionNode, - EnumTypeExtensionNode, - EnumValueDefinitionNode, - EnumValueNode, + DefinitionNode, ExecutableDefinitionNode, - FieldDefinitionNode, + OperationDefinitionNode, + OperationType, + VariableDefinitionNode, + VariableNode, + SelectionSetNode, + SelectionNode, FieldNode, - FloatValueNode, - FragmentDefinitionNode, + ArgumentNode, + ConstArgumentNode, FragmentSpreadNode, 
InlineFragmentNode, - InputObjectTypeDefinitionNode, - InputObjectTypeExtensionNode, - InputValueDefinitionNode, - InterfaceTypeDefinitionNode, - InterfaceTypeExtensionNode, + FragmentDefinitionNode, + ValueNode, + ConstValueNode, IntValueNode, - ListTypeNode, + FloatValueNode, + StringValueNode, + BooleanValueNode, + NullValueNode, + EnumValueNode, ListValueNode, - Location, + ConstListValueNode, + ObjectValueNode, + ConstObjectValueNode, + ObjectFieldNode, + ConstObjectFieldNode, + DirectiveNode, + ConstDirectiveNode, + TypeNode, NamedTypeNode, - NameNode, - Node, + ListTypeNode, NonNullTypeNode, - NullValueNode, - ObjectFieldNode, - ObjectTypeDefinitionNode, - ObjectTypeExtensionNode, - ObjectValueNode, - OperationDefinitionNode, - OperationType, + TypeSystemDefinitionNode, + SchemaDefinitionNode, OperationTypeDefinitionNode, + TypeDefinitionNode, ScalarTypeDefinitionNode, - ScalarTypeExtensionNode, - SchemaDefinitionNode, + ObjectTypeDefinitionNode, + FieldDefinitionNode, + InputValueDefinitionNode, + InterfaceTypeDefinitionNode, + UnionTypeDefinitionNode, + EnumTypeDefinitionNode, + EnumValueDefinitionNode, + InputObjectTypeDefinitionNode, + DirectiveDefinitionNode, + TypeSystemExtensionNode, SchemaExtensionNode, - SelectionNode, - SelectionSetNode, - StringValueNode, - Token, - TypeDefinitionNode, TypeExtensionNode, - TypeNode, - TypeSystemDefinitionNode, - TypeSystemExtensionNode, - UnionTypeDefinitionNode, + ScalarTypeExtensionNode, + ObjectTypeExtensionNode, + InterfaceTypeExtensionNode, UnionTypeExtensionNode, - ValueNode, - VariableDefinitionNode, - VariableNode, + EnumTypeExtensionNode, + InputObjectTypeExtensionNode, ) -from .directive_locations import DirectiveLocation -from .lexer import Lexer -from .location import FormattedSourceLocation, SourceLocation, get_location -from .parser import parse, parse_const_value, parse_type, parse_value from .predicates import ( - is_const_value_node, is_definition_node, is_executable_definition_node, 
is_selection_node, - is_type_definition_node, - is_type_extension_node, + is_value_node, + is_const_value_node, is_type_node, is_type_system_definition_node, + is_type_definition_node, is_type_system_extension_node, - is_value_node, -) -from .print_location import print_location, print_source_location -from .printer import print_ast -from .source import Source -from .token_kind import TokenKind -from .visitor import ( - BREAK, - IDLE, - REMOVE, - SKIP, - ParallelVisitor, - Visitor, - VisitorAction, - VisitorKeyMap, - visit, + is_type_extension_node, ) - +from .directive_locations import DirectiveLocation __all__ = [ "get_location", diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 267cb585..f6fe9eb1 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -21,6 +21,7 @@ "Visitor", "ParallelVisitor", "VisitorAction", + "VisitorKeyMap", "visit", "BREAK", "SKIP", diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index 9a01433f..c156de41 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -8,9 +8,8 @@ These functions are not part of the module interface and are subject to change. 
""" -from .awaitable_or_value import AwaitableOrValue -from .cached_property import cached_property from .convert_case import camel_to_snake, snake_to_camel +from .cached_property import cached_property from .description import ( Description, is_description, @@ -18,23 +17,23 @@ unregister_description, ) from .did_you_mean import did_you_mean -from .frozen_dict import FrozenDict -from .frozen_error import FrozenError -from .frozen_list import FrozenList from .group_by import group_by from .identity_func import identity_func from .inspect import inspect from .is_awaitable import is_awaitable from .is_iterable import is_collection, is_iterable -from .merge_kwargs import merge_kwargs from .natural_compare import natural_comparison_key +from .awaitable_or_value import AwaitableOrValue +from .suggestion_list import suggestion_list +from .frozen_error import FrozenError +from .frozen_list import FrozenList +from .frozen_dict import FrozenDict +from .merge_kwargs import merge_kwargs from .path import Path from .print_path_list import print_path_list from .simple_pub_sub import SimplePubSub, SimplePubSubIterator -from .suggestion_list import suggestion_list from .undefined import Undefined, UndefinedType - __all__ = [ "camel_to_snake", "snake_to_camel", diff --git a/src/graphql/subscription/__init__.py b/src/graphql/subscription/__init__.py index 22ffd3bb..f0c90910 100644 --- a/src/graphql/subscription/__init__.py +++ b/src/graphql/subscription/__init__.py @@ -11,7 +11,6 @@ package. In v3.3, the :mod:`graphql.subscription` package will be dropped entirely. 
""" -from ..execution import MapAsyncIterator, create_source_event_stream, subscribe - +from ..execution import subscribe, create_source_event_stream, MapAsyncIterator __all__ = ["subscribe", "create_source_event_stream", "MapAsyncIterator"] diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index c0c2218c..6a86c0f7 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -5,147 +5,170 @@ from ..pyutils import Path as ResponsePath -# Uphold the spec rules about naming. -from .assert_name import assert_enum_value_name, assert_name -from .definition import ( - GraphQLAbstractType, # Predicates; Assertions; Un-modifiers; Thunk handling; Definitions; Type Wrappers; Types; Keyword Args; Resolvers +from .schema import ( + # Predicate + is_schema, + # Assertion + assert_schema, + # GraphQL Schema definition + GraphQLSchema, + # Keyword Args + GraphQLSchemaKwargs, ) + +# Uphold the spec rules about naming. +from .assert_name import assert_name, assert_enum_value_name + from .definition import ( + # Predicates + is_type, + is_scalar_type, + is_object_type, + is_interface_type, + is_union_type, + is_enum_type, + is_input_object_type, + is_list_type, + is_non_null_type, + is_input_type, + is_output_type, + is_leaf_type, + is_composite_type, + is_abstract_type, + is_wrapping_type, + is_nullable_type, + is_named_type, + is_required_argument, + is_required_input_field, + # Assertions + assert_type, + assert_scalar_type, + assert_object_type, + assert_interface_type, + assert_union_type, + assert_enum_type, + assert_input_object_type, + assert_list_type, + assert_non_null_type, + assert_input_type, + assert_output_type, + assert_leaf_type, + assert_composite_type, + assert_abstract_type, + assert_wrapping_type, + assert_nullable_type, + assert_named_type, + # Un-modifiers + get_nullable_type, + get_named_type, + # Thunk handling + resolve_thunk, + # Definitions + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + 
GraphQLUnionType, + GraphQLEnumType, + GraphQLInputObjectType, + # Type Wrappers + GraphQLList, + GraphQLNonNull, + # Types + GraphQLType, + GraphQLInputType, + GraphQLOutputType, + GraphQLLeafType, + GraphQLCompositeType, + GraphQLAbstractType, + GraphQLWrappingType, + GraphQLNullableType, + GraphQLNamedType, + GraphQLNamedInputType, + GraphQLNamedOutputType, + Thunk, + ThunkCollection, + ThunkMapping, GraphQLArgument, - GraphQLArgumentKwargs, GraphQLArgumentMap, - GraphQLCompositeType, - GraphQLEnumType, - GraphQLEnumTypeKwargs, GraphQLEnumValue, - GraphQLEnumValueKwargs, GraphQLEnumValueMap, GraphQLField, - GraphQLFieldKwargs, GraphQLFieldMap, - GraphQLFieldResolver, GraphQLInputField, - GraphQLInputFieldKwargs, GraphQLInputFieldMap, - GraphQLInputObjectType, + GraphQLScalarSerializer, + GraphQLScalarValueParser, + GraphQLScalarLiteralParser, + # Keyword Args + GraphQLArgumentKwargs, + GraphQLEnumTypeKwargs, + GraphQLEnumValueKwargs, + GraphQLFieldKwargs, + GraphQLInputFieldKwargs, GraphQLInputObjectTypeKwargs, - GraphQLInputType, - GraphQLInterfaceType, GraphQLInterfaceTypeKwargs, - GraphQLIsTypeOfFn, - GraphQLLeafType, - GraphQLList, - GraphQLNamedInputType, - GraphQLNamedOutputType, - GraphQLNamedType, GraphQLNamedTypeKwargs, - GraphQLNonNull, - GraphQLNullableType, - GraphQLObjectType, GraphQLObjectTypeKwargs, - GraphQLOutputType, - GraphQLResolveInfo, - GraphQLScalarLiteralParser, - GraphQLScalarSerializer, - GraphQLScalarType, GraphQLScalarTypeKwargs, - GraphQLScalarValueParser, - GraphQLType, - GraphQLTypeResolver, - GraphQLUnionType, GraphQLUnionTypeKwargs, - GraphQLWrappingType, - Thunk, - ThunkCollection, - ThunkMapping, - assert_abstract_type, - assert_composite_type, - assert_enum_type, - assert_input_object_type, - assert_input_type, - assert_interface_type, - assert_leaf_type, - assert_list_type, - assert_named_type, - assert_non_null_type, - assert_nullable_type, - assert_object_type, - assert_output_type, - assert_scalar_type, - assert_type, - 
assert_union_type, - assert_wrapping_type, - get_named_type, - get_nullable_type, - is_abstract_type, - is_composite_type, - is_enum_type, - is_input_object_type, - is_input_type, - is_interface_type, - is_leaf_type, - is_list_type, - is_named_type, - is_non_null_type, - is_nullable_type, - is_object_type, - is_output_type, - is_required_argument, - is_required_input_field, - is_scalar_type, - is_type, - is_union_type, - is_wrapping_type, - resolve_thunk, -) -from .directives import ( - DEFAULT_DEPRECATION_REASON, # Predicate; Assertion; Directives Definition; Built-in Directives defined by the Spec; Keyword Args; Constant Deprecation Reason + # Resolvers + GraphQLFieldResolver, + GraphQLTypeResolver, + GraphQLIsTypeOfFn, + GraphQLResolveInfo, ) + from .directives import ( - GraphQLDeprecatedDirective, + # Predicate + is_directive, + # Assertion + assert_directive, + # Directives Definition GraphQLDirective, - GraphQLDirectiveKwargs, + # Built-in Directives defined by the Spec + is_specified_directive, + specified_directives, GraphQLIncludeDirective, GraphQLSkipDirective, + GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, - assert_directive, - is_directive, - is_specified_directive, - specified_directives, -) -from .introspection import ( - SchemaMetaFieldDef, # Predicate; GraphQL Types for introspection.; "Enum" of Type Kinds; Meta-field definitions. -) -from .introspection import ( - TypeKind, - TypeMetaFieldDef, - TypeNameMetaFieldDef, - introspection_types, - is_introspection_type, + # Keyword Args + GraphQLDirectiveKwargs, + # Constant Deprecation Reason + DEFAULT_DEPRECATION_REASON, ) # Common built-in scalar instances. 
from .scalars import ( - GRAPHQL_MAX_INT, # Predicate; Standard GraphQL Scalars; Int boundaries constants -) -from .scalars import ( - GRAPHQL_MIN_INT, - GraphQLBoolean, - GraphQLFloat, - GraphQLID, - GraphQLInt, - GraphQLString, + # Predicate is_specified_scalar_type, + # Standard GraphQL Scalars specified_scalar_types, + GraphQLInt, + GraphQLFloat, + GraphQLString, + GraphQLBoolean, + GraphQLID, + # Int boundaries constants + GRAPHQL_MAX_INT, + GRAPHQL_MIN_INT, ) -from .schema import ( - GraphQLSchema, # Predicate; Assertion; GraphQL Schema definition; Keyword Args + +from .introspection import ( + # Predicate + is_introspection_type, + # GraphQL Types for introspection. + introspection_types, + # "Enum" of Type Kinds + TypeKind, + # Meta-field definitions. + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, ) -from .schema import GraphQLSchemaKwargs, assert_schema, is_schema # Validate GraphQL schema. -from .validate import assert_valid_schema, validate_schema - +from .validate import validate_schema, assert_valid_schema __all__ = [ "is_schema", diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index 7255b17c..1571485b 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -4,58 +4,62 @@ the GraphQL language and type objects. """ -# Assert that a string is a valid GraphQL name. -from .assert_valid_name import assert_valid_name, is_valid_name_error +# Produce the GraphQL query recommended for a full schema introspection. +from .get_introspection_query import get_introspection_query, IntrospectionQuery -# Create a GraphQL language AST from a Python value. -from .ast_from_value import ast_from_value +# Get the target Operation from a Document. +from .get_operation_ast import get_operation_ast -# Convert a language AST to a dictionary. -from .ast_to_dict import ast_to_dict +# Get the Type for the target Operation AST. 
+from .get_operation_root_type import get_operation_root_type -# Build a GraphQLSchema from GraphQL Schema language. -from .build_ast_schema import build_ast_schema, build_schema +# Convert a GraphQLSchema to an IntrospectionQuery. +from .introspection_from_schema import introspection_from_schema # Build a GraphQLSchema from an introspection result. from .build_client_schema import build_client_schema -# Coerce a Python value to a GraphQL type, or produce errors. -from .coerce_input_value import coerce_input_value - -# Concatenate multiple ASTs together. -from .concat_ast import concat_ast +# Build a GraphQLSchema from GraphQL Schema language. +from .build_ast_schema import build_ast_schema, build_schema # Extend an existing GraphQLSchema from a parsed GraphQL Schema language AST. from .extend_schema import extend_schema -# Compare two GraphQLSchemas and detect breaking changes. -from .find_breaking_changes import ( - BreakingChange, - BreakingChangeType, - DangerousChange, - DangerousChangeType, - find_breaking_changes, - find_dangerous_changes, +# Sort a GraphQLSchema. +from .lexicographic_sort_schema import lexicographic_sort_schema + +# Print a GraphQLSchema to GraphQL Schema language. +from .print_schema import ( + print_introspection_schema, + print_schema, + print_type, + print_value, # deprecated ) -# Produce the GraphQL query recommended for a full schema introspection. -from .get_introspection_query import IntrospectionQuery, get_introspection_query +# Create a GraphQLType from a GraphQL language AST. +from .type_from_ast import type_from_ast -# Get the target Operation from a Document. -from .get_operation_ast import get_operation_ast +# Convert a language AST to a dictionary. +from .ast_to_dict import ast_to_dict -# Get the Type for the target Operation AST. -from .get_operation_root_type import get_operation_root_type +# Create a Python value from a GraphQL language AST with a type. 
+from .value_from_ast import value_from_ast -# Convert a GraphQLSchema to an IntrospectionQuery. -from .introspection_from_schema import introspection_from_schema +# Create a Python value from a GraphQL language AST without a type. +from .value_from_ast_untyped import value_from_ast_untyped -# Sort a GraphQLSchema. -from .lexicographic_sort_schema import lexicographic_sort_schema +# Create a GraphQL language AST from a Python value. +from .ast_from_value import ast_from_value -# Print a GraphQLSchema to GraphQL Schema language. -from .print_schema import print_value # deprecated -from .print_schema import print_introspection_schema, print_schema, print_type +# A helper to use within recursive-descent visitors which need to be aware of +# the GraphQL type system +from .type_info import TypeInfo, TypeInfoVisitor + +# Coerce a Python value to a GraphQL type, or produce errors. +from .coerce_input_value import coerce_input_value + +# Concatenate multiple ASTs together. +from .concat_ast import concat_ast # Separate an AST into an AST per Operation. from .separate_operations import separate_operations @@ -65,21 +69,20 @@ from .strip_ignored_characters import strip_ignored_characters # Comparators for types -from .type_comparators import do_types_overlap, is_equal_type, is_type_sub_type_of +from .type_comparators import is_equal_type, is_type_sub_type_of, do_types_overlap -# Create a GraphQLType from a GraphQL language AST. -from .type_from_ast import type_from_ast - -# A helper to use within recursive-descent visitors which need to be aware of -# the GraphQL type system -from .type_info import TypeInfo, TypeInfoVisitor - -# Create a Python value from a GraphQL language AST with a type. -from .value_from_ast import value_from_ast - -# Create a Python value from a GraphQL language AST without a type. -from .value_from_ast_untyped import value_from_ast_untyped +# Assert that a string is a valid GraphQL name. 
+from .assert_valid_name import assert_valid_name, is_valid_name_error +# Compare two GraphQLSchemas and detect breaking changes. +from .find_breaking_changes import ( + BreakingChange, + BreakingChangeType, + DangerousChange, + DangerousChangeType, + find_breaking_changes, + find_dangerous_changes, +) __all__ = [ "BreakingChange", diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 0c13c32a..c1b839f0 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -7,8 +7,7 @@ try: from typing import Literal, TypedDict except ImportError: # Python < 3.8 - from typing_extensions import Literal # type: ignore - from typing_extensions import TypedDict + from typing_extensions import Literal, TypedDict # type: ignore __all__ = [ "get_introspection_query", diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 03c91bd2..313073a5 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -4,11 +4,18 @@ GraphQL result. """ -from .rules import ASTValidationRule, SDLValidationRule, ValidationRule +from .validate import validate -# Optional rules not defined by the GraphQL Specification -from .rules.custom.no_deprecated import NoDeprecatedCustomRule -from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule +from .validation_context import ( + ASTValidationContext, + SDLValidationContext, + ValidationContext, +) + +from .rules import ValidationRule, ASTValidationRule, SDLValidationRule + +# All validation rules in the GraphQL Specification. 
+from .specified_rules import specified_rules # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule @@ -34,9 +41,6 @@ # Spec Section: "Lone Anonymous Operation" from .rules.lone_anonymous_operation import LoneAnonymousOperationRule -# SDL-specific validation rules -from .rules.lone_schema_definition import LoneSchemaDefinitionRule - # Spec Section: "Fragments must not form cycles" from .rules.no_fragment_cycles import NoFragmentCyclesRule @@ -54,7 +58,6 @@ # Spec Section: "Fragment spread is possible" from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule -from .rules.possible_type_extensions import PossibleTypeExtensionsRule # Spec Section: "Argument Optionality" from .rules.provided_required_arguments import ProvidedRequiredArgumentsRule @@ -64,16 +67,12 @@ # Spec Section: "Subscriptions with Single Root Field" from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule -from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule # Spec Section: "Argument Uniqueness" from .rules.unique_argument_names import UniqueArgumentNamesRule -from .rules.unique_directive_names import UniqueDirectiveNamesRule # Spec Section: "Directives Are Unique Per Location" from .rules.unique_directives_per_location import UniqueDirectivesPerLocationRule -from .rules.unique_enum_value_names import UniqueEnumValueNamesRule -from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule # Spec Section: "Fragment Name Uniqueness" from .rules.unique_fragment_names import UniqueFragmentNamesRule @@ -83,8 +82,6 @@ # Spec Section: "Operation Name Uniqueness" from .rules.unique_operation_names import UniqueOperationNamesRule -from .rules.unique_operation_types import UniqueOperationTypesRule -from .rules.unique_type_names import UniqueTypeNamesRule # Spec Section: "Variable Uniqueness" from .rules.unique_variable_names import UniqueVariableNamesRule @@ -98,15 +95,19 
@@ # Spec Section: "All Variable Usages Are Allowed" from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule -# All validation rules in the GraphQL Specification. -from .specified_rules import specified_rules -from .validate import validate -from .validation_context import ( - ASTValidationContext, - SDLValidationContext, - ValidationContext, -) +# SDL-specific validation rules +from .rules.lone_schema_definition import LoneSchemaDefinitionRule +from .rules.unique_operation_types import UniqueOperationTypesRule +from .rules.unique_type_names import UniqueTypeNamesRule +from .rules.unique_enum_value_names import UniqueEnumValueNamesRule +from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule +from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule +from .rules.unique_directive_names import UniqueDirectiveNamesRule +from .rules.possible_type_extensions import PossibleTypeExtensionsRule +# Optional rules not defined by the GraphQL Specification +from .rules.custom.no_deprecated import NoDeprecatedCustomRule +from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule __all__ = [ "validate", diff --git a/src/graphql/validation/rules/__init__.py b/src/graphql/validation/rules/__init__.py index d4e13617..1b0c5d57 100644 --- a/src/graphql/validation/rules/__init__.py +++ b/src/graphql/validation/rules/__init__.py @@ -8,7 +8,6 @@ ValidationContext, ) - __all__ = ["ASTValidationRule", "SDLValidationRule", "ValidationRule"] From 617e8542b9942124f9aa7714c0c883caefc45984 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 15:40:38 +0200 Subject: [PATCH 010/230] Fix minor Flake8 issue --- tests/utilities/test_strip_ignored_characters.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 272a3fdc..21353c44 100644 --- 
a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -7,8 +7,7 @@ from graphql.language import Lexer, Source, TokenKind, parse from graphql.utilities import strip_ignored_characters -from ..fixtures import kitchen_sink_query # noqa: F401 -from ..fixtures import kitchen_sink_sdl +from ..fixtures import kitchen_sink_query, kitchen_sink_sdl # noqa: F401 from ..utils import dedent From 780da3b8704dde0dc11b3ff957dfa879714eed55 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 16:45:14 +0200 Subject: [PATCH 011/230] Update Poetry and use dependency groups (#177) Unfortunately, this does not solve the problem of conflicting dependencies between Sphinx and Flake8. --- poetry.lock | 1499 ------------------------------------------------ pyproject.toml | 23 +- 2 files changed, 18 insertions(+), 1504 deletions(-) delete mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 7c0e0394..00000000 --- a/poetry.lock +++ /dev/null @@ -1,1499 +0,0 @@ -[[package]] -name = "alabaster" -version = "0.7.12" -description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "attrs" -version = "22.1.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "Babel" -version = "2.10.3" -description = "Internationalization utilities" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytz = ">=2015.7" - -[[package]] -name = "black" -version = "20.8b1" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -appdirs = "*" -click = ">=7.1.2" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -mypy-extensions = ">=0.4.3" -pathspec = ">=0.6,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" -typed-ast = ">=1.4.0" -typing-extensions = ">=3.7.4" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] - -[[package]] -name = "black" -version = "22.8.0" -description = "The uncompromising code formatter." 
-category = "dev" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -click = ">=8.0.0" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "build" -version = "0.8.0" -description = "A simple, correct PEP 517 build frontend" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""} -packaging = ">=19.0" -pep517 = ">=0.9.1" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2021.08.31)", "sphinx (>=4.0,<5.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=4.6.4)", "mypy (==0.950)", "typing-extensions (>=3.7.4.3)"] -virtualenv = ["virtualenv (>=20.0.35)"] - -[[package]] -name = "bump2version" -version = "1.0.1" -description = "Version-bump your software with a single command!" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "certifi" -version = "2022.9.14" -description = "Python package for providing Mozilla's CA Bundle." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "charset-normalizer" -version = "2.0.12" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" -optional = false -python-versions = ">=3.5.0" - -[package.extras] -unicode_backport = ["unicodedata2"] - -[[package]] -name = "check-manifest" -version = "0.48" -description = "Check MANIFEST.in in a Python source package for completeness" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -build = ">=0.1" -setuptools = "*" -tomli = "*" - -[package.extras] -test = ["mock (>=3.0.0)", "pytest"] - -[[package]] -name = "click" -version = "8.0.4" -description = "Composable command line interface toolkit" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - -[[package]] -name = "colorama" -version = "0.4.5" -description = "Cross-platform colored terminal text." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "coverage" -version = "6.2" -description = "Code coverage measurement for Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "dataclasses" -version = "0.8" -description = "A backport of the dataclasses module for Python 3.6" -category = "dev" -optional = false -python-versions = ">=3.6, <3.7" - -[[package]] -name = "distlib" -version = "0.3.6" -description = "Distribution utilities" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "docutils" -version = "0.17.1" -description = "Docutils -- Python Documentation Utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "filelock" -version = "3.4.1" -description = "A platform independent file lock." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] - -[[package]] -name = "filelock" -version = "3.8.0" -description = "A platform independent file lock." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" - -[[package]] -name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" -optional = false -python-versions = ">=3.6.1" - -[package.dependencies] -importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "importlib-metadata" -version = "4.2.0" -description = "Read metadata from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] - -[[package]] -name = "importlib-resources" -version = "5.4.0" -description = "Read resources from Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] - -[[package]] -name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "Jinja2" -version = "3.0.3" -description = "A very fast and expressive template engine." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "MarkupSafe" -version = "2.0.1" -description = "Safely add untrusted strings to HTML/XML markup." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "mypy" -version = "0.971" -description = "Optional static typing for Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -mypy-extensions = ">=0.4.3" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} -typing-extensions = ">=3.10" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "packaging" -version = "21.3" -description = "Core utilities for Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - -[[package]] -name = "pathspec" -version = "0.9.0" -description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[[package]] -name = "pep517" -version = "0.13.0" -description = "Wrappers to build Python packages using PEP 517 hooks" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -zipp = {version = "*", markers = "python_version < \"3.8\""} - -[[package]] -name = "platformdirs" -version = "2.4.0" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] - -[[package]] -name = "pluggy" -version = "1.0.0" -description = "plugin and hook calling mechanisms for python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = "py-cpuinfo" -version = "8.0.0" -description = "Get CPU info with pure Python 2 & 3" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[[package]] -name = 
"pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "Pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.16.0" -description = "Pytest support for asyncio." 
-category = "dev" -optional = false -python-versions = ">= 3.6" - -[package.dependencies] -pytest = ">=5.4.0" - -[package.extras] -testing = ["coverage", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-asyncio" -version = "0.19.0" -description = "Pytest support for asyncio" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pytest = ">=6.1.0" -typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} - -[package.extras] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] - -[[package]] -name = "pytest-benchmark" -version = "3.4.1" -description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -py-cpuinfo = "*" -pytest = ">=3.8" - -[package.extras] -aspect = ["aspectlib"] -elasticsearch = ["elasticsearch"] -histogram = ["pygal", "pygaljs"] - -[[package]] -name = "pytest-cov" -version = "3.0.0" -description = "Pytest plugin for measuring coverage." 
-category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] - -[[package]] -name = "pytest-describe" -version = "2.0.1" -description = "Describe-style plugin for pytest" -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -pytest = ">=4.0.0" - -[[package]] -name = "pytest-timeout" -version = "2.1.0" -description = "pytest plugin to abort hanging tests" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -pytest = ">=5.0.0" - -[[package]] -name = "pytz" -version = "2022.2.1" -description = "World timezone definitions, modern and historical" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "regex" -version = "2022.9.13" -description = "Alternative regular expression module, to replace re." -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "requests" -version = "2.27.1" -description = "Python HTTP for Humans." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] - -[[package]] -name = "setuptools" -version = "59.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-inline-tabs", "sphinxcontrib-towncrier"] -testing = ["flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "paver", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-virtualenv (>=1.2.7)", "pytest-xdist", "sphinx", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "Sphinx" -version = "4.3.2" -description = "Python documentation generator" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.18" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" -requests = ">=2.5.0" -setuptools = "*" -snowballstemmer = ">=1.1" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", "types-pkg-resources", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] - -[[package]] -name = "sphinx-rtd-theme" -version = "1.0.0" -description = "Read the Docs theme for Sphinx" -category = "dev" -optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" - -[package.dependencies] -docutils = "<0.18" -sphinx = ">=1.6" - -[package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - -[[package]] -name = "tomli" -version = "1.2.3" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "dev" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "tox" -version = "3.25.0" -description = "tox is a generic virtualenv management and test command line tool" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} -filelock = ">=3.0.0" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -packaging = ">=14" -pluggy = ">=0.12.0" -py = ">=1.4.17" -six = ">=1.14.0" -toml = ">=0.9.4" -virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" - -[package.extras] -docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)"] - -[[package]] -name = "tox" -version = "3.26.0" -description = "tox is a generic virtualenv management and test command line tool" -category = "dev" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" - -[package.dependencies] -colorama = {version = 
">=0.4.1", markers = "platform_system == \"Windows\""} -filelock = ">=3.0.0" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -packaging = ">=14" -pluggy = ">=0.12.0" -py = ">=1.4.17" -six = ">=1.14.0" -tomli = {version = ">=2.0.1", markers = "python_version >= \"3.7\" and python_version < \"3.11\""} -virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" - -[package.extras] -docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)"] - -[[package]] -name = "typed-ast" -version = "1.5.4" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" -category = "main" -optional = false -python-versions = ">=3.6" - -[[package]] -name = "typing-extensions" -version = "4.3.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" - -[[package]] -name = "urllib3" -version = "1.26.12" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] - -[[package]] -name = "virtualenv" -version = "20.16.2" -description = "Virtual Python Environment builder" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} -platformdirs = ">=2,<3" - -[package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] - -[[package]] -name = "zipp" -version = "3.6.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] - -[metadata] -lock-version = "1.1" -python-versions = "^3.6" -content-hash = "dc1f4a6e11fed7704f09dcd5e3b3af090398383b36458c966a52c840b7fc6c78" - -[metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - 
{file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -Babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, -] -black = [ - {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, - {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, - {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, - {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, - {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, - {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, - {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, - {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, - {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, - {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, - {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, - {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, - {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, - {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, - {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, - {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, - {file = 
"black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, - {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, - {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, - {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, -] -build = [ - {file = "build-0.8.0-py3-none-any.whl", hash = "sha256:19b0ed489f92ace6947698c3ca8436cb0556a66e2aa2d34cd70e2a5d27cd0437"}, - {file = "build-0.8.0.tar.gz", hash = "sha256:887a6d471c901b1a6e6574ebaeeebb45e5269a79d095fe9a8f88d6614ed2e5f0"}, -] -bump2version = [ - {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, - {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, -] -certifi = [ - {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, - {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] -check-manifest = [ - {file = "check-manifest-0.48.tar.gz", hash = "sha256:3b575f1dade7beb3078ef4bf33a94519834457c7281dbc726b15c5466b55c657"}, - {file = "check_manifest-0.48-py3-none-any.whl", hash = "sha256:b1923685f98c1c2468601a1a7bed655db549a25d43c583caded3860ad8308f8c"}, -] -click = [ - {file = "click-8.0.4-py3-none-any.whl", hash = 
"sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, - {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, -] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] -coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = 
"sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = 
"coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = 
"coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, -] -dataclasses = [ - {file = "dataclasses-0.8-py3-none-any.whl", hash = 
"sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, - {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, -] -distlib = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, -] -docutils = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, -] -filelock = [ - {file = "filelock-3.4.1-py3-none-any.whl", hash = "sha256:a4bc51381e01502a30e9f06dd4fa19a1712eab852b6fb0f84fd7cce0793d8ca3"}, - {file = "filelock-3.4.1.tar.gz", hash = "sha256:0f12f552b42b5bf60dba233710bf71337d35494fc8bdd4fd6d9f6d082ad45e06"}, - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, -] -flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] 
-imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, -] -importlib-resources = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -Jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, -] -MarkupSafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", 
hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = 
"MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = 
"sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] -mypy = [ - {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, - {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, - {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, - {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, - {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, - {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, - {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, - {file = 
"mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, - {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, - {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, - {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, - {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, - {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, - {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, - {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, - {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, - {file = 
"mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, - {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, - {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -pep517 = [ - {file = "pep517-0.13.0-py3-none-any.whl", hash = "sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b"}, - {file = "pep517-0.13.0.tar.gz", hash = "sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59"}, -] -platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = 
"py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -py-cpuinfo = [ - {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"}, -] -pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] -pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] -Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, -] -pytest = [ - {file = 
"pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, - {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, - {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, - {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, -] -pytest-benchmark = [ - {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, - {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, -] -pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, -] -pytest-describe = [ - {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, - {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"}, -] -pytest-timeout = [ - {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, - {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, -] -pytz = [ - {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = 
"sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, - {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, -] -regex = [ - {file = "regex-2022.9.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0394265391a86e2bbaa7606e59ac71bd9f1edf8665a59e42771a9c9adbf6fd4f"}, - {file = "regex-2022.9.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86df2049b18745f3cd4b0f4c4ef672bfac4b80ca488e6ecfd2bbfe68d2423a2c"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce331b076b2b013e7d7f07157f957974ef0b0881a808e8a4a4b3b5105aee5d04"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:360ffbc9357794ae41336b681dff1c0463193199dfb91fcad3ec385ea4972f46"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18e503b1e515a10282b3f14f1b3d856194ecece4250e850fad230842ed31227f"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e167d1ccd41d27b7b6655bb7a2dcb1b1eb1e0d2d662043470bd3b4315d8b2b"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4146cb7ae6029fc83b5c905ec6d806b7e5568dc14297c423e66b86294bad6c39"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a1aec4ae549fd7b3f52ceaf67e133010e2fba1538bf4d5fc5cd162a5e058d5df"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cab548d6d972e1de584161487b2ac1aa82edd8430d1bde69587ba61698ad1cfb"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d64e1a7e6d98a4cdc8b29cb8d8ed38f73f49e55fbaa737bdb5933db99b9de22"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:67a4c625361db04ae40ef7c49d3cbe2c1f5ff10b5a4491327ab20f19f2fb5d40"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:5d0dd8b06896423211ce18fba0c75dacc49182a1d6514c004b535be7163dca0f"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4318f69b79f9f7d84a7420e97d4bfe872dc767c72f891d4fea5fa721c74685f7"}, - {file = "regex-2022.9.13-cp310-cp310-win32.whl", hash = "sha256:26df88c9636a0c3f3bd9189dd435850a0c49d0b7d6e932500db3f99a6dd604d1"}, - {file = "regex-2022.9.13-cp310-cp310-win_amd64.whl", hash = "sha256:6fe1dd1021e0f8f3f454ce2811f1b0b148f2d25bb38c712fec00316551e93650"}, - {file = "regex-2022.9.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83cc32a1a2fa5bac00f4abc0e6ce142e3c05d3a6d57e23bd0f187c59b4e1e43b"}, - {file = "regex-2022.9.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2effeaf50a6838f3dd4d3c5d265f06eabc748f476e8441892645ae3a697e273"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a786a55d00439d8fae4caaf71581f2aaef7297d04ee60345c3594efef5648a"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b701dbc124558fd2b1b08005eeca6c9160e209108fbcbd00091fcfac641ac7"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab81cc4d58026861445230cfba27f9825e9223557926e7ec22156a1a140d55c"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0c5cc3d1744a67c3b433dce91e5ef7c527d612354c1f1e8576d9e86bc5c5e2"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:518272f25da93e02af4f1e94985f5042cec21557ef3591027d0716f2adda5d0a"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8418ee2cb857b83881b8f981e4c636bc50a0587b12d98cb9b947408a3c484fe7"}, - {file = 
"regex-2022.9.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cfa4c956ff0a977c4823cb3b930b0a4e82543b060733628fec7ab3eb9b1abe37"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c4d17879dd4c4432c08a1ca1ab379f12ab54af569e945b6fc1c4cf6a74ca45"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:77c2879d3ba51e5ca6c2b47f2dcf3d04a976a623a8fc8236010a16c9e0b0a3c7"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2885ec6eea629c648ecc9bde0837ec6b92208b7f36381689937fe5d64a517e8"}, - {file = "regex-2022.9.13-cp311-cp311-win32.whl", hash = "sha256:2dda4b096a6f630d6531728a45bd12c67ec3badf44342046dc77d4897277d4f2"}, - {file = "regex-2022.9.13-cp311-cp311-win_amd64.whl", hash = "sha256:592b9e2e1862168e71d9e612bfdc22c451261967dbd46681f14e76dfba7105fd"}, - {file = "regex-2022.9.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:df8fe00b60e4717662c7f80c810ba66dcc77309183c76b7754c0dff6f1d42054"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995e70bb8c91d1b99ed2aaf8ec44863e06ad1dfbb45d7df95f76ef583ec323a9"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad75173349ad79f9d21e0d0896b27dcb37bfd233b09047bc0b4d226699cf5c87"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7681c49da1a2d4b905b4f53d86c9ba4506e79fba50c4a664d9516056e0f7dfcc"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bc8edc5f8ef0ebb46f3fa0d02bd825bbe9cc63d59e428ffb6981ff9672f6de1"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bee775ff05c9d519195bd9e8aaaccfe3971db60f89f89751ee0f234e8aeac5"}, - {file = 
"regex-2022.9.13-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1a901ce5cd42658ab8f8eade51b71a6d26ad4b68c7cfc86b87efc577dfa95602"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:14a7ab070fa3aec288076eed6ed828587b805ef83d37c9bfccc1a4a7cfbd8111"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d23ac6b4bf9e32fcde5fcdb2e1fd5e7370d6693fcac51ee1d340f0e886f50d1f"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:4cdbfa6d2befeaee0c899f19222e9b20fc5abbafe5e9c43a46ef819aeb7b75e5"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ab07934725e6f25c6f87465976cc69aef1141e86987af49d8c839c3ffd367c72"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d2a1371dc73e921f3c2e087c05359050f3525a9a34b476ebc8130e71bec55e97"}, - {file = "regex-2022.9.13-cp36-cp36m-win32.whl", hash = "sha256:fcbd1edff1473d90dc5cf4b52d355cf1f47b74eb7c85ba6e45f45d0116b8edbd"}, - {file = "regex-2022.9.13-cp36-cp36m-win_amd64.whl", hash = "sha256:fe428822b7a8c486bcd90b334e9ab541ce6cc0d6106993d59f201853e5e14121"}, - {file = "regex-2022.9.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d7430f041755801b712ec804aaf3b094b9b5facbaa93a6339812a8e00d7bd53a"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:079c182f99c89524069b9cd96f5410d6af437e9dca576a7d59599a574972707e"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59bac44b5a07b08a261537f652c26993af9b1bbe2a29624473968dd42fc29d56"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a59d0377e58d96a6f11636e97992f5b51b7e1e89eb66332d1c01b35adbabfe8a"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b9d68eb704b24bc4d441b24e4a12653acd07d2c39940548761e0985a08bc1fff"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0385d66e73cdd4462f3cc42c76a6576ddcc12472c30e02a2ae82061bff132c32"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db45016364eec9ddbb5af93c8740c5c92eb7f5fc8848d1ae04205a40a1a2efc6"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:03ff695518482b946a6d3d4ce9cbbd99a21320e20d94913080aa3841f880abcd"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6b32b45433df1fad7fed738fe15200b6516da888e0bd1fdd6aa5e50cc16b76bc"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:003a2e1449d425afc817b5f0b3d4c4aa9072dd5f3dfbf6c7631b8dc7b13233de"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a9eb9558e1d0f78e07082d8a70d5c4d631c8dd75575fae92105df9e19c736730"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f6e0321921d2fdc082ef90c1fd0870f129c2e691bfdc4937dcb5cd308aba95c4"}, - {file = "regex-2022.9.13-cp37-cp37m-win32.whl", hash = "sha256:3f3b4594d564ed0b2f54463a9f328cf6a5b2a32610a90cdff778d6e3e561d08b"}, - {file = "regex-2022.9.13-cp37-cp37m-win_amd64.whl", hash = "sha256:8aba0d01e3dfd335f2cb107079b07fdddb4cd7fb2d8c8a1986f9cb8ce9246c24"}, - {file = "regex-2022.9.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:944567bb08f52268d8600ee5bdf1798b2b62ea002cc692a39cec113244cbdd0d"}, - {file = "regex-2022.9.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b664a4d33ffc6be10996606dfc25fd3248c24cc589c0b139feb4c158053565e"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f06cc1190f3db3192ab8949e28f2c627e1809487e2cfc435b6524c1ce6a2f391"}, - {file = 
"regex-2022.9.13-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c57d50d4d5eb0c862569ca3c840eba2a73412f31d9ecc46ef0d6b2e621a592b"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19a4da6f513045f5ba00e491215bd00122e5bd131847586522463e5a6b2bd65f"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a926339356fe29595f8e37af71db37cd87ff764e15da8ad5129bbaff35bcc5a6"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:091efcfdd4178a7e19a23776dc2b1fafb4f57f4d94daf340f98335817056f874"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:880dbeb6bdde7d926b4d8e41410b16ffcd4cb3b4c6d926280fea46e2615c7a01"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:73b985c9fc09a7896846e26d7b6f4d1fd5a20437055f4ef985d44729f9f928d0"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c0b7cb9598795b01f9a3dd3f770ab540889259def28a3bf9b2fa24d52edecba3"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37e5a26e76c46f54b3baf56a6fdd56df9db89758694516413757b7d127d4c57b"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:99945ddb4f379bb9831c05e9f80f02f079ba361a0fb1fba1fc3b267639b6bb2e"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dcbcc9e72a791f622a32d17ff5011326a18996647509cac0609a7fc43adc229"}, - {file = "regex-2022.9.13-cp38-cp38-win32.whl", hash = "sha256:d3102ab9bf16bf541ca228012d45d88d2a567c9682a805ae2c145a79d3141fdd"}, - {file = "regex-2022.9.13-cp38-cp38-win_amd64.whl", hash = "sha256:14216ea15efc13f28d0ef1c463d86d93ca7158a79cd4aec0f9273f6d4c6bb047"}, - {file = "regex-2022.9.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9a165a05979e212b2c2d56a9f40b69c811c98a788964e669eb322de0a3e420b4"}, - {file = "regex-2022.9.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:14c71437ffb89479c89cc7022a5ea2075a842b728f37205e47c824cc17b30a42"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee7045623a5ace70f3765e452528b4c1f2ce669ed31959c63f54de64fe2f6ff7"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e521d9db006c5e4a0f8acfef738399f72b704913d4e083516774eb51645ad7c"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86548b8234b2be3985dbc0b385e35f5038f0f3e6251464b827b83ebf4ed90e5"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b39ee3b280e15824298b97cec3f7cbbe6539d8282cc8a6047a455b9a72c598"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6e6e61e9a38b6cc60ca3e19caabc90261f070f23352e66307b3d21a24a34aaf"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d837ccf3bd2474feabee96cd71144e991472e400ed26582edc8ca88ce259899c"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6adfe300848d61a470ec7547adc97b0ccf86de86a99e6830f1d8c8d19ecaf6b3"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d5b003d248e6f292475cd24b04e5f72c48412231961a675edcb653c70730e79e"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d5edd3eb877c9fc2e385173d4a4e1d792bf692d79e25c1ca391802d36ecfaa01"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:50e764ffbd08b06aa8c4e86b8b568b6722c75d301b33b259099f237c46b2134e"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:6d43bd402b27e0e7eae85c612725ba1ce7798f20f6fab4e8bc3de4f263294f03"}, - {file = "regex-2022.9.13-cp39-cp39-win32.whl", hash = "sha256:7fcf7f94ccad19186820ac67e2ec7e09e0ac2dac39689f11cf71eac580503296"}, - {file = "regex-2022.9.13-cp39-cp39-win_amd64.whl", hash = "sha256:322bd5572bed36a5b39952d88e072738926759422498a96df138d93384934ff8"}, - {file = "regex-2022.9.13.tar.gz", hash = "sha256:f07373b6e56a6f3a0df3d75b651a278ca7bd357a796078a26a958ea1ce0588fd"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -setuptools = [ - {file = "setuptools-59.6.0-py3-none-any.whl", hash = "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e"}, - {file = "setuptools-59.6.0.tar.gz", hash = "sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -Sphinx = [ - {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, - {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, -] -sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, - {file = 
"sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -toml = [ - {file 
= "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tox = [ - {file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"}, - {file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"}, - {file = "tox-3.26.0-py2.py3-none-any.whl", hash = "sha256:bf037662d7c740d15c9924ba23bb3e587df20598697bb985ac2b49bdc2d847f6"}, - {file = "tox-3.26.0.tar.gz", hash = "sha256:44f3c347c68c2c68799d7d44f1808f9d396fc8a1a500cbc624253375c7ae107e"}, -] -typed-ast = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] -typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -virtualenv = [ - {file = 
"virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, - {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, -] -zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, -] diff --git a/pyproject.toml b/pyproject.toml index 4d47bb5f..4dbf9c8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,22 +46,35 @@ typing-extensions = [ { version = "^4.3", python = "<3.8" } ] -[tool.poetry.dev-dependencies] +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] pytest = "^7.1" pytest-asyncio = ">=0.19,<1" pytest-benchmark = "^3.4" pytest-cov = "^3.0" pytest-describe = "^2.0" pytest-timeout = "^2.1" +tox = "^3.26" + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] black = "22.8.0" flake8 = "^5.0" isort = "^5.10" mypy = "0.971" -sphinx = "^5.1" -sphinx_rtd_theme = ">=1,<2" check-manifest = ">=0.48,<1" bump2version = ">=1.0,<2" -tox = "^3.26" + +[tool.poetry.group.doc] +optional = true + +[tool.poetry.group.doc.dependencies] +sphinx = "^5.1" +sphinx_rtd_theme = ">=1,<2" [tool.black] target-version = ['py37', 'py38', 'py39', 'py310'] @@ -136,5 +149,5 @@ timeout = "100" filterwarnings = "ignore::pytest.PytestConfigWarning" [build-system] -requires = ["poetry_core>=1,<2", "setuptools>=65,<70"] +requires = ["poetry_core>=1.2,<2", "setuptools>=65,<70"] build-backend = "poetry.core.masonry.api" From c97bd6842e9c74c58ff8ece9ad090968568e5e52 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 17:57:54 +0200 Subject: [PATCH 012/230] Add lockfile back Solve conflict between Flake8 and Sphinx by relaxing the version requirements for Sphinx in pyproject.toml (but not in 
tox.ini). --- poetry.lock | 1180 ++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 3 +- 2 files changed, 1182 insertions(+), 1 deletion(-) create mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..dbdc8ed0 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1180 @@ +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "attrs" +version = "22.1.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] + +[[package]] +name = "Babel" +version = "2.10.3" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz = ">=2015.7" + +[[package]] +name = "black" +version = "22.8.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "build" +version = "0.8.0" +description = "A simple, correct PEP 517 build frontend" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""} +packaging = ">=19.0" +pep517 = ">=0.9.1" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2021.08.31)", "sphinx (>=4.0,<5.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"] +test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"] +typing = ["importlib-metadata (>=4.6.4)", "mypy (==0.950)", "typing-extensions (>=3.7.4.3)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "bump2version" +version = "1.0.1" +description = "Version-bump your software with a single command!" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "certifi" +version = "2022.9.14" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "charset-normalizer" +version = "2.1.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "check-manifest" +version = "0.48" +description = "Check MANIFEST.in in a Python source package for completeness" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +build = ">=0.1" +setuptools = "*" +tomli = "*" + +[package.extras] +test = ["mock (>=3.0.0)", "pytest"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.5" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "coverage" +version = "6.4.4" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.6" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "filelock" +version = "3.8.0" +description = "A platform independent file lock." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, 
!=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "importlib-metadata" +version = "4.2.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] + +[[package]] +name = "Jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "MarkupSafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mypy" +version = "0.971" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.10.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pep517" +version = "0.13.0" +description = "Wrappers to build Python packages using PEP 517 hooks" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +zipp = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "py-cpuinfo" +version = "8.0.0" +description = "Get CPU info with pure Python 2 & 3" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "Pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pytest" +version = "7.1.3" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.19.0" +description = "Pytest support for asyncio" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=6.1.0" +typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} + +[package.extras] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-benchmark" +version = "3.4.1" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=3.8" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-describe" +version = "2.0.1" +description = "Describe-style plugin for pytest" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pytest = ">=4.0.0" + +[[package]] +name = "pytest-timeout" +version = "2.1.0" +description = "pytest plugin to abort hanging tests" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytest = ">=5.0.0" + +[[package]] +name = "pytz" +version = "2022.2.1" +description = "World timezone definitions, modern and historical" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." 
+category = "dev" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "65.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "Sphinx" +version = "4.3.2" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +setuptools = "*" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", "types-pkg-resources", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tox" +version = "3.26.0" +description = "tox is a generic virtualenv management and test command line tool" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[package.dependencies] +colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} +filelock = ">=3.0.0" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +packaging = ">=14" +pluggy = ">=0.12.0" +py = ">=1.4.17" +six = ">=1.14.0" +tomli = {version = ">=2.0.1", markers = "python_version >= \"3.7\" and python_version < \"3.11\""} +virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" + +[package.extras] +docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] +testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)"] + +[[package]] +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typing-extensions" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.12" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.16.2" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +distlib = ">=0.3.1,<1" +filelock = ">=3.2,<4" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +platformdirs = ">=2,<3" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] + +[[package]] +name = "zipp" +version = "3.8.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.7" +content-hash = "1a1371f2665f410245e83874106f75f160133fe6749d70ba6ca179b4837e50ae" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = 
"sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +Babel = [ + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +] +black = [ + {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, + {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, + {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, + {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, + {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, + {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, + {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, + {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, + {file = 
"black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, + {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, + {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, + {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, + {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, + {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, + {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, + {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, + {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, + {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, +] +build 
= [ + {file = "build-0.8.0-py3-none-any.whl", hash = "sha256:19b0ed489f92ace6947698c3ca8436cb0556a66e2aa2d34cd70e2a5d27cd0437"}, + {file = "build-0.8.0.tar.gz", hash = "sha256:887a6d471c901b1a6e6574ebaeeebb45e5269a79d095fe9a8f88d6614ed2e5f0"}, +] +bump2version = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] +certifi = [ + {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, + {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] +check-manifest = [ + {file = "check-manifest-0.48.tar.gz", hash = "sha256:3b575f1dade7beb3078ef4bf33a94519834457c7281dbc726b15c5466b55c657"}, + {file = "check_manifest-0.48-py3-none-any.whl", hash = "sha256:b1923685f98c1c2468601a1a7bed655db549a25d43c583caded3860ad8308f8c"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] +coverage = [ + {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, + {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, + {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, + {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, + {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, + {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, + {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, + {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, + {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, + {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, + {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, + {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, + {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, + {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, + {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, + {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, + {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, + {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, + {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, + {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, + {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, + {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, + {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, + {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, + {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, + {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, + {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, + {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, +] +distlib = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = 
"sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] +docutils = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] +filelock = [ + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, +] +flake8 = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] +idna = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +imagesize = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, + {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = 
"sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] +Jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +MarkupSafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = 
"sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = 
"MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = 
"MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] +mypy = [ + {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, + {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, + {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, + {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, + {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = 
"sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, + {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, + {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, + {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, + {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, + {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, + {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, + {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, + {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, + {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, + {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, + {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, + {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, + {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, + {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [ + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, +] +pep517 = [ + {file = "pep517-0.13.0-py3-none-any.whl", hash = "sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b"}, + {file = "pep517-0.13.0.tar.gz", hash = "sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = 
"sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +py-cpuinfo = [ + {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"}, +] +pycodestyle = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] +pyflakes = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] +Pygments = [ + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pytest = [ + {file = "pytest-7.1.3-py3-none-any.whl", hash = 
"sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, +] +pytest-asyncio = [ + {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, + {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, +] +pytest-benchmark = [ + {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, + {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, +] +pytest-cov = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] +pytest-describe = [ + {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, + {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"}, +] +pytest-timeout = [ + {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, + {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, +] +pytz = [ + {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, + {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, +] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = 
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +setuptools = [ + {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, + {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +Sphinx = [ + {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, + {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = 
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tox = [ + {file = "tox-3.26.0-py2.py3-none-any.whl", hash = "sha256:bf037662d7c740d15c9924ba23bb3e587df20598697bb985ac2b49bdc2d847f6"}, + {file = "tox-3.26.0.tar.gz", hash = "sha256:44f3c347c68c2c68799d7d44f1808f9d396fc8a1a500cbc624253375c7ae107e"}, +] +typed-ast = [ + {file 
= "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + 
{file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = 
"sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] +urllib3 = [ + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, +] +virtualenv = [ + {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, + {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, +] +zipp = [ + {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, + {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, +] diff --git a/pyproject.toml b/pyproject.toml index 4dbf9c8a..622a2c3f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,7 +73,8 @@ bump2version = ">=1.0,<2" optional = true [tool.poetry.group.doc.dependencies] -sphinx = "^5.1" +# Sphinx >= 4.4 has conflicting dependencies with Flake8 +sphinx = ">= 4.3" sphinx_rtd_theme = ">=1,<2" [tool.black] From 56e47b87861ae0753595b94f4347acfc5c24941a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 18:06:12 +0200 Subject: [PATCH 013/230] Limit Sphinx version --- docs/requirements.txt | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 0ad38822..2f6c4726 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=4.3,<5 +sphinx>=4.3,<6 sphinx_rtd_theme>=1,<2 diff --git a/pyproject.toml b/pyproject.toml index 622a2c3f..78d7e9f2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,7 +74,7 @@ 
optional = true [tool.poetry.group.doc.dependencies] # Sphinx >= 4.4 has conflicting dependencies with Flake8 -sphinx = ">= 4.3" +sphinx = ">= 4.3,<6" sphinx_rtd_theme = ">=1,<2" [tool.black] From 2b12f173de8137f97e0b5668dd4eec5bf19ced8c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 19:59:17 +0200 Subject: [PATCH 014/230] Remove support for setuptools, use poetry only (#178) That means we also don't need MANIFEST.in and check-manifest any more. --- .github/workflows/lint.yml | 2 +- .github/workflows/publish.yml | 5 ++- MANIFEST.in | 23 ------------- poetry.lock | 64 +---------------------------------- pyproject.toml | 5 +-- setup.cfg | 5 --- setup.py | 46 ------------------------- tests/benchmarks/__init__.py | 2 +- tox.ini | 14 +++----- 9 files changed, 10 insertions(+), 156 deletions(-) delete mode 100644 MANIFEST.in delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 373eb176..2b8acd4d 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -22,4 +22,4 @@ jobs: - name: Run code quality tests with tox run: tox env: - TOXENV: black,flake8,isort,mypy,docs,manifest + TOXENV: black,flake8,isort,mypy,docs diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index fc166745..f3725b66 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -19,9 +19,8 @@ jobs: - name: Build wheel and source tarball run: | - pip install wheel - python setup.py sdist bdist_wheel - + pip install poetry + poetry build - name: Publish a Python distribution to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 421aa24f..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,23 +0,0 @@ -include MANIFEST.in - -include CODEOWNERS -include LICENSE -include README.md -include SECURITY.md - -include .bumpversion.cfg -include .editorconfig -include .flake8 
-include .readthedocs.yaml - -include tox.ini - -include poetry.lock -include pyproject.toml - -graft src/graphql -graft tests -recursive-include docs *.txt *.rst conf.py Makefile make.bat *.jpg *.png *.gif -prune docs/_build - -global-exclude *.py[co] __pycache__ diff --git a/poetry.lock b/poetry.lock index dbdc8ed0..131f0946 100644 --- a/poetry.lock +++ b/poetry.lock @@ -54,27 +54,6 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] -[[package]] -name = "build" -version = "0.8.0" -description = "A simple, correct PEP 517 build frontend" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""} -packaging = ">=19.0" -pep517 = ">=0.9.1" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2021.08.31)", "sphinx (>=4.0,<5.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=4.6.4)", "mypy (==0.950)", "typing-extensions (>=3.7.4.3)"] -virtualenv = ["virtualenv (>=20.0.35)"] - [[package]] name = "bump2version" version = "1.0.1" @@ -102,22 +81,6 @@ python-versions = ">=3.6.0" [package.extras] unicode_backport = ["unicodedata2"] -[[package]] -name = "check-manifest" -version = "0.48" -description = "Check MANIFEST.in in a Python source package for completeness" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -build = ">=0.1" -setuptools = "*" -tomli = "*" - -[package.extras] -test = ["mock (>=3.0.0)", "pytest"] - [[package]] name = "click" version = "8.1.3" 
@@ -324,19 +287,6 @@ category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "pep517" -version = "0.13.0" -description = "Wrappers to build Python packages using PEP 517 hooks" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -zipp = {version = "*", markers = "python_version < \"3.8\""} - [[package]] name = "platformdirs" version = "2.5.2" @@ -774,7 +724,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "1a1371f2665f410245e83874106f75f160133fe6749d70ba6ca179b4837e50ae" +content-hash = "3f2775172e15ba8491dd4c0eb49800e7f6287e901a4863503317f0e8ec3aef15" [metadata.files] alabaster = [ @@ -814,10 +764,6 @@ black = [ {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, ] -build = [ - {file = "build-0.8.0-py3-none-any.whl", hash = "sha256:19b0ed489f92ace6947698c3ca8436cb0556a66e2aa2d34cd70e2a5d27cd0437"}, - {file = "build-0.8.0.tar.gz", hash = "sha256:887a6d471c901b1a6e6574ebaeeebb45e5269a79d095fe9a8f88d6614ed2e5f0"}, -] bump2version = [ {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, @@ -830,10 +776,6 @@ charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = 
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] -check-manifest = [ - {file = "check-manifest-0.48.tar.gz", hash = "sha256:3b575f1dade7beb3078ef4bf33a94519834457c7281dbc726b15c5466b55c657"}, - {file = "check_manifest-0.48-py3-none-any.whl", hash = "sha256:b1923685f98c1c2468601a1a7bed655db549a25d43c583caded3860ad8308f8c"}, -] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, @@ -1017,10 +959,6 @@ pathspec = [ {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] -pep517 = [ - {file = "pep517-0.13.0-py3-none-any.whl", hash = "sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b"}, - {file = "pep517-0.13.0.tar.gz", hash = "sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59"}, -] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, diff --git a/pyproject.toml b/pyproject.toml index 78d7e9f2..00dc1a47 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,9 +31,7 @@ packages = [ { include = '.readthedocs.yaml', format = "sdist" }, { include = 'poetry.lock', format = "sdist" }, { include = 'tox.ini', format = "sdist" }, - { include = 'setup.cfg', format = "sdist" }, { include = 'CODEOWNERS', format = "sdist" }, - { include = 'MANIFEST.in', format = "sdist" }, { include = 'SECURITY.md', format = "sdist" } ] @@ -66,7 +64,6 @@ black = "22.8.0" flake8 = "^5.0" isort = "^5.10" mypy = "0.971" -check-manifest = 
">=0.48,<1" bump2version = ">=1.0,<2" [tool.poetry.group.doc] @@ -150,5 +147,5 @@ timeout = "100" filterwarnings = "ignore::pytest.PytestConfigWarning" [build-system] -requires = ["poetry_core>=1.2,<2", "setuptools>=65,<70"] +requires = ["poetry_core>=1.2,<2"] build-backend = "poetry.core.masonry.api" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 7daeda08..00000000 --- a/setup.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[bdist_wheel] -python-tag = py3 - -[aliases] -test = pytest diff --git a/setup.py b/setup.py deleted file mode 100644 index c3743a7a..00000000 --- a/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -from re import search - -from setuptools import find_packages, setup - - -with open("src/graphql/version.py") as version_file: - version = search('version = "(.*)"', version_file.read()).group(1) - -with open("README.md") as readme_file: - readme = readme_file.read() - -setup( - name="graphql-core", - version=version, - description="GraphQL implementation for Python, a port of GraphQL.js," - " the JavaScript reference implementation for GraphQL.", - long_description=readme, - long_description_content_type="text/markdown", - keywords="graphql", - url="https://github.com/graphql-python/graphql-core", - author="Christoph Zwerschke", - author_email="cito@online.de", - license="MIT license", - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Topic :: Software Development :: Libraries", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - ], - install_requires=[ - "typing-extensions>=4.3,<5; python_version < '3.8'", - ], - python_requires=">=3.7,<4", - packages=find_packages("src"), - package_dir={"": "src"}, - # PEP-561: 
https://www.python.org/dev/peps/pep-0561/ - package_data={"graphql": ["py.typed"]}, - include_package_data=True, - zip_safe=False, -) diff --git a/tests/benchmarks/__init__.py b/tests/benchmarks/__init__.py index 0f0a83c1..4142e203 100644 --- a/tests/benchmarks/__init__.py +++ b/tests/benchmarks/__init__.py @@ -1,6 +1,6 @@ """Benchmarks for graphql -Benchmarks are disabled (only executed as tests) by default in setup.cfg. +Benchmarks are disabled (only executed as tests) by default in pyproject.toml. You can enable them with --benchmark-enable if your want to execute them. E.g. in order to execute all the benchmarks with tox using Python 3.9:: diff --git a/tox.ini b/tox.ini index 7887ed5c..604713ff 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{7,8,9,10}, black, flake8, isort, mypy, docs, manifest +envlist = py3{7,8,9,10}, black, flake8, isort, mypy, docs isolated_build = true [gh-actions] @@ -13,19 +13,19 @@ python = basepython = python3.9 deps = black==22.8.0 commands = - black src tests setup.py -t py39 --check + black src tests -t py39 --check [testenv:flake8] basepython = python3.9 deps = flake8>=5,<6 commands = - flake8 src tests setup.py + flake8 src tests [testenv:isort] basepython = python3.9 deps = isort>=5.10,<6 commands = - isort src tests setup.py --check-only + isort src tests --check-only [testenv:mypy] basepython = python3.9 @@ -43,12 +43,6 @@ deps = commands = sphinx-build -b html -nEW docs docs/_build/html -[testenv:manifest] -basepython = python3.9 -deps = check-manifest>=0.48,<1 -commands = - check-manifest -v - [testenv] deps = pytest>=7.1,<8 From c991716043d0525ea43a77fb784af480e3a720db Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 21:02:09 +0200 Subject: [PATCH 015/230] Add flake8-bandit to report common security issues --- .bandit | 4 ++++ .flake8 | 2 ++ pyproject.toml | 22 +++++++++++++--------- tox.ini | 4 +++- 4 files changed, 22 insertions(+), 10 deletions(-) create mode 100644 .bandit 
diff --git a/.bandit b/.bandit new file mode 100644 index 00000000..0b9b0e68 --- /dev/null +++ b/.bandit @@ -0,0 +1,4 @@ +# unfortunately, flake8-bandit does not support pyproject.toml + +[bandit] +exclude = /tests diff --git a/.flake8 b/.flake8 index ccded588..93056d26 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,5 @@ +# unfortunately, flake8 does not support pyproject.toml + [flake8] ignore = E203,W503 exclude = .git,.mypy_cache,.pytest_cache,.tox,.venv,__pycache__,build,dist,docs diff --git a/pyproject.toml b/pyproject.toml index 00dc1a47..46490240 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,14 +25,14 @@ packages = [ { include = "graphql", from = "src" }, { include = "tests", format = "sdist" }, { include = "docs", format = "sdist" }, - { include = '.bumpversion.cfg', format = "sdist" }, - { include = '.editorconfig', format = "sdist" }, - { include = '.flake8', format = "sdist" }, - { include = '.readthedocs.yaml', format = "sdist" }, - { include = 'poetry.lock', format = "sdist" }, - { include = 'tox.ini', format = "sdist" }, - { include = 'CODEOWNERS', format = "sdist" }, - { include = 'SECURITY.md', format = "sdist" } + { include = ".bumpversion.cfg", format = "sdist" }, + { include = ".editorconfig", format = "sdist" }, + { include = ".flake8", format = "sdist" }, + { include = ".readthedocs.yaml", format = "sdist" }, + { include = "poetry.lock", format = "sdist" }, + { include = "tox.ini", format = "sdist" }, + { include = "CODEOWNERS", format = "sdist" }, + { include = "SECURITY.md", format = "sdist" } ] [tool.poetry.urls] @@ -62,6 +62,7 @@ optional = true [tool.poetry.group.lint.dependencies] black = "22.8.0" flake8 = "^5.0" +flake8-bandit = "^4.1" isort = "^5.10" mypy = "0.971" bump2version = ">=1.0,<2" @@ -74,8 +75,11 @@ optional = true sphinx = ">= 4.3,<6" sphinx_rtd_theme = ">=1,<2" +[tool.bandit] +exclude_dirs = ["tests"] + [tool.black] -target-version = ['py37', 'py38', 'py39', 'py310'] +target-version = ["py37", "py38", "py39", 
"py310"] [tool.coverage.run] branch = true diff --git a/tox.ini b/tox.ini index 604713ff..31204de7 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,9 @@ commands = [testenv:flake8] basepython = python3.9 -deps = flake8>=5,<6 +deps = + flake8>=5,<6 + flake8-bandit>=4.1,<6 commands = flake8 src tests From b594f18076369cdf1bdf65f80679a0f1f5f15e45 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 22:16:11 +0200 Subject: [PATCH 016/230] Add flake8-bugbear to report potential bugs --- .flake8 | 1 + pyproject.toml | 1 + src/graphql/execution/execute.py | 4 ++-- src/graphql/execution/values.py | 8 +++++--- src/graphql/language/source.py | 7 +++++-- src/graphql/language/visitor.py | 2 +- src/graphql/pyutils/cached_property.py | 2 +- src/graphql/pyutils/identity_func.py | 4 +++- tests/language/test_ast.py | 5 +++-- tests/pyutils/test_simple_pub_sub.py | 2 +- tox.ini | 1 + 11 files changed, 24 insertions(+), 13 deletions(-) diff --git a/.flake8 b/.flake8 index 93056d26..ad0b79f2 100644 --- a/.flake8 +++ b/.flake8 @@ -4,3 +4,4 @@ ignore = E203,W503 exclude = .git,.mypy_cache,.pytest_cache,.tox,.venv,__pycache__,build,dist,docs max-line-length = 88 +per-file-ignores = tests/*:B011 diff --git a/pyproject.toml b/pyproject.toml index 46490240..7596cc76 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,6 +63,7 @@ optional = true black = "22.8.0" flake8 = "^5.0" flake8-bandit = "^4.1" +flake8-bugbear = "22.9.23" isort = "^5.10" mypy = "0.971" bump2version = ">=1.0,<2" diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 9a7b3108..97b5927b 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -714,7 +714,7 @@ async def await_completed(item: Any, item_path: Path) -> Any: error = located_error( raw_error, field_nodes, item_path.as_list() ) - self.handle_field_error(error, item_type) + self.handle_field_error(error, item_type) # noqa: B023 return None completed_item = await_completed(item, 
item_path) @@ -732,7 +732,7 @@ async def await_completed(item: Any, item_path: Path) -> Any: error = located_error( raw_error, field_nodes, item_path.as_list() ) - self.handle_field_error(error, item_type) + self.handle_field_error(error, item_type) # noqa: B023 return None completed_item = await_completed(completed_item, item_path) diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 625d2028..11dc5638 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -125,13 +125,15 @@ def on_input_value_error( path: List[Union[str, int]], invalid_value: Any, error: GraphQLError ) -> None: invalid_str = inspect(invalid_value) - prefix = f"Variable '${var_name}' got invalid value {invalid_str}" + prefix = ( + f"Variable '${var_name}' got invalid value {invalid_str}" # noqa: B023 + ) if path: - prefix += f" at '{var_name}{print_path_list(path)}'" + prefix += f" at '{var_name}{print_path_list(path)}'" # noqa: B023 on_error( GraphQLError( prefix + "; " + error.message, - var_def_node, + var_def_node, # noqa: B023 original_error=error.original_error, ) ) diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index e4ec0e89..1a71b67d 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -5,6 +5,9 @@ __all__ = ["Source", "is_source"] +DEFAULT_NAME = "GraphQL request" +DEFAULT_SOURCE_LOCATION = SourceLocation(1, 1) + class Source: """A representation of source input to GraphQL.""" @@ -15,8 +18,8 @@ class Source: def __init__( self, body: str, - name: str = "GraphQL request", - location_offset: SourceLocation = SourceLocation(1, 1), + name: str = DEFAULT_NAME, + location_offset: SourceLocation = DEFAULT_SOURCE_LOCATION, ) -> None: """Initialize source input. 
diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index f6fe9eb1..d8d17f16 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -110,7 +110,7 @@ def leave(self, node, key, parent, path, ancestors): def __init_subclass__(cls) -> None: """Verify that all defined handlers are valid.""" super().__init_subclass__() - for attr, val in cls.__dict__.items(): + for attr in cls.__dict__: if attr.startswith("_"): continue attr_kind = attr.split("_", 1) diff --git a/src/graphql/pyutils/cached_property.py b/src/graphql/pyutils/cached_property.py index 43e34eda..4e34be22 100644 --- a/src/graphql/pyutils/cached_property.py +++ b/src/graphql/pyutils/cached_property.py @@ -22,7 +22,7 @@ class CachedProperty: """ def __init__(self, func: Callable) -> None: - self.__doc__ = getattr(func, "__doc__") + self.__doc__ = func.__doc__ self.func = func def __get__(self, obj: object, cls: type) -> Any: diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index d8efdf26..7fb6f86e 100644 --- a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -8,7 +8,9 @@ T = TypeVar("T") +DEFAULT_VALUE = cast(Any, Undefined) -def identity_func(x: T = cast(Any, Undefined), *_args: Any) -> T: + +def identity_func(x: T = DEFAULT_VALUE, *_args: Any) -> T: """Return the first received argument.""" return x diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py index d61502b9..46619e40 100644 --- a/tests/language/test_ast.py +++ b/tests/language/test_ast.py @@ -180,18 +180,19 @@ def can_hash(): assert node3 != node assert hash(node3) != hash(node) + # noinspection PyProtectedMember def caches_are_hashed(): node = SampleTestNode(alpha=1) assert not hasattr(node, "_hash") hash1 = hash(node) assert hasattr(node, "_hash") - assert hash1 == getattr(node, "_hash") + assert hash1 == node._hash node.alpha = 2 assert not hasattr(node, "_hash") hash2 = hash(node) assert hash2 != 
hash1 assert hasattr(node, "_hash") - assert hash2 == getattr(node, "_hash") + assert hash2 == node._hash def can_create_weak_reference(): node = SampleTestNode(alpha=1, beta=2) diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 47060d3f..1c6f793b 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -91,6 +91,6 @@ async def iterator_aclose_is_idempotent(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() assert iterator.listening - for n in range(3): + for _n in range(3): await iterator.aclose() assert not iterator.listening diff --git a/tox.ini b/tox.ini index 31204de7..3a552d0a 100644 --- a/tox.ini +++ b/tox.ini @@ -20,6 +20,7 @@ basepython = python3.9 deps = flake8>=5,<6 flake8-bandit>=4.1,<6 + flake8-bugbear==22.9.23 commands = flake8 src tests From f8b11c2689325dd646daee3330c806674cebe9e9 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 22:22:52 +0200 Subject: [PATCH 017/230] Remove deprecated FrozenDict and FrozenList classes --- src/graphql/pyutils/__init__.py | 4 - src/graphql/pyutils/frozen_dict.py | 55 -------------- src/graphql/pyutils/frozen_list.py | 73 ------------------- tests/pyutils/test_frozen_dict.py | 97 ------------------------- tests/pyutils/test_frozen_list.py | 113 ----------------------------- tests/pyutils/test_is_iterable.py | 22 +----- 6 files changed, 1 insertion(+), 363 deletions(-) delete mode 100644 src/graphql/pyutils/frozen_dict.py delete mode 100644 src/graphql/pyutils/frozen_list.py delete mode 100644 tests/pyutils/test_frozen_dict.py delete mode 100644 tests/pyutils/test_frozen_list.py diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index c156de41..63035a62 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -26,8 +26,6 @@ from .awaitable_or_value import AwaitableOrValue from .suggestion_list import suggestion_list from .frozen_error import 
FrozenError -from .frozen_list import FrozenList -from .frozen_dict import FrozenDict from .merge_kwargs import merge_kwargs from .path import Path from .print_path_list import print_path_list @@ -54,8 +52,6 @@ "AwaitableOrValue", "suggestion_list", "FrozenError", - "FrozenList", - "FrozenDict", "Path", "print_path_list", "SimplePubSub", diff --git a/src/graphql/pyutils/frozen_dict.py b/src/graphql/pyutils/frozen_dict.py deleted file mode 100644 index f466b88a..00000000 --- a/src/graphql/pyutils/frozen_dict.py +++ /dev/null @@ -1,55 +0,0 @@ -from __future__ import annotations # Python < 3.10 - -from copy import deepcopy -from typing import Dict, TypeVar - -from .frozen_error import FrozenError - - -__all__ = ["FrozenDict"] - -KT = TypeVar("KT") -VT = TypeVar("VT", covariant=True) - - -class FrozenDict(Dict[KT, VT]): - """Dictionary that can only be read, but not changed. - - .. deprecated:: 3.2 - Use dicts and the Mapping type instead. Will be removed in v3.3. - """ - - def __delitem__(self, key): - raise FrozenError - - def __setitem__(self, key, value): - raise FrozenError - - def __iadd__(self, value): - raise FrozenError - - def __hash__(self): - return hash(tuple(self.items())) - - def __copy__(self) -> FrozenDict: - return FrozenDict(self) - - copy = __copy__ - - def __deepcopy__(self, memo: Dict) -> FrozenDict: - return FrozenDict({k: deepcopy(v, memo) for k, v in self.items()}) - - def clear(self): - raise FrozenError - - def pop(self, key, default=None): - raise FrozenError - - def popitem(self): - raise FrozenError - - def setdefault(self, key, default=None): - raise FrozenError - - def update(self, other=None): - raise FrozenError diff --git a/src/graphql/pyutils/frozen_list.py b/src/graphql/pyutils/frozen_list.py deleted file mode 100644 index 52dd355b..00000000 --- a/src/graphql/pyutils/frozen_list.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import annotations # Python < 3.10 - -from copy import deepcopy -from typing import Dict, List, TypeVar - 
-from .frozen_error import FrozenError - - -__all__ = ["FrozenList"] - - -T = TypeVar("T", covariant=True) - - -class FrozenList(List[T]): - """List that can only be read, but not changed. - - .. deprecated:: 3.2 - Use tuples or lists and the Collection type instead. Will be removed in v3.3. - """ - - def __delitem__(self, key): - raise FrozenError - - def __setitem__(self, key, value): - raise FrozenError - - def __add__(self, value): - if isinstance(value, tuple): - value = list(value) - return list.__add__(self, value) - - def __iadd__(self, value): - raise FrozenError - - def __mul__(self, value): - return list.__mul__(self, value) - - def __imul__(self, value): - raise FrozenError - - def __hash__(self): - return hash(tuple(self)) - - def __copy__(self) -> FrozenList: - return FrozenList(self) - - def __deepcopy__(self, memo: Dict) -> FrozenList: - return FrozenList(deepcopy(value, memo) for value in self) - - def append(self, x): - raise FrozenError - - def extend(self, iterable): - raise FrozenError - - def insert(self, i, x): - raise FrozenError - - def remove(self, x): - raise FrozenError - - def pop(self, i=None): - raise FrozenError - - def clear(self): - raise FrozenError - - def sort(self, *, key=None, reverse=False): - raise FrozenError - - def reverse(self): - raise FrozenError diff --git a/tests/pyutils/test_frozen_dict.py b/tests/pyutils/test_frozen_dict.py deleted file mode 100644 index 27160980..00000000 --- a/tests/pyutils/test_frozen_dict.py +++ /dev/null @@ -1,97 +0,0 @@ -from copy import copy, deepcopy - -from pytest import raises - -from graphql.pyutils import FrozenDict, FrozenError - - -def describe_frozen_list(): - def can_read(): - fd = FrozenDict({1: 2, 3: 4}) - assert fd == {1: 2, 3: 4} - assert list(i for i in fd) == [1, 3] - assert fd.copy() == fd - assert 3 in fd - assert 2 not in fd - assert fd[1] == 2 - with raises(KeyError): - # noinspection PyStatementEffect - fd[2] - assert len(fd) == 2 - assert fd.get(1) == 2 - assert 
fd.get(2, 5) == 5 - assert list(fd.items()) == [(1, 2), (3, 4)] - assert list(fd.keys()) == [1, 3] - assert list(fd.values()) == [2, 4] - - def cannot_write(): - fd = FrozenDict({1: 2, 3: 4}) - with raises(FrozenError): - fd[1] = 2 - with raises(FrozenError): - fd[4] = 5 - with raises(FrozenError): - del fd[1] - with raises(FrozenError): - del fd[3] - with raises(FrozenError): - fd.clear() - with raises(FrozenError): - fd.pop(1) - with raises(FrozenError): - fd.pop(4, 5) - with raises(FrozenError): - fd.popitem() - with raises(FrozenError): - fd.setdefault(1, 2) - with raises(FrozenError): - fd.setdefault(4, 5) - with raises(FrozenError): - fd.update({1: 2}) - with raises(FrozenError): - fd.update({4: 5}) - with raises(FrozenError): - fd += {4: 5} - assert fd == {1: 2, 3: 4} - - def can_hash(): - fd1 = FrozenDict({1: 2, 3: 4}) - fd2 = FrozenDict({1: 2, 3: 4}) - assert fd2 == fd1 - assert fd2 is not fd1 - assert hash(fd2) is not hash(fd1) - fd3 = FrozenDict({1: 2, 3: 5}) - assert fd3 != fd1 - assert hash(fd3) != hash(fd1) - - def can_copy(): - fd1 = FrozenDict({1: 2, 3: 4}) - fd2 = fd1.copy() - assert isinstance(fd2, FrozenDict) - assert fd2 == fd1 - assert hash(fd2) == hash(fd1) - assert fd2 is not fd1 - fd3 = copy(fd1) - assert isinstance(fd3, FrozenDict) - assert fd3 == fd1 - assert hash(fd3) == hash(fd1) - assert fd3 is not fd1 - - def can_deep_copy(): - fd11 = FrozenDict({1: 2, 3: 4}) - fd12 = FrozenDict({2: 1, 4: 3}) - fd1 = FrozenDict({1: fd11, 2: fd12}) - assert fd1[1] is fd11 - assert fd1[2] is fd12 - fd2 = deepcopy(fd1) - assert isinstance(fd2, FrozenDict) - assert fd2 == fd1 - assert hash(fd2) == hash(fd1) - fd21 = fd2[1] - fd22 = fd2[2] - assert isinstance(fd21, FrozenDict) - assert isinstance(fd22, FrozenDict) - assert fd21 == fd11 - assert fd21 is not fd11 - assert fd22 == fd12 - assert fd22 is not fd12 diff --git a/tests/pyutils/test_frozen_list.py b/tests/pyutils/test_frozen_list.py deleted file mode 100644 index fea86bff..00000000 --- 
a/tests/pyutils/test_frozen_list.py +++ /dev/null @@ -1,113 +0,0 @@ -from copy import copy, deepcopy - -from pytest import raises - -from graphql.pyutils import FrozenError, FrozenList - - -def describe_frozen_list(): - def can_read(): - fl = FrozenList([1, 2, 3]) - assert fl == [1, 2, 3] - assert list(i for i in fl) == fl - assert fl.copy() == fl - assert 2 in fl - assert 4 not in fl - assert fl + [4, 5] == [1, 2, 3, 4, 5] - assert [4, 5] + fl == [4, 5, 1, 2, 3] - assert fl * 2 == [1, 2, 3, 1, 2, 3] - assert 2 * fl == [1, 2, 3, 1, 2, 3] - assert fl[1] == 2 - with raises(IndexError): - fl[3] - assert fl[1:4] == [2, 3] - assert fl[::2] == [1, 3] - assert len(fl) == 3 - assert min(fl) == 1 - assert max(fl) == 3 - assert sum(fl) == 6 - assert fl.index(2) == 1 - with raises(ValueError): - fl.index(4) - assert fl.count(2) == 1 - assert fl.count(4) == 0 - assert list(reversed(fl)) == [3, 2, 1] - assert sorted(fl) == [1, 2, 3] - - def cannot_write(): - fl = FrozenList([1, 2, 3]) - with raises(FrozenError): - fl[1] = 4 - with raises(FrozenError): - fl[1:4] = [4] - with raises(FrozenError): - del fl[1] - with raises(FrozenError): - del fl[1:4] - with raises(FrozenError): - fl[1::2] = [4] - with raises(FrozenError): - del fl[::2] - with raises(FrozenError): - fl.append(4) - with raises(FrozenError): - fl.clear() - with raises(FrozenError): - fl.extend([4]) - with raises(FrozenError): - fl += [4] - with raises(FrozenError): - fl *= 2 - with raises(FrozenError): - fl.insert(1, 4) - with raises(FrozenError): - fl.pop() - with raises(FrozenError): - fl.remove(2) - with raises(FrozenError): - fl.sort() - with raises(FrozenError): - fl.reverse() - assert fl == [1, 2, 3] - - def can_add_rol(): - fl1 = FrozenList([1, 2]) - rol2 = FrozenList([3, 4]) - assert fl1 + rol2 == [1, 2, 3, 4] - - def can_add_tuple(): - fl = FrozenList([1, 2]) - assert fl + (3, 4) == [1, 2, 3, 4] - - def can_hash(): - fl1 = FrozenList([1, 2]) - fl2 = FrozenList([1, 2]) - assert fl2 == fl1 - assert fl2 is not 
fl1 - assert hash(fl2) == hash(fl1) - fl3 = FrozenList([1, 3]) - assert fl3 != fl1 - assert hash(fl3) != hash(fl1) - - def can_copy(): - fl1 = FrozenList([1, 2]) - fl2 = copy(fl1) - assert isinstance(fl2, FrozenList) - assert fl2 == fl1 - assert hash(fl2) == hash(fl1) - assert fl2 is not fl1 - - def can_deep_copy(): - fl11 = FrozenList([1, 2]) - fl12 = FrozenList([2, 1]) - fl1 = FrozenList([fl11, fl12]) - fl2 = deepcopy(fl1) - assert isinstance(fl2, FrozenList) - assert fl2 == fl1 - assert hash(fl2) == hash(fl1) - assert isinstance(fl2[0], FrozenList) - assert isinstance(fl2[1], FrozenList) - assert fl2[0] == fl1[0] - assert fl2[0] is not fl1[0] - assert fl2[1] == fl1[1] - assert fl2[1] is not fl1[1] diff --git a/tests/pyutils/test_is_iterable.py b/tests/pyutils/test_is_iterable.py index 5dbf210a..e40e6961 100644 --- a/tests/pyutils/test_is_iterable.py +++ b/tests/pyutils/test_is_iterable.py @@ -3,7 +3,7 @@ from decimal import Decimal from itertools import count -from graphql.pyutils import FrozenDict, FrozenList, is_collection, is_iterable +from graphql.pyutils import is_collection, is_iterable def describe_is_collection(): @@ -12,11 +12,6 @@ def should_return_true_for_lists(): assert is_collection([0, 1, 2]) is True assert is_collection(["A", "B", "C"]) is True - def should_return_true_for_frozen_lists(): - assert is_collection(FrozenList()) is True - assert is_collection(FrozenList([0, 1, 2])) is True - assert is_collection(FrozenList(["A", "B", "C"])) is True - def should_return_true_for_tuples(): assert is_collection(()) is True assert is_collection((0, 1, 1)) is True @@ -100,11 +95,6 @@ def should_return_false_for_dicts(): assert is_collection({"__iter__": True}) is False assert is_collection({0: "A", 1: "B", 2: "C"}) is False - def should_return_false_for_frozen_dicts(): - assert is_collection(FrozenDict()) is False - assert is_collection(FrozenDict({"__iter__": True})) is False - assert is_collection(FrozenDict({0: "A", 1: "B", 2: "C"})) is False - def 
should_return_false_for_default_dicts(): assert is_collection(defaultdict(list)) is False @@ -126,11 +116,6 @@ def should_return_true_for_lists(): assert is_iterable([0, 1, 2]) is True assert is_iterable(["A", "B", "C"]) is True - def should_return_true_for_frozen_lists(): - assert is_iterable(FrozenList()) is True - assert is_iterable(FrozenList([0, 1, 2])) is True - assert is_iterable(FrozenList(["A", "B", "C"])) is True - def should_return_true_for_tuples(): assert is_iterable(()) is True assert is_iterable((0, 1, 1)) is True @@ -214,11 +199,6 @@ def should_return_false_for_dicts(): assert is_iterable({"__iter__": True}) is False assert is_iterable({0: "A", 1: "B", 2: "C"}) is False - def should_return_false_for_frozen_dicts(): - assert is_iterable(FrozenDict()) is False - assert is_iterable(FrozenDict({"__iter__": True})) is False - assert is_iterable(FrozenDict({0: "A", 1: "B", 2: "C"})) is False - def should_return_false_for_default_dicts(): assert is_iterable(defaultdict(list)) is False From b52a24015b7409d8416cd0c1071b67b24a6bfbda Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 23 Sep 2022 23:21:50 +0200 Subject: [PATCH 018/230] print: add spaces inside input object Replicates graphql/graphql-js@5ccf579a3bfd98243700ef541a6939af7a6f545c --- src/graphql/language/printer.py | 2 +- tests/language/test_printer.py | 9 +++++---- tests/language/test_schema_printer.py | 2 +- tests/type/test_custom_scalars.py | 4 ++-- tests/type/test_introspection.py | 4 ++-- tests/type/test_scalars.py | 15 ++++++++------- tests/utilities/test_build_client_schema.py | 2 +- tests/utilities/test_find_breaking_changes.py | 4 ++-- tests/utilities/test_sort_value_node.py | 8 ++++---- 9 files changed, 26 insertions(+), 24 deletions(-) diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 7382064c..53fd5656 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -169,7 +169,7 @@ def leave_list_value(node: 
PrintedNode, *_args: Any) -> str: @staticmethod def leave_object_value(node: PrintedNode, *_args: Any) -> str: - return f"{{{join(node.fields, ', ')}}}" + return f"{{ {join(node.fields, ', ')} }}" @staticmethod def leave_object_field(node: PrintedNode, *_args: Any) -> str: diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index 4468478b..5628b7d2 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -60,11 +60,12 @@ def correctly_prints_mutation_operation_with_artifacts(): def prints_query_with_variable_directives(): query_ast_with_variable_directive = parse( - "query ($foo: TestType = {a: 123}" " @testDirective(if: true) @test) { id }" + "query ($foo: TestType = { a: 123 }" + " @testDirective(if: true) @test) { id }" ) assert print_ast(query_ast_with_variable_directive) == dedent( """ - query ($foo: TestType = {a: 123} @testDirective(if: true) @test) { + query ($foo: TestType = { a: 123 } @testDirective(if: true) @test) { id } """ @@ -185,9 +186,9 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_query): # noqa: F811 foo( size: $size bar: $b - obj: {key: "value", block: """ + obj: { key: "value", block: """ block string uses \""" - """} + """ } ) } diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index 6ede54b6..93190216 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -56,7 +56,7 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 three(argument: InputType, other: String): Int four(argument: String = "string"): String five(argument: [String] = ["string", "string"]): String - six(argument: InputType = {key: "value"}): Type + six(argument: InputType = { key: "value" }): Type seven(argument: Int = null): Type } diff --git a/tests/type/test_custom_scalars.py b/tests/type/test_custom_scalars.py index bf63c0b5..e2576498 100644 --- a/tests/type/test_custom_scalars.py +++ 
b/tests/type/test_custom_scalars.py @@ -186,7 +186,7 @@ def parse_literal(): def parse_literal_with_errors(): source = """ query Money($amount: String!, $currency: Float!) { - toEuros(money: {amount: $amount, currency: $currency}) + toEuros(money: { amount: $amount, currency: $currency }) } """ @@ -197,7 +197,7 @@ def parse_literal_with_errors(): [ { "message": "Argument 'money' has invalid value" - " {amount: $amount, currency: $currency}.", + " { amount: $amount, currency: $currency }.", "locations": [(3, 30)], }, ], diff --git a/tests/type/test_introspection.py b/tests/type/test_introspection.py index 20f26c20..09a21c31 100644 --- a/tests/type/test_introspection.py +++ b/tests/type/test_introspection.py @@ -1079,7 +1079,7 @@ def introspects_any_default_value(): """ input InputObjectWithDefaultValues { a: String = "Emoji: \\u{1F600}" - b: Complex = {x: ["abc"], y: 123} + b: Complex = { x: ["abc"], y: 123 } } input Complex { @@ -1109,7 +1109,7 @@ def introspects_any_default_value(): "__type": { "inputFields": [ {"name": "a", "defaultValue": '"Emoji: \U0001f600"'}, - {"name": "b", "defaultValue": '{x: ["abc"], y: 123}'}, + {"name": "b", "defaultValue": '{ x: ["abc"], y: 123 }'}, ] } }, diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py index f9d04f7a..c5413803 100644 --- a/tests/type/test_scalars.py +++ b/tests/type/test_scalars.py @@ -93,7 +93,7 @@ def _parse_literal_raises(s: str, message: str): ) _parse_literal_raises("[1]", "Int cannot represent non-integer value: [1]") _parse_literal_raises( - "{value: 1}", "Int cannot represent non-integer value: {value: 1}" + "{value: 1}", "Int cannot represent non-integer value: { value: 1 }" ) _parse_literal_raises( "ENUM_VALUE", "Int cannot represent non-integer value: ENUM_VALUE" @@ -246,7 +246,8 @@ def _parse_literal_raises(s: str, message: str): "[0.1]", "Float cannot represent non numeric value: [0.1]" ) _parse_literal_raises( - "{value: 0.1}", "Float cannot represent non numeric value: {value: 0.1}" 
+ "{value: 0.1}", + "Float cannot represent non numeric value: { value: 0.1 }", ) _parse_literal_raises( "ENUM_VALUE", "Float cannot represent non numeric value: ENUM_VALUE" @@ -357,7 +358,7 @@ def _parse_literal_raises(s: str, message: str): ) _parse_literal_raises( '{value: "foo"}', - 'String cannot represent a non string value: {value: "foo"}', + 'String cannot represent a non string value: { value: "foo" }', ) _parse_literal_raises( "ENUM_VALUE", "String cannot represent a non string value: ENUM_VALUE" @@ -488,11 +489,11 @@ def _parse_literal_raises(s: str, message: str): ) _parse_literal_raises( "{value: false}", - "Boolean cannot represent a non boolean value: {value: false}", + "Boolean cannot represent a non boolean value: { value: false }", ) _parse_literal_raises( "{value: False}", - "Boolean cannot represent a non boolean value: {value: False}", + "Boolean cannot represent a non boolean value: { value: False }", ) _parse_literal_raises( "ENUM_VALUE", "Boolean cannot represent a non boolean value: ENUM_VALUE" @@ -614,9 +615,9 @@ def _parse_literal_raises(s: str, message: str): '["1"]', 'ID cannot represent a non-string and non-integer value: ["1"]' ) _parse_literal_raises( - '{value: "1"}', + '{ value: "1" }', "ID cannot represent a non-string and non-integer value:" - ' {value: "1"}', + ' { value: "1" }', ) _parse_literal_raises( "ENUM_VALUE", diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 07123178..83c5935e 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -459,7 +459,7 @@ def builds_a_schema_with_field_arguments_with_default_values(): type Query { defaultInt(intArg: Int = 30): String defaultList(listArg: [Int] = [1, 2, 3]): String - defaultObject(objArg: Geo = {lat: 37.485, lon: -122.148}): String + defaultObject(objArg: Geo = { lat: 37.485, lon: -122.148 }): String defaultNull(intArg: Int = null): String noDefault(intArg: Int): 
String } diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index 72dde601..c9003a6c 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -986,8 +986,8 @@ def should_detect_if_a_default_value_changed_on_an_argument(): ( DangerousChangeType.ARG_DEFAULT_VALUE_CHANGE, "Type1.field1 arg complexObject has changed defaultValue" - " from {innerInputArray: [{arrayField: [1, 2, 3]}]}" - " to {innerInputArray: [{arrayField: [3, 2, 1]}]}.", + " from { innerInputArray: [{ arrayField: [1, 2, 3] }] }" + " to { innerInputArray: [{ arrayField: [3, 2, 1] }] }.", ), ] diff --git a/tests/utilities/test_sort_value_node.py b/tests/utilities/test_sort_value_node.py index 5d246567..5ec97db1 100644 --- a/tests/utilities/test_sort_value_node.py +++ b/tests/utilities/test_sort_value_node.py @@ -21,13 +21,13 @@ def do_not_change_non_object_values(): ) def sort_input_object_fields(): - _expect_sorted_value("{ b: 2, a: 1 }", "{a: 1, b: 2}") - _expect_sorted_value("{ a: { c: 3, b: 2 } }", "{a: {b: 2, c: 3}}") + _expect_sorted_value("{ b: 2, a: 1 }", "{ a: 1, b: 2 }") + _expect_sorted_value("{ a: { c: 3, b: 2 } }", "{ a: { b: 2, c: 3 } }") _expect_sorted_value( "[{ b: 2, a: 1 }, { d: 4, c: 3}]", - "[{a: 1, b: 2}, {c: 3, d: 4}]", + "[{ a: 1, b: 2 }, { c: 3, d: 4 }]", ) _expect_sorted_value( "{ b: { g: 7, f: 6 }, c: 3 , a: { d: 4, e: 5 } }", - "{a: {d: 4, e: 5}, b: {f: 6, g: 7}, c: 3}", + "{ a: { d: 4, e: 5 }, b: { f: 6, g: 7 }, c: 3 }", ) From 7dcc5127e6b1f0f2d7f05a1eed2e58e8a59655eb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 14:13:35 +0200 Subject: [PATCH 019/230] validate_schema: unify check of root types Replicates graphql/graphql-js@839f2449ffe6589735df80bb24e30293bf22cb19 --- src/graphql/type/validate.py | 41 +++++++++++++----------------------- 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/src/graphql/type/validate.py 
b/src/graphql/type/validate.py index f49a5123..8468b61c 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -103,33 +103,22 @@ def report_error( def validate_root_types(self) -> None: schema = self.schema - query_type = schema.query_type - if not query_type: + if not schema.query_type: self.report_error("Query root type must be provided.", schema.ast_node) - elif not is_object_type(query_type): - self.report_error( - f"Query root type must be Object type, it cannot be {query_type}.", - get_operation_type_node(schema, OperationType.QUERY) - or query_type.ast_node, - ) - - mutation_type = schema.mutation_type - if mutation_type and not is_object_type(mutation_type): - self.report_error( - "Mutation root type must be Object type if provided," - f" it cannot be {mutation_type}.", - get_operation_type_node(schema, OperationType.MUTATION) - or mutation_type.ast_node, - ) - - subscription_type = schema.subscription_type - if subscription_type and not is_object_type(subscription_type): - self.report_error( - "Subscription root type must be Object type if provided," - f" it cannot be {subscription_type}.", - get_operation_type_node(schema, OperationType.SUBSCRIPTION) - or subscription_type.ast_node, - ) + for operation_type in OperationType: + root_type = schema.get_root_type(operation_type) + if root_type and not is_object_type(root_type): + operation_type_str = operation_type.value.capitalize() + root_type_str = inspect(root_type) + if_provided_str = ( + "" if operation_type == operation_type.QUERY else " if provided" + ) + self.report_error( + f"{operation_type_str} root type must be Object type" + f"{if_provided_str}, it cannot be {root_type_str}.", + get_operation_type_node(schema, operation_type) + or root_type.ast_node, + ) def validate_directives(self) -> None: directives = self.schema.directives From 2d8699c376690287eed8b285d3b9824b53321604 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 14:18:04 +0200 Subject: 
[PATCH 020/230] test_validation: Test root type validation with multiple types Replicates graphql/graphql-js@d3d68e82ba29d69f9c07015f7c1a7792f9ca9c4f --- tests/type/test_validation.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 20b8f4eb..06834f22 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -364,6 +364,12 @@ def rejects_a_schema_extended_with_invalid_root_types(): input SomeInputObject { test: String } + + scalar SomeScalar + + enum SomeEnum { + ENUM_VALUE + } """ ) schema = extend_schema( @@ -381,7 +387,7 @@ def rejects_a_schema_extended_with_invalid_root_types(): parse( """ extend schema { - mutation: SomeInputObject + mutation: SomeScalar } """ ), @@ -391,7 +397,7 @@ def rejects_a_schema_extended_with_invalid_root_types(): parse( """ extend schema { - subscription: SomeInputObject + subscription: SomeEnum } """ ), @@ -404,12 +410,12 @@ def rejects_a_schema_extended_with_invalid_root_types(): }, { "message": "Mutation root type must be Object type" - " if provided, it cannot be SomeInputObject.", + " if provided, it cannot be SomeScalar.", "locations": [(3, 29)], }, { "message": "Subscription root type must be Object type" - " if provided, it cannot be SomeInputObject.", + " if provided, it cannot be SomeEnum.", "locations": [(3, 33)], }, ] From 35ce9d74b149f8c6ca3ca47febc8153106d0ded3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 14:59:43 +0200 Subject: [PATCH 021/230] Use defaultdict in collect_fields Note that a defaultdict(list) has the same functionality as the newly introduced AccumulatorMap in GraphQL-js. 
Replicates graphql/graphql-js@e59ae4c7bd48dbb63f547fae025052e9361b585a --- src/graphql/execution/collect_fields.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index f782162a..40a8d35f 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -1,3 +1,4 @@ +from collections import defaultdict from typing import Any, Dict, List, Set, Union, cast from ..language import ( @@ -39,7 +40,7 @@ def collect_fields( For internal use only. """ - fields: Dict[str, List[FieldNode]] = {} + fields: Dict[str, List[FieldNode]] = defaultdict(list) collect_fields_impl( schema, fragments, variable_values, runtime_type, selection_set, fields, set() ) @@ -64,7 +65,7 @@ def collect_sub_fields( For internal use only. """ - sub_field_nodes: Dict[str, List[FieldNode]] = {} + sub_field_nodes: Dict[str, List[FieldNode]] = defaultdict(list) visited_fragment_names: Set[str] = set() for node in field_nodes: if node.selection_set: @@ -94,8 +95,7 @@ def collect_fields_impl( if isinstance(selection, FieldNode): if not should_include_node(variable_values, selection): continue - name = get_field_entry_key(selection) - fields.setdefault(name, []).append(selection) + fields[get_field_entry_key(selection)].append(selection) elif isinstance(selection, InlineFragmentNode): if not should_include_node( variable_values, selection From 7c18bed867a25e977546b448aa95e173cd90235e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 15:07:36 +0200 Subject: [PATCH 022/230] Rename collect_sub_fields to collect_subfields In order to make this correspond better to GraphQL-js where it is collectSubfields, not collectSubFields. 
--- src/graphql/execution/collect_fields.py | 8 ++++---- src/graphql/execution/execute.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 40a8d35f..1b5934ec 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -20,7 +20,7 @@ from .values import get_directive_values -__all__ = ["collect_fields", "collect_sub_fields"] +__all__ = ["collect_fields", "collect_subfields"] def collect_fields( @@ -47,19 +47,19 @@ def collect_fields( return fields -def collect_sub_fields( +def collect_subfields( schema: GraphQLSchema, fragments: Dict[str, FragmentDefinitionNode], variable_values: Dict[str, Any], return_type: GraphQLObjectType, field_nodes: List[FieldNode], ) -> Dict[str, List[FieldNode]]: - """Collect sub fields. + """Collect subfields. Given a list of field nodes, collects all the subfields of the passed in fields, and returns them at the end. - collect_sub_fields requires the "return type" of an object. For a field that + collect_subfields requires the "return type" of an object. For a field that returns an Interface or Union type, the "return type" will be the actual object type returned by that field. 
diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 97b5927b..2c2e0b88 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -55,7 +55,7 @@ is_non_null_type, is_object_type, ) -from .collect_fields import collect_fields, collect_sub_fields +from .collect_fields import collect_fields, collect_subfields from .middleware import MiddlewareManager from .values import get_argument_values, get_variable_values @@ -948,7 +948,7 @@ def collect_subfields( ) sub_field_nodes = cache.get(key) if sub_field_nodes is None: - sub_field_nodes = collect_sub_fields( + sub_field_nodes = collect_subfields( self.schema, self.fragments, self.variable_values, From 1ed451a972875b0137adbd88f5a3794d2042bc6a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 15:17:19 +0200 Subject: [PATCH 023/230] Remove trailing whitespace in query --- tests/type/test_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 06834f22..b3d3e254 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -364,7 +364,7 @@ def rejects_a_schema_extended_with_invalid_root_types(): input SomeInputObject { test: String } - + scalar SomeScalar enum SomeEnum { From fe822e3a43ad69c7de4d7cf50504e9140420e221 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 19:08:32 +0200 Subject: [PATCH 024/230] fix(validate): no reusing root types Replicates graphql/graphql-js@c008d0fed2b1dda0ff415e5bb1ad5be11a3ad0fd --- src/graphql/pyutils/__init__.py | 3 ++ src/graphql/pyutils/did_you_mean.py | 23 ++++----- src/graphql/pyutils/format_list.py | 29 +++++++++++ src/graphql/type/validate.py | 39 +++++++++++---- tests/pyutils/test_format_list.py | 39 +++++++++++++++ tests/type/test_validation.py | 74 +++++++++++++++++++++++++++++ 6 files changed, 182 insertions(+), 25 deletions(-) create mode 100644 
src/graphql/pyutils/format_list.py create mode 100644 tests/pyutils/test_format_list.py diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index 63035a62..fff78de4 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -17,6 +17,7 @@ unregister_description, ) from .did_you_mean import did_you_mean +from .format_list import or_list, and_list from .group_by import group_by from .identity_func import identity_func from .inspect import inspect @@ -37,6 +38,8 @@ "snake_to_camel", "cached_property", "did_you_mean", + "or_list", + "and_list", "Description", "group_by", "is_description", diff --git a/src/graphql/pyutils/did_you_mean.py b/src/graphql/pyutils/did_you_mean.py index e5821aa1..0078b228 100644 --- a/src/graphql/pyutils/did_you_mean.py +++ b/src/graphql/pyutils/did_you_mean.py @@ -1,5 +1,7 @@ from typing import Optional, Sequence +from .format_list import or_list + __all__ = ["did_you_mean"] @@ -10,20 +12,11 @@ def did_you_mean(suggestions: Sequence[str], sub_message: Optional[str] = None) """Given [ A, B, C ] return ' Did you mean A, B, or C?'""" if not suggestions or not MAX_LENGTH: return "" - parts = [" Did you mean "] + message = " Did you mean " if sub_message: - parts.extend([sub_message, " "]) + message += sub_message + " " suggestions = suggestions[:MAX_LENGTH] - n = len(suggestions) - if n == 1: - parts.append(f"'{suggestions[0]}'?") - elif n == 2: - parts.append(f"'{suggestions[0]}' or '{suggestions[1]}'?") - else: - parts.extend( - [ - ", ".join(f"'{s}'" for s in suggestions[:-1]), - f", or '{suggestions[-1]}'?", - ] - ) - return "".join(parts) + suggestion_list = or_list( + [f"'{suggestion}'" for suggestion in suggestions[:MAX_LENGTH]] + ) + return message + suggestion_list + "?" 
diff --git a/src/graphql/pyutils/format_list.py b/src/graphql/pyutils/format_list.py new file mode 100644 index 00000000..ddaf642d --- /dev/null +++ b/src/graphql/pyutils/format_list.py @@ -0,0 +1,29 @@ +from typing import Sequence + + +__all__ = ["or_list", "and_list"] + + +def or_list(items: Sequence[str]) -> str: + """Given [ A, B, C ] return 'A, B, or C'.""" + return format_list("or", items) + + +def and_list(items: Sequence[str]) -> str: + """Given [ A, B, C ] return 'A, B, and C'.""" + return format_list("and", items) + + +def format_list(conjunction: str, items: Sequence[str]) -> str: + """Given [ A, B, C ] return 'A, B, (conjunction) C'""" + if not items: + raise ValueError("Missing list items to be formatted.") + + n = len(items) + if n == 1: + return items[0] + if n == 2: + return f"{items[0]} {conjunction} {items[1]}" + + *all_but_last, last_item = items + return f"{', '.join(all_but_last)}, {conjunction} {last_item}" diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 8468b61c..2cb2d93d 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -1,3 +1,4 @@ +from collections import defaultdict from operator import attrgetter, itemgetter from typing import Any, Collection, Dict, List, Optional, Set, Tuple, Union, cast @@ -11,7 +12,7 @@ SchemaDefinitionNode, SchemaExtensionNode, ) -from ..pyutils import inspect +from ..pyutils import and_list, inspect from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of from .definition import ( GraphQLEnumType, @@ -105,19 +106,37 @@ def validate_root_types(self) -> None: schema = self.schema if not schema.query_type: self.report_error("Query root type must be provided.", schema.ast_node) + root_types_map: Dict[GraphQLObjectType, List[OperationType]] = defaultdict(list) + for operation_type in OperationType: root_type = schema.get_root_type(operation_type) - if root_type and not is_object_type(root_type): - operation_type_str = 
operation_type.value.capitalize() - root_type_str = inspect(root_type) - if_provided_str = ( - "" if operation_type == operation_type.QUERY else " if provided" + if root_type: + if is_object_type(root_type): + root_types_map[root_type].append(operation_type) + else: + operation_type_str = operation_type.value.capitalize() + root_type_str = inspect(root_type) + if_provided_str = ( + "" if operation_type == operation_type.QUERY else " if provided" + ) + self.report_error( + f"{operation_type_str} root type must be Object type" + f"{if_provided_str}, it cannot be {root_type_str}.", + get_operation_type_node(schema, operation_type) + or root_type.ast_node, + ) + for root_type, operation_types in root_types_map.items(): + if len(operation_types) > 1: + operation_list = and_list( + [operation_type.value for operation_type in operation_types] ) self.report_error( - f"{operation_type_str} root type must be Object type" - f"{if_provided_str}, it cannot be {root_type_str}.", - get_operation_type_node(schema, operation_type) - or root_type.ast_node, + "All root types must be different," + f" '{root_type.name}' type is used as {operation_list} root types.", + [ + get_operation_type_node(schema, operation_type) + for operation_type in operation_types + ], ) def validate_directives(self) -> None: diff --git a/tests/pyutils/test_format_list.py b/tests/pyutils/test_format_list.py new file mode 100644 index 00000000..bdc6b62f --- /dev/null +++ b/tests/pyutils/test_format_list.py @@ -0,0 +1,39 @@ +from pytest import raises + +from graphql.pyutils import and_list, or_list + + +def describe_and_list(): + def does_not_accept_an_empty_list(): + with raises(ValueError): + and_list([]) + + def handles_single_item(): + assert and_list(["A"]) == "A" + + def handles_two_items(): + assert and_list(["A", "B"]) == "A and B" + + def handles_three_items(): + assert and_list(["A", "B", "C"]) == "A, B, and C" + + def handles_more_than_five_items(): + assert and_list(["A", "B", "C", "D", "E", "F"]) 
== "A, B, C, D, E, and F" + + +def describe_or_list(): + def does_not_accept_an_empty_list(): + with raises(ValueError): + or_list([]) + + def handles_single_item(): + assert or_list(["A"]) == "A" + + def handles_two_items(): + assert or_list(["A", "B"]) == "A or B" + + def handles_three_items(): + assert or_list(["A", "B", "C"]) == "A, B, or C" + + def handles_more_than_five_items(): + assert or_list(["A", "B", "C", "D", "E", "F"]) == "A, B, C, D, E, or F" diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index b3d3e254..82020a43 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -454,6 +454,80 @@ def rejects_a_schema_whose_directives_are_incorrectly_typed(): ] +def describe_type_system_root_types_must_all_be_different_if_provided(): + def accepts_a_schema_with_different_root_types(): + schema = build_schema( + """ + type SomeObject1 { + field: String + } + + type SomeObject2 { + field: String + } + + type SomeObject3 { + field: String + } + + schema { + query: SomeObject1 + mutation: SomeObject2 + subscription: SomeObject3 + } + """ + ) + assert validate_schema(schema) == [] + + def rejects_a_schema_where_the_same_type_is_used_for_multiple_root_types(): + schema = build_schema( + """ + type SomeObject { + field: String + } + + type UniqueObject { + field: String + } + + schema { + query: SomeObject + mutation: UniqueObject + subscription: SomeObject + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "All root types must be different, 'SomeObject' type" + " is used as query and subscription root types.", + "locations": [(11, 22), (13, 29)], + } + ] + + def rejects_a_schema_where_the_same_type_is_used_for_all_root_types(): + schema = build_schema( + """ + type SomeObject { + field: String + } + + schema { + query: SomeObject + mutation: SomeObject + subscription: SomeObject + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "All root types must be different, 'SomeObject' 
type" + " is used as query, mutation, and subscription root types.", + "locations": [(7, 22), (8, 25), (9, 29)], + } + ] + + def describe_type_system_objects_must_have_fields(): def accepts_an_object_type_with_fields_object(): schema = build_schema( From dcda281433841371a696b27258218b06a7097d9d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 20:38:10 +0200 Subject: [PATCH 025/230] Delete deprecated graphql/subscription package Replicates graphql/graphql-js@ce09b5b3a092373a985204a8f2c6c84f19c7fbd3 --- src/graphql/subscription/__init__.py | 16 ---------------- 1 file changed, 16 deletions(-) delete mode 100644 src/graphql/subscription/__init__.py diff --git a/src/graphql/subscription/__init__.py b/src/graphql/subscription/__init__.py deleted file mode 100644 index f0c90910..00000000 --- a/src/graphql/subscription/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -"""GraphQL Subscription - -The :mod:`graphql.subscription` package is responsible for subscribing to updates -on specific data. - -.. deprecated:: 3.2 - This package has been deprecated with its exported functions integrated into the - :mod:`graphql.execution` package, to better conform with the terminology of the - GraphQL specification. For backwards compatibility, the :mod:`graphql.subscription` - package currently re-exports the moved functions from the :mod:`graphql.execution` - package. In v3.3, the :mod:`graphql.subscription` package will be dropped entirely. 
-""" - -from ..execution import subscribe, create_source_event_stream, MapAsyncIterator - -__all__ = ["subscribe", "create_source_event_stream", "MapAsyncIterator"] From 2a45b565df970c21cf9da4e03583f6de0235df3f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 20:41:57 +0200 Subject: [PATCH 026/230] Remove deprecated get_operation_root_type Replicates graphql/graphql-js@0c315b6c9aca6e1cbeab2ea318cd7e07f29a5828 --- docs/modules/utilities.rst | 4 - src/graphql/__init__.py | 3 - src/graphql/utilities/__init__.py | 4 - .../utilities/get_operation_root_type.py | 47 -------- .../utilities/test_get_operation_root_type.py | 114 ------------------ 5 files changed, 172 deletions(-) delete mode 100644 src/graphql/utilities/get_operation_root_type.py delete mode 100644 tests/utilities/test_get_operation_root_type.py diff --git a/docs/modules/utilities.rst b/docs/modules/utilities.rst index 21571404..160df76b 100644 --- a/docs/modules/utilities.rst +++ b/docs/modules/utilities.rst @@ -18,10 +18,6 @@ Get the target Operation from a Document: .. autofunction:: get_operation_ast -Get the Type for the target Operation AST: - -.. autofunction:: get_operation_root_type - Convert a GraphQLSchema to an IntrospectionQuery: .. autofunction:: introspection_from_schema diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index f1b21ab3..90dc565c 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -167,8 +167,6 @@ IntrospectionQuery, # Get the target Operation from a Document. get_operation_ast, - # Get the Type for the target Operation AST. - get_operation_root_type, # Convert a GraphQLSchema to an IntrospectionQuery. introspection_from_schema, # Build a GraphQLSchema from an introspection result. 
@@ -744,7 +742,6 @@ "get_introspection_query", "IntrospectionQuery", "get_operation_ast", - "get_operation_root_type", "introspection_from_schema", "build_client_schema", "build_ast_schema", diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index 1571485b..963830fd 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -10,9 +10,6 @@ # Get the target Operation from a Document. from .get_operation_ast import get_operation_ast -# Get the Type for the target Operation AST. -from .get_operation_root_type import get_operation_root_type - # Convert a GraphQLSchema to an IntrospectionQuery. from .introspection_from_schema import introspection_from_schema @@ -106,7 +103,6 @@ "find_dangerous_changes", "get_introspection_query", "get_operation_ast", - "get_operation_root_type", "is_equal_type", "is_type_sub_type_of", "is_valid_name_error", diff --git a/src/graphql/utilities/get_operation_root_type.py b/src/graphql/utilities/get_operation_root_type.py deleted file mode 100644 index 65f76c9c..00000000 --- a/src/graphql/utilities/get_operation_root_type.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import Union - -from ..error import GraphQLError -from ..language import ( - OperationDefinitionNode, - OperationType, - OperationTypeDefinitionNode, -) -from ..type import GraphQLObjectType, GraphQLSchema - - -__all__ = ["get_operation_root_type"] - - -def get_operation_root_type( - schema: GraphQLSchema, - operation: Union[OperationDefinitionNode, OperationTypeDefinitionNode], -) -> GraphQLObjectType: - """Extract the root type of the operation from the schema. - - .. deprecated:: 3.2 - Please use `GraphQLSchema.getRootType` instead. Will be removed in v3.3. 
- """ - operation_type = operation.operation - if operation_type == OperationType.QUERY: - query_type = schema.query_type - if not query_type: - raise GraphQLError( - "Schema does not define the required query root type.", operation - ) - return query_type - - if operation_type == OperationType.MUTATION: - mutation_type = schema.mutation_type - if not mutation_type: - raise GraphQLError("Schema is not configured for mutations.", operation) - return mutation_type - - if operation_type == OperationType.SUBSCRIPTION: - subscription_type = schema.subscription_type - if not subscription_type: - raise GraphQLError("Schema is not configured for subscriptions.", operation) - return subscription_type - - raise GraphQLError( - "Can only have query, mutation and subscription operations.", operation - ) diff --git a/tests/utilities/test_get_operation_root_type.py b/tests/utilities/test_get_operation_root_type.py deleted file mode 100644 index 6d6d7f76..00000000 --- a/tests/utilities/test_get_operation_root_type.py +++ /dev/null @@ -1,114 +0,0 @@ -from pytest import raises - -from graphql.error import GraphQLError -from graphql.language import ( - DocumentNode, - OperationDefinitionNode, - OperationTypeDefinitionNode, - SchemaDefinitionNode, - parse, -) -from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString -from graphql.utilities import get_operation_root_type - - -query_type = GraphQLObjectType("FooQuery", {"field": GraphQLField(GraphQLString)}) - -mutation_type = GraphQLObjectType("FooMutation", {"field": GraphQLField(GraphQLString)}) - -subscription_type = GraphQLObjectType( - "FooSubscription", {"field": GraphQLField(GraphQLString)} -) - - -def get_operation_node(doc: DocumentNode) -> OperationDefinitionNode: - operation_node = doc.definitions[0] - assert isinstance(operation_node, OperationDefinitionNode) - return operation_node - - -def describe_deprecated_get_operation_root_type(): - def 
gets_a_query_type_for_an_unnamed_operation_definition_node(): - test_schema = GraphQLSchema(query_type) - doc = parse("{ field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is query_type - - def gets_a_query_type_for_a_named_operation_definition_node(): - test_schema = GraphQLSchema(query_type) - doc = parse("query Q { field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is query_type - - def gets_a_type_for_operation_definition_nodes(): - test_schema = GraphQLSchema(query_type, mutation_type, subscription_type) - doc = parse( - """ - schema { - query: FooQuery - mutation: FooMutation - subscription: FooSubscription - } - """ - ) - - schema_node = doc.definitions[0] - assert isinstance(schema_node, SchemaDefinitionNode) - query_node, mutation_node, subscription_node = schema_node.operation_types - assert isinstance(query_node, OperationTypeDefinitionNode) - assert get_operation_root_type(test_schema, query_node) is query_type - assert isinstance(mutation_node, OperationTypeDefinitionNode) - assert get_operation_root_type(test_schema, mutation_node) is mutation_type - assert isinstance(subscription_node, OperationTypeDefinitionNode) - assert ( - get_operation_root_type(test_schema, subscription_node) is subscription_type - ) - - def gets_a_mutation_type_for_an_operation_definition_node(): - test_schema = GraphQLSchema(mutation=mutation_type) - doc = parse("mutation { field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is mutation_type - - def gets_a_subscription_type_for_an_operation_definition_node(): - test_schema = GraphQLSchema(subscription=subscription_type) - doc = parse("subscription { field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is subscription_type - - def throws_when_query_type_not_defined_in_schema(): 
- test_schema = GraphQLSchema() - doc = parse("query { field }") - operation_node = get_operation_node(doc) - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == ( - "Schema does not define the required query root type." - ) - - def throws_when_mutation_type_not_defined_in_schema(): - test_schema = GraphQLSchema() - doc = parse("mutation { field }") - operation_node = get_operation_node(doc) - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == "Schema is not configured for mutations." - - def throws_when_subscription_type_not_defined_in_schema(): - test_schema = GraphQLSchema() - doc = parse("subscription { field }") - operation_node = get_operation_node(doc) - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == "Schema is not configured for subscriptions." - - def throws_when_operation_not_a_valid_operation_kind(): - test_schema = GraphQLSchema() - doc = parse("{ field }") - operation_node = get_operation_node(doc) - operation_node.operation = "non_existent_operation" # type: ignore - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == ( - "Can only have query, mutation and subscription operations." 
- ) From 6495cbfb92105a6262e207c6055a47a7e6751a28 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 20:56:01 +0200 Subject: [PATCH 027/230] Remove deprecated assert_valid_name & is_valid_name_error Replicates graphql/graphql-js@6dda669afb34298bf088ee74c6fe386b689ffc0b --- docs/modules/utilities.rst | 5 --- src/graphql/__init__.py | 6 ---- src/graphql/utilities/__init__.py | 5 --- src/graphql/utilities/assert_valid_name.py | 39 ---------------------- 4 files changed, 55 deletions(-) delete mode 100644 src/graphql/utilities/assert_valid_name.py diff --git a/docs/modules/utilities.rst b/docs/modules/utilities.rst index 160df76b..e79809f4 100644 --- a/docs/modules/utilities.rst +++ b/docs/modules/utilities.rst @@ -94,11 +94,6 @@ Comparators for types: .. autofunction:: is_type_sub_type_of .. autofunction:: do_types_overlap -Assert that a string is a valid GraphQL name: - -.. autofunction:: assert_valid_name -.. autofunction:: is_valid_name_error - Compare two GraphQLSchemas and detect breaking changes: .. autofunction:: find_breaking_changes diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 90dc565c..07ff4d96 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -212,10 +212,6 @@ is_equal_type, is_type_sub_type_of, do_types_overlap, - # Assert a string is a valid GraphQL name. - assert_valid_name, - # Determine if a string is a valid GraphQL name. - is_valid_name_error, # Compare two GraphQLSchemas and detect breaking changes. 
BreakingChange, BreakingChangeType, @@ -764,8 +760,6 @@ "is_equal_type", "is_type_sub_type_of", "do_types_overlap", - "assert_valid_name", - "is_valid_name_error", "find_breaking_changes", "find_dangerous_changes", "BreakingChange", diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index 963830fd..26585595 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -68,9 +68,6 @@ # Comparators for types from .type_comparators import is_equal_type, is_type_sub_type_of, do_types_overlap -# Assert that a string is a valid GraphQL name. -from .assert_valid_name import assert_valid_name, is_valid_name_error - # Compare two GraphQLSchemas and detect breaking changes. from .find_breaking_changes import ( BreakingChange, @@ -89,7 +86,6 @@ "IntrospectionQuery", "TypeInfo", "TypeInfoVisitor", - "assert_valid_name", "ast_from_value", "ast_to_dict", "build_ast_schema", @@ -105,7 +101,6 @@ "get_operation_ast", "is_equal_type", "is_type_sub_type_of", - "is_valid_name_error", "introspection_from_schema", "lexicographic_sort_schema", "print_introspection_schema", diff --git a/src/graphql/utilities/assert_valid_name.py b/src/graphql/utilities/assert_valid_name.py deleted file mode 100644 index 4019d73e..00000000 --- a/src/graphql/utilities/assert_valid_name.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Optional - -from ..error import GraphQLError -from ..type.assert_name import assert_name - - -__all__ = ["assert_valid_name", "is_valid_name_error"] - - -def assert_valid_name(name: str) -> str: - """Uphold the spec rules about naming. - - .. deprecated:: 3.2 - Please use ``assert_name`` instead. Will be removed in v3.3. - """ - error = is_valid_name_error(name) - if error: - raise error - return name - - -def is_valid_name_error(name: str) -> Optional[GraphQLError]: - """Return an Error if a name is invalid. - - .. deprecated:: 3.2 - Please use ``assert_name`` instead. Will be removed in v3.3. 
- """ - if not isinstance(name, str): - raise TypeError("Expected name to be a string.") - if name.startswith("__"): - return GraphQLError( - f"Name {name!r} must not begin with '__'," - " which is reserved by GraphQL introspection." - ) - try: - assert_name(name) - except GraphQLError as error: - return error - return None From 2d90cc150725e264f1fb68a1ba25128e2b2aec57 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 21:10:27 +0200 Subject: [PATCH 028/230] Remove deprecated type_info argument of validate() function Replicates graphql/graphql-js@b4ad1282ce38449a953d9375307c9e45f8151a70 --- src/graphql/validation/validate.py | 10 ++----- tests/validation/test_validation.py | 41 +---------------------------- 2 files changed, 3 insertions(+), 48 deletions(-) diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 8f301396..d1b5818f 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -2,7 +2,7 @@ from ..error import GraphQLError from ..language import DocumentNode, ParallelVisitor, visit -from ..pyutils import inspect, is_collection +from ..pyutils import is_collection from ..type import GraphQLSchema, assert_valid_schema from ..utilities import TypeInfo, TypeInfoVisitor from .rules import ASTValidationRule @@ -22,7 +22,6 @@ def validate( document_ast: DocumentNode, rules: Optional[Collection[Type[ASTValidationRule]]] = None, max_errors: Optional[int] = None, - type_info: Optional[TypeInfo] = None, ) -> List[GraphQLError]: """Implements the "Validation" section of the spec. @@ -39,8 +38,6 @@ def validate( Validate will stop validation after a ``max_errors`` limit has been reached. Attackers can send pathologically invalid queries to induce a DoS attack, so by default ``max_errors`` set to 100 errors. - - Providing a custom TypeInfo instance is deprecated and will be removed in v3.3. 
""" if not document_ast or not isinstance(document_ast, DocumentNode): raise TypeError("Must provide document.") @@ -50,10 +47,6 @@ def validate( max_errors = 100 elif not isinstance(max_errors, int): raise TypeError("The maximum number of errors must be passed as an int.") - if type_info is None: - type_info = TypeInfo(schema) - elif not isinstance(type_info, TypeInfo): - raise TypeError(f"Not a TypeInfo object: {inspect(type_info)}.") if rules is None: rules = specified_rules elif not is_collection(rules) or not all( @@ -76,6 +69,7 @@ def on_error(error: GraphQLError) -> None: raise ValidationAbortedError errors.append(error) + type_info = TypeInfo(schema) context = ValidationContext(schema, document_ast, type_info, on_error) # This uses a specialized visitor which runs multiple visitors in parallel, diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 0f7d80e6..5c23ec69 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -2,7 +2,7 @@ from graphql.error import GraphQLError from graphql.language import parse -from graphql.utilities import TypeInfo, build_schema +from graphql.utilities import build_schema from graphql.validation import ValidationRule, validate from .harness import test_schema @@ -15,14 +15,6 @@ def rejects_invalid_documents(): assert validate(test_schema, None) # type: ignore assert str(exc_info.value) == "Must provide document." - def rejects_invalid_type_info(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), type_info={} # type: ignore - ) - assert str(exc_info.value) == "Not a TypeInfo object: {}." 
- def rejects_invalid_rules(): with raises(TypeError) as exc_info: # noinspection PyTypeChecker @@ -80,37 +72,6 @@ def detects_unknown_fields(): {"message": "Cannot query field 'unknown' on type 'QueryRoot'."} ] - def deprecated_validates_using_a_custom_type_info(): - # This TypeInfo will never return a valid field. - type_info = TypeInfo(test_schema, None, lambda *args: None) - - doc = parse( - """ - query { - human { - pets { - ... on Cat { - meowsVolume - } - ... on Dog { - barkVolume - } - } - } - } - """ - ) - - errors = validate(test_schema, doc, None, None, type_info) - - assert [error.message for error in errors] == [ - "Cannot query field 'human' on type 'QueryRoot'. Did you mean 'human'?", - "Cannot query field 'meowsVolume' on type 'Cat'." - " Did you mean 'meowsVolume'?", - "Cannot query field 'barkVolume' on type 'Dog'." - " Did you mean 'barkVolume'?", - ] - def validates_using_a_custom_rule(): schema = build_schema( """ From 03a44cba57a1c1b872074957d2289eb4f18de9c7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 21:24:15 +0200 Subject: [PATCH 029/230] Remove deprecated get_field_def_fn argument of TypeInfo Replicates graphql/graphql-js@75eb3eb57fc3580b95f91f45b07aaa608b92c6a4 --- src/graphql/utilities/type_info.py | 25 ++++++++----------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 11bdea58..554bc5f8 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,6 +1,6 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, Callable, List, Optional, Union, cast +from typing import Any, List, Optional, Union, cast from ..language import ( ArgumentNode, @@ -52,11 +52,6 @@ __all__ = ["TypeInfo", "TypeInfoVisitor"] -GetFieldDefFn = Callable[ - [GraphQLSchema, GraphQLType, FieldNode], Optional[GraphQLField] -] - - class TypeInfo: """Utility class for keeping track of type 
definitions. @@ -70,14 +65,11 @@ def __init__( self, schema: GraphQLSchema, initial_type: Optional[GraphQLType] = None, - get_field_def_fn: Optional[GetFieldDefFn] = None, ) -> None: """Initialize the TypeInfo for the given GraphQL schema. Initial type may be provided in rare cases to facilitate traversals beginning somewhere other than documents. - - The optional last parameter is deprecated and will be removed in v3.3. """ self._schema = schema self._type_stack: List[Optional[GraphQLOutputType]] = [] @@ -88,7 +80,6 @@ def __init__( self._directive: Optional[GraphQLDirective] = None self._argument: Optional[GraphQLArgument] = None self._enum_value: Optional[GraphQLEnumValue] = None - self._get_field_def: GetFieldDefFn = get_field_def_fn or get_field_def if initial_type: if is_input_type(initial_type): self._input_type_stack.append(cast(GraphQLInputType, initial_type)) @@ -158,7 +149,7 @@ def enter_selection_set(self, node: SelectionSetNode) -> None: def enter_field(self, node: FieldNode) -> None: parent_type = self.get_parent_type() if parent_type: - field_def = self._get_field_def(self._schema, parent_type, node) + field_def = get_field_def(self._schema, parent_type, node) field_type = field_def.type if field_def else None else: field_def = field_type = None @@ -277,7 +268,7 @@ def leave_enum_value(self) -> None: def get_field_def( - schema: GraphQLSchema, parent_type: GraphQLType, field_node: FieldNode + schema: GraphQLSchema, parent_type: GraphQLCompositeType, field_node: FieldNode ) -> Optional[GraphQLField]: """Get field definition. @@ -285,16 +276,16 @@ def get_field_def( :func:`graphql.execution.get_field_def`, in this statically evaluated environment we do not always have an Object type, and need to handle Interface and Union types. 
""" - name = field_node.name.value - if name == "__schema" and schema.query_type is parent_type: + field_name = field_node.name.value + if field_name == "__schema" and schema.query_type is parent_type: return SchemaMetaFieldDef - if name == "__type" and schema.query_type is parent_type: + if field_name == "__type" and schema.query_type is parent_type: return TypeMetaFieldDef - if name == "__typename" and is_composite_type(parent_type): + if field_name == "__typename" and is_composite_type(parent_type): return TypeNameMetaFieldDef if is_object_type(parent_type) or is_interface_type(parent_type): parent_type = cast(Union[GraphQLObjectType, GraphQLInterfaceType], parent_type) - return parent_type.fields.get(name) + return parent_type.fields.get(field_name) return None From dbe6bb9cbb6d7f26f63c0740af828ef5622c6aac Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 21:41:20 +0200 Subject: [PATCH 030/230] Remove deprecated get_visit_fn method Replicates graphql/graphql-js@f9d79f1f2b3725f42d2724e41a4f5da8200aced5 --- src/graphql/language/visitor.py | 11 ----------- tests/language/test_visitor.py | 6 ------ 2 files changed, 17 deletions(-) diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index d8d17f16..e8039933 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -146,17 +146,6 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: self.enter_leave_map[kind] = enter_leave return enter_leave - def get_visit_fn( - self, kind: str, is_leaving: bool = False - ) -> Optional[Callable[..., Optional[VisitorAction]]]: - """Get the visit function for the given node kind and direction. - - .. deprecated:: 3.2 - Please use ``get_enter_leave_for_kind`` instead. Will be removed in v3.3. 
- """ - enter_leave = self.get_enter_leave_for_kind(kind) - return enter_leave.leave if is_leaving else enter_leave.enter - class Stack(NamedTuple): """A stack for the visit function.""" diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index 21567a7d..a0acd236 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -242,12 +242,6 @@ def leave_document(*args): visitor.leave, ) - # also test deprecated method - assert visitor.get_visit_fn("document") == visitor.enter_document - assert visitor.get_visit_fn("field") == visitor.enter - assert visitor.get_visit_fn("document", True) == visitor.leave_document - assert visitor.get_visit_fn("field", True) == visitor.leave - def validates_path_argument(): ast = parse("{ a }", no_location=True) visited = [] From 505d554d8756a2b19017725eccc6f7a4db1b3fcb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 21:46:17 +0200 Subject: [PATCH 031/230] Remove deprecated print_rrror/format_error functions Replicates graphql/graphql-js@dce65280ef52e21ce43745dad11b610e525e0c9b --- src/graphql/error/graphql_error.py | 27 --------------------------- tests/error/test_graphql_error.py | 24 ------------------------ 2 files changed, 51 deletions(-) diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index 9f515606..0fa6c170 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -236,30 +236,3 @@ def formatted(self) -> GraphQLFormattedError: if self.extensions: formatted["extensions"] = self.extensions return formatted - - -def print_error(error: GraphQLError) -> str: - """Print a GraphQLError to a string. - - Represents useful location information about the error's position in the source. - - .. deprecated:: 3.2 - Please use ``str(error)`` instead. Will be removed in v3.3. 
- """ - if not isinstance(error, GraphQLError): - raise TypeError("Expected a GraphQLError.") - return str(error) - - -def format_error(error: GraphQLError) -> GraphQLFormattedError: - """Format a GraphQL error. - - Given a GraphQLError, format it according to the rules described by the "Response - Format, Errors" section of the GraphQL Specification. - - .. deprecated:: 3.2 - Please use ``error.formatted`` instead. Will be removed in v3.3. - """ - if not isinstance(error, GraphQLError): - raise TypeError("Expected a GraphQLError.") - return error.formatted diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index d331bae3..418b4662 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -273,17 +273,6 @@ def hashes_are_unique_per_instance(): def describe_to_string(): - def deprecated_prints_an_error_using_print_error(): - # noinspection PyProtectedMember - from graphql.error.graphql_error import print_error - - error = GraphQLError("Error") - assert print_error(error) == "Error" - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - print_error(Exception) # type: ignore - assert str(exc_info.value) == "Expected a GraphQLError." - def prints_an_error_without_location(): error = GraphQLError("Error without location") assert str(error) == "Error without location" @@ -353,19 +342,6 @@ def prints_an_error_with_nodes_from_different_sources(): def describe_formatted(): - def deprecated_formats_an_error_using_format_error(): - # noinspection PyProtectedMember - from graphql.error.graphql_error import format_error - - error = GraphQLError("Example Error") - assert format_error(error) == { - "message": "Example Error", - } - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - format_error(Exception) # type: ignore - assert str(exc_info.value) == "Expected a GraphQLError." 
- def formats_graphql_error(): path: List[Union[int, str]] = ["one", 2] extensions = {"ext": None} From fe748bba5f4e3b64c25ff159d367af92b293d2ea Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 22:16:12 +0200 Subject: [PATCH 032/230] Speedup tests by moving some in strip_ignored_chars to fuzzing Replicates graphql/graphql-js@ebb7befce087a87568deb9e52930a746357dd19b --- .../test_strip_ignored_characters.py | 171 +------------- .../test_strip_ignored_characters_fuzz.py | 210 ++++++++++++++++++ 2 files changed, 212 insertions(+), 169 deletions(-) diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 21353c44..0c9de42f 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -1,4 +1,3 @@ -from json import dumps from typing import Optional from pytest import raises @@ -11,33 +10,6 @@ from ..utils import dedent -ignored_tokens = [ - # UnicodeBOM - "\uFEFF", # Byte Order Mark (U+FEFF) - # WhiteSpace - "\t", # Horizontal Tab (U+0009) - " ", # Space (U+0020) - # LineTerminator - "\n", # "New Line (U+000A)" - "\r", # "Carriage Return (U+000D)" [ lookahead ! 
"New Line (U+000A)" ] - "\r\n", # "Carriage Return (U+000D)" "New Line (U+000A)" - # Comment - '# "Comment" string\n', # `#` CommentChar* - # Comma - ",", # , -] - -punctuator_tokens = ["!", "$", "(", ")", "...", ":", "=", "@", "[", "]", "{", "|", "}"] - -non_punctuator_tokens = [ - "name_token", # Name - "1", # IntValue - "3.14", # FloatValue - '"some string value"', # StringValue - '"""block\nstring\nvalue"""', # StringValue(BlockString) -] - - def lex_value(s: str) -> Optional[str]: lexer = Lexer(Source(s)) value = lexer.advance().value @@ -52,24 +24,10 @@ def __init__(self, doc_string: str): def to_equal(self, expected: str): doc_string = self.doc_string stripped = strip_ignored_characters(doc_string) - - assert stripped == expected, dedent( - f""" - Expected strip_ignored_characters({doc_string!r}) - to equal {expected!r} - but got {stripped!r} - """ - ) + assert stripped == expected stripped_twice = strip_ignored_characters(stripped) - - assert stripped == stripped_twice, dedent( - f"""" - Expected strip_ignored_characters({stripped!r})" - to equal {stripped!r} - but got {stripped_twice!r} - """ - ) + assert stripped == stripped_twice def to_stay_the_same(self): self.to_equal(self.doc_string) @@ -139,14 +97,6 @@ def strips_documents_with_only_ignored_characters(): ExpectStripped(",,").to_equal("") ExpectStripped("#comment\n, \n").to_equal("") - for ignored in ignored_tokens: - ExpectStripped(ignored).to_equal("") - - for another_ignored in ignored_tokens: - ExpectStripped(ignored + another_ignored).to_equal("") - - ExpectStripped("".join(ignored_tokens)).to_equal("") - def strips_leading_and_trailing_ignored_tokens(): ExpectStripped("\n1").to_equal("1") ExpectStripped(",1").to_equal("1") @@ -158,18 +108,6 @@ def strips_leading_and_trailing_ignored_tokens(): ExpectStripped("1,,").to_equal("1") ExpectStripped("1#comment\n, \n").to_equal("1") - for token in punctuator_tokens + non_punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(ignored + 
token).to_equal(token) - ExpectStripped(token + ignored).to_equal(token) - - for another_ignored in ignored_tokens: - ExpectStripped(token + ignored + ignored).to_equal(token) - ExpectStripped(ignored + another_ignored + token).to_equal(token) - - ExpectStripped("".join(ignored_tokens) + token).to_equal(token) - ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) - def strips_ignored_tokens_between_punctuator_tokens(): ExpectStripped("[,)").to_equal("[)") ExpectStripped("[\r)").to_equal("[)") @@ -177,20 +115,6 @@ def strips_ignored_tokens_between_punctuator_tokens(): ExpectStripped("[\r,)").to_equal("[)") ExpectStripped("[,\n)").to_equal("[)") - for left in punctuator_tokens: - for right in punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(left + ignored + right).to_equal(left + right) - - for another_ignored in ignored_tokens: - ExpectStripped( - left + ignored + another_ignored + right - ).to_equal(left + right) - - ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( - left + right - ) - def strips_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): ExpectStripped("[,1").to_equal("[1") ExpectStripped("[\r1").to_equal("[1") @@ -198,22 +122,6 @@ def strips_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): ExpectStripped("[\r,1").to_equal("[1") ExpectStripped("[,\n1").to_equal("[1") - for non_punctuator in non_punctuator_tokens: - for punctuator in punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(punctuator + ignored + non_punctuator).to_equal( - punctuator + non_punctuator - ) - - for another_ignored in ignored_tokens: - ExpectStripped( - punctuator + ignored + another_ignored + non_punctuator - ).to_equal(punctuator + non_punctuator) - - ExpectStripped( - punctuator + "".join(ignored_tokens) + non_punctuator - ).to_equal(punctuator + non_punctuator) - def strips_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): ExpectStripped("1,[").to_equal("1[") 
ExpectStripped("1\r[").to_equal("1[") @@ -221,46 +129,11 @@ def strips_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): ExpectStripped("1\r,[").to_equal("1[") ExpectStripped("1,\n[").to_equal("1[") - for non_punctuator in non_punctuator_tokens: - for punctuator in punctuator_tokens: - # Special case for that is handled in the below test - if punctuator == "...": - continue - - for ignored in ignored_tokens: - ExpectStripped(non_punctuator + ignored + punctuator).to_equal( - non_punctuator + punctuator - ) - - for another_ignored in ignored_tokens: - ExpectStripped( - non_punctuator + ignored + another_ignored + punctuator - ).to_equal(non_punctuator + punctuator) - - ExpectStripped( - non_punctuator + "".join(ignored_tokens) + punctuator - ).to_equal(non_punctuator + punctuator) - def replace_ignored_tokens_between_non_punctuator_tokens_and_spread_with_space(): ExpectStripped("a ...").to_equal("a ...") ExpectStripped("1 ...").to_equal("1 ...") ExpectStripped("1 ... ...").to_equal("1 ......") - for non_punctuator in non_punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(non_punctuator + ignored + "...").to_equal( - non_punctuator + " ..." - ) - - for another_ignored in ignored_tokens: - ExpectStripped( - non_punctuator + ignored + another_ignored + " ..." - ).to_equal(non_punctuator + " ...") - - ExpectStripped(non_punctuator + "".join(ignored_tokens) + "...").to_equal( - non_punctuator + " ..." 
- ) - def replace_ignored_tokens_between_non_punctuator_tokens_with_space(): ExpectStripped("1 2").to_stay_the_same() ExpectStripped('"" ""').to_stay_the_same() @@ -271,57 +144,17 @@ def replace_ignored_tokens_between_non_punctuator_tokens_with_space(): ExpectStripped("a 1").to_equal("a 1") ExpectStripped("a \t 1").to_equal("a 1") - for left in non_punctuator_tokens: - for right in non_punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(left + ignored + right).to_equal(left + " " + right) - - for another_ignored in ignored_tokens: - ExpectStripped( - left + ignored + another_ignored + right - ).to_equal(left + " " + right) - - ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( - left + " " + right - ) - def does_not_strip_ignored_tokens_embedded_in_the_string(): ExpectStripped('" "').to_stay_the_same() ExpectStripped('","').to_stay_the_same() ExpectStripped('",,"').to_stay_the_same() ExpectStripped('",|"').to_stay_the_same() - for ignored in ignored_tokens: - ExpectStripped(dumps(ignored)).to_stay_the_same() - - for another_ignored in ignored_tokens: - ExpectStripped(dumps(ignored + another_ignored)).to_stay_the_same() - - ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() - def does_not_strip_ignored_tokens_embedded_in_the_block_string(): ExpectStripped('""","""').to_stay_the_same() ExpectStripped('""",,"""').to_stay_the_same() ExpectStripped('""",|"""').to_stay_the_same() - ignored_tokens_without_formatting = [ - token - for token in ignored_tokens - if token not in ["\n", "\r", "\r\n", "\t", " "] - ] - - for ignored in ignored_tokens_without_formatting: - ExpectStripped('"""|' + ignored + '|"""').to_stay_the_same() - - for another_ignored in ignored_tokens_without_formatting: - ExpectStripped( - '"""|' + ignored + another_ignored + '|"""' - ).to_stay_the_same() - - ExpectStripped( - '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' - ).to_stay_the_same() - def 
strips_ignored_characters_inside_block_strings(): # noinspection PyShadowingNames def expect_stripped_string(block_str: str): diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 019ec5fb..7f75b8eb 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -1,3 +1,4 @@ +from json import dumps from typing import Optional from pytest import mark @@ -9,6 +10,63 @@ from ..utils import dedent, gen_fuzz_strings +ignored_tokens = [ + # UnicodeBOM + "\uFEFF", # Byte Order Mark (U+FEFF) + # WhiteSpace + "\t", # Horizontal Tab (U+0009) + " ", # Space (U+0020) + # LineTerminator + "\n", # "New Line (U+000A)" + "\r", # "Carriage Return (U+000D)" [ lookahead ! "New Line (U+000A)" ] + "\r\n", # "Carriage Return (U+000D)" "New Line (U+000A)" + # Comment + '# "Comment" string\n', # `#` CommentChar* + # Comma + ",", # , +] + +punctuator_tokens = ["!", "$", "(", ")", "...", ":", "=", "@", "[", "]", "{", "|", "}"] + +non_punctuator_tokens = [ + "name_token", # Name + "1", # IntValue + "3.14", # FloatValue + '"some string value"', # StringValue + '"""block\nstring\nvalue"""', # StringValue(BlockString) +] + + +class ExpectStripped: + def __init__(self, doc_string: str): + self.doc_string = doc_string + + def to_equal(self, expected: str): + doc_string = self.doc_string + stripped = strip_ignored_characters(doc_string) + + assert stripped == expected, dedent( + f""" + Expected strip_ignored_characters({doc_string!r}) + to equal {expected!r} + but got {stripped!r} + """ + ) + + stripped_twice = strip_ignored_characters(stripped) + + assert stripped == stripped_twice, dedent( + f"""" + Expected strip_ignored_characters({stripped!r})" + to equal {stripped!r} + but got {stripped_twice!r} + """ + ) + + def to_stay_the_same(self): + self.to_equal(self.doc_string) + + def lex_value(s: str) -> Optional[str]: lexer = Lexer(Source(s)) value = 
lexer.advance().value @@ -17,6 +75,158 @@ def lex_value(s: str) -> Optional[str]: def describe_strip_ignored_characters(): + @mark.slow + @mark.timeout(10) + def strips_documents_with_random_combination_of_ignored_characters(): + for ignored in ignored_tokens: + ExpectStripped(ignored).to_equal("") + + for another_ignored in ignored_tokens: + ExpectStripped(ignored + another_ignored).to_equal("") + + ExpectStripped("".join(ignored_tokens)).to_equal("") + + @mark.slow + @mark.timeout(10) + def strips_random_leading_and_trailing_ignored_tokens(): + for token in punctuator_tokens + non_punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(ignored + token).to_equal(token) + ExpectStripped(token + ignored).to_equal(token) + + for another_ignored in ignored_tokens: + ExpectStripped(token + ignored + ignored).to_equal(token) + ExpectStripped(ignored + another_ignored + token).to_equal(token) + + ExpectStripped("".join(ignored_tokens) + token).to_equal(token) + ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) + + @mark.slow + @mark.timeout(10) + def strips_random_ignored_tokens_between_punctuator_tokens(): + for left in punctuator_tokens: + for right in punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(left + ignored + right).to_equal(left + right) + + for another_ignored in ignored_tokens: + ExpectStripped( + left + ignored + another_ignored + right + ).to_equal(left + right) + + ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( + left + right + ) + + @mark.slow + @mark.timeout(10) + def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): + for non_punctuator in non_punctuator_tokens: + for punctuator in punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(punctuator + ignored + non_punctuator).to_equal( + punctuator + non_punctuator + ) + + for another_ignored in ignored_tokens: + ExpectStripped( + punctuator + ignored + another_ignored + non_punctuator + 
).to_equal(punctuator + non_punctuator) + + ExpectStripped( + punctuator + "".join(ignored_tokens) + non_punctuator + ).to_equal(punctuator + non_punctuator) + + @mark.slow + @mark.timeout(10) + def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): + for non_punctuator in non_punctuator_tokens: + for punctuator in punctuator_tokens: + # Special case for that is handled in the below test + if punctuator == "...": + continue + + for ignored in ignored_tokens: + ExpectStripped(non_punctuator + ignored + punctuator).to_equal( + non_punctuator + punctuator + ) + + for another_ignored in ignored_tokens: + ExpectStripped( + non_punctuator + ignored + another_ignored + punctuator + ).to_equal(non_punctuator + punctuator) + + ExpectStripped( + non_punctuator + "".join(ignored_tokens) + punctuator + ).to_equal(non_punctuator + punctuator) + + @mark.slow + @mark.timeout(10) + def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space(): + for non_punctuator in non_punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(non_punctuator + ignored + "...").to_equal( + non_punctuator + " ..." + ) + + for another_ignored in ignored_tokens: + ExpectStripped( + non_punctuator + ignored + another_ignored + " ..." + ).to_equal(non_punctuator + " ...") + + ExpectStripped(non_punctuator + "".join(ignored_tokens) + "...").to_equal( + non_punctuator + " ..." 
+ ) + + @mark.slow + @mark.timeout(10) + def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): + for left in non_punctuator_tokens: + for right in non_punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(left + ignored + right).to_equal(left + " " + right) + + for another_ignored in ignored_tokens: + ExpectStripped( + left + ignored + another_ignored + right + ).to_equal(left + " " + right) + + ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( + left + " " + right + ) + + @mark.slow + @mark.timeout(10) + def does_not_strip_random_ignored_tokens_embedded_in_the_string(): + for ignored in ignored_tokens: + ExpectStripped(dumps(ignored)).to_stay_the_same() + + for another_ignored in ignored_tokens: + ExpectStripped(dumps(ignored + another_ignored)).to_stay_the_same() + + ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() + + @mark.slow + @mark.timeout(10) + def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): + ignored_tokens_without_formatting = [ + token + for token in ignored_tokens + if token not in ["\n", "\r", "\r\n", "\t", " "] + ] + + for ignored in ignored_tokens_without_formatting: + ExpectStripped('"""|' + ignored + '|"""').to_stay_the_same() + + for another_ignored in ignored_tokens_without_formatting: + ExpectStripped( + '"""|' + ignored + another_ignored + '|"""' + ).to_stay_the_same() + + ExpectStripped( + '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' + ).to_stay_the_same() + @mark.slow @mark.timeout(20) def strips_ignored_characters_inside_random_block_strings(): From aa47a8389d4eb1f4591198adb757e5ac716eaddf Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 22:26:14 +0200 Subject: [PATCH 033/230] Remove unnecessary import --- tests/error/test_graphql_error.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index 418b4662..58b019c1 100644 --- 
a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -1,7 +1,5 @@ from typing import List, Union, cast -from pytest import raises - from graphql.error import GraphQLError from graphql.language import ( Node, From 318fc43592d9c154278731d8e3d776c0e90c85eb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 22:31:44 +0200 Subject: [PATCH 034/230] Revert "Remove deprecated get_field_def_fn argument of TypeInfo" This reverts commit 03a44cba57a1c1b872074957d2289eb4f18de9c7 Replicates graphql/graphql-js@e3ac35c5ee9ce25e237145c663fd1095b2b4a1b5 --- src/graphql/utilities/type_info.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 554bc5f8..11bdea58 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,6 +1,6 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, List, Optional, Union, cast +from typing import Any, Callable, List, Optional, Union, cast from ..language import ( ArgumentNode, @@ -52,6 +52,11 @@ __all__ = ["TypeInfo", "TypeInfoVisitor"] +GetFieldDefFn = Callable[ + [GraphQLSchema, GraphQLType, FieldNode], Optional[GraphQLField] +] + + class TypeInfo: """Utility class for keeping track of type definitions. @@ -65,11 +70,14 @@ def __init__( self, schema: GraphQLSchema, initial_type: Optional[GraphQLType] = None, + get_field_def_fn: Optional[GetFieldDefFn] = None, ) -> None: """Initialize the TypeInfo for the given GraphQL schema. Initial type may be provided in rare cases to facilitate traversals beginning somewhere other than documents. + + The optional last parameter is deprecated and will be removed in v3.3. 
""" self._schema = schema self._type_stack: List[Optional[GraphQLOutputType]] = [] @@ -80,6 +88,7 @@ def __init__( self._directive: Optional[GraphQLDirective] = None self._argument: Optional[GraphQLArgument] = None self._enum_value: Optional[GraphQLEnumValue] = None + self._get_field_def: GetFieldDefFn = get_field_def_fn or get_field_def if initial_type: if is_input_type(initial_type): self._input_type_stack.append(cast(GraphQLInputType, initial_type)) @@ -149,7 +158,7 @@ def enter_selection_set(self, node: SelectionSetNode) -> None: def enter_field(self, node: FieldNode) -> None: parent_type = self.get_parent_type() if parent_type: - field_def = get_field_def(self._schema, parent_type, node) + field_def = self._get_field_def(self._schema, parent_type, node) field_type = field_def.type if field_def else None else: field_def = field_type = None @@ -268,7 +277,7 @@ def leave_enum_value(self) -> None: def get_field_def( - schema: GraphQLSchema, parent_type: GraphQLCompositeType, field_node: FieldNode + schema: GraphQLSchema, parent_type: GraphQLType, field_node: FieldNode ) -> Optional[GraphQLField]: """Get field definition. @@ -276,16 +285,16 @@ def get_field_def( :func:`graphql.execution.get_field_def`, in this statically evaluated environment we do not always have an Object type, and need to handle Interface and Union types. 
""" - field_name = field_node.name.value - if field_name == "__schema" and schema.query_type is parent_type: + name = field_node.name.value + if name == "__schema" and schema.query_type is parent_type: return SchemaMetaFieldDef - if field_name == "__type" and schema.query_type is parent_type: + if name == "__type" and schema.query_type is parent_type: return TypeMetaFieldDef - if field_name == "__typename" and is_composite_type(parent_type): + if name == "__typename" and is_composite_type(parent_type): return TypeNameMetaFieldDef if is_object_type(parent_type) or is_interface_type(parent_type): parent_type = cast(Union[GraphQLObjectType, GraphQLInterfaceType], parent_type) - return parent_type.fields.get(field_name) + return parent_type.fields.get(name) return None From 0d67023ccef1ccd1df461e82d451f62df521817f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 22:33:58 +0200 Subject: [PATCH 035/230] Revert "Remove deprecated type_info argument of validate() function" This reverts commit 2d90cc150725e264f1fb68a1ba25128e2b2aec57. 
Replicates graphql/graphql-js@75635f03ba950a5f0aa212bad2052297e9a59725 --- src/graphql/validation/validate.py | 10 +++++-- tests/validation/test_validation.py | 41 ++++++++++++++++++++++++++++- 2 files changed, 48 insertions(+), 3 deletions(-) diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index d1b5818f..8f301396 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -2,7 +2,7 @@ from ..error import GraphQLError from ..language import DocumentNode, ParallelVisitor, visit -from ..pyutils import is_collection +from ..pyutils import inspect, is_collection from ..type import GraphQLSchema, assert_valid_schema from ..utilities import TypeInfo, TypeInfoVisitor from .rules import ASTValidationRule @@ -22,6 +22,7 @@ def validate( document_ast: DocumentNode, rules: Optional[Collection[Type[ASTValidationRule]]] = None, max_errors: Optional[int] = None, + type_info: Optional[TypeInfo] = None, ) -> List[GraphQLError]: """Implements the "Validation" section of the spec. @@ -38,6 +39,8 @@ def validate( Validate will stop validation after a ``max_errors`` limit has been reached. Attackers can send pathologically invalid queries to induce a DoS attack, so by default ``max_errors`` set to 100 errors. + + Providing a custom TypeInfo instance is deprecated and will be removed in v3.3. 
""" if not document_ast or not isinstance(document_ast, DocumentNode): raise TypeError("Must provide document.") @@ -47,6 +50,10 @@ def validate( max_errors = 100 elif not isinstance(max_errors, int): raise TypeError("The maximum number of errors must be passed as an int.") + if type_info is None: + type_info = TypeInfo(schema) + elif not isinstance(type_info, TypeInfo): + raise TypeError(f"Not a TypeInfo object: {inspect(type_info)}.") if rules is None: rules = specified_rules elif not is_collection(rules) or not all( @@ -69,7 +76,6 @@ def on_error(error: GraphQLError) -> None: raise ValidationAbortedError errors.append(error) - type_info = TypeInfo(schema) context = ValidationContext(schema, document_ast, type_info, on_error) # This uses a specialized visitor which runs multiple visitors in parallel, diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 5c23ec69..0f7d80e6 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -2,7 +2,7 @@ from graphql.error import GraphQLError from graphql.language import parse -from graphql.utilities import build_schema +from graphql.utilities import TypeInfo, build_schema from graphql.validation import ValidationRule, validate from .harness import test_schema @@ -15,6 +15,14 @@ def rejects_invalid_documents(): assert validate(test_schema, None) # type: ignore assert str(exc_info.value) == "Must provide document." + def rejects_invalid_type_info(): + with raises(TypeError) as exc_info: + # noinspection PyTypeChecker + assert validate( + test_schema, parse("query { name }"), type_info={} # type: ignore + ) + assert str(exc_info.value) == "Not a TypeInfo object: {}." 
+ def rejects_invalid_rules(): with raises(TypeError) as exc_info: # noinspection PyTypeChecker @@ -72,6 +80,37 @@ def detects_unknown_fields(): {"message": "Cannot query field 'unknown' on type 'QueryRoot'."} ] + def deprecated_validates_using_a_custom_type_info(): + # This TypeInfo will never return a valid field. + type_info = TypeInfo(test_schema, None, lambda *args: None) + + doc = parse( + """ + query { + human { + pets { + ... on Cat { + meowsVolume + } + ... on Dog { + barkVolume + } + } + } + } + """ + ) + + errors = validate(test_schema, doc, None, None, type_info) + + assert [error.message for error in errors] == [ + "Cannot query field 'human' on type 'QueryRoot'. Did you mean 'human'?", + "Cannot query field 'meowsVolume' on type 'Cat'." + " Did you mean 'meowsVolume'?", + "Cannot query field 'barkVolume' on type 'Dog'." + " Did you mean 'barkVolume'?", + ] + def validates_using_a_custom_rule(): schema = build_schema( """ From 5023c1e9dd07087892fd46f1e7649c665a7107ac Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 24 Sep 2022 23:06:03 +0200 Subject: [PATCH 036/230] Alpha release v3.3.0a1, matching GraphQL.js v17.0.0a1 Replicates graphql/graphql-js@d9eda1f75ced5228147b6230d5df4a1812aff1b8 --- .bumpversion.cfg | 2 +- README.md | 7 +++++-- docs/conf.py | 2 +- pyproject.toml | 2 +- src/graphql/version.py | 4 ++-- 5 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 4bd8fc1a..6fe9800a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a0 +current_version = 3.3.0a1 commit = False tag = False diff --git a/README.md b/README.md index 471c8303..e171f4a5 100644 --- a/README.md +++ b/README.md @@ -10,12 +10,15 @@ a query language for APIs created by Facebook. 
![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) -The current version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0. - An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. +The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0. + +You can also try out the latest alpha version 3.3.0a1 of GraphQL-core that is up-to-date with GraphQL.js version 17.0.0a1. +Please note that this new minor version of GraphQL-core does not support Python 3.7 anymore. + Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Changes in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. Therefore, we recommend something like `=~ 3.2.0` as version specifier when including GraphQL-core as a dependency. diff --git a/docs/conf.py b/docs/conf.py index a9007dfb..b32115c3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = '3.3.0a0' +version = release = '3.3.0a1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index 7596cc76..661cbbc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "17.0.0a0" +version = "3.3.0a1" description = """ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 88726e13..13de8d48 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -7,9 +7,9 @@ __all__ = ["version", "version_info", "version_js", "version_info_js"] -version = "3.3.0a0" +version = "3.3.0a1" -version_js = "17.0.0a0" +version_js = "17.0.0a1" _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)") From 67f6888d7c414ba7d5ccd737309e82a4651168f3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 25 Sep 2022 18:48:05 +0200 Subject: [PATCH 037/230] Update GitHub actions, add PyPy 3.9 to test matrix --- .github/workflows/lint.yml | 4 ++-- .github/workflows/publish.yml | 4 ++-- .github/workflows/test.yml | 6 +++--- src/graphql/pyutils/is_iterable.py | 10 ++++++++-- tests/execution/test_map_async_iterator.py | 7 +++++++ tox.ini | 5 ++++- 6 files changed, 26 insertions(+), 10 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 2b8acd4d..2185a66b 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,10 +7,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.9 - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: 3.9 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index f3725b66..8f2ac627 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -10,10 +10,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.9 - uses: actions/setup-python@v2 + uses: 
actions/setup-python@v4 with: python-version: 3.9 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 02fe50c6..7210c219 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,13 +8,13 @@ jobs: strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', 'pypy3'] + python: ['3.7', '3.8', '3.9', '3.10', 'pypy3.9'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index 30417cb7..7e0191f6 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -1,11 +1,17 @@ +from array import array from typing import Any, ByteString, Collection, Iterable, Mapping, Text, ValuesView __all__ = ["is_collection", "is_iterable"] -collection_types: Any = Collection +collection_types: Any = [Collection] if not isinstance({}.values(), Collection): # Python < 3.7.2 - collection_types = (Collection, ValuesView) + collection_types.append(ValuesView) +if not isinstance(array, Collection): # PyPy issue 3820 + collection_types.append(array) +collection_types = ( + collection_types[0] if len(collection_types) == 1 else tuple(collection_types) +) iterable_types: Any = Iterable not_iterable_types: Any = (ByteString, Mapping, Text) diff --git a/tests/execution/test_map_async_iterator.py b/tests/execution/test_map_async_iterator.py index 299d010a..0845b9bc 100644 --- a/tests/execution/test_map_async_iterator.py +++ b/tests/execution/test_map_async_iterator.py @@ -1,3 +1,4 @@ +import platform import sys from asyncio import CancelledError, Event, ensure_future, sleep @@ -6,6 +7,8 @@ from graphql.execution import MapAsyncIterator +is_pypy = platform.python_implementation() == "PyPy" + try: # pragma: no cover anext except NameError: # pragma: no cover (Python < 
3.10) @@ -344,6 +347,10 @@ def double(x): with raises(StopAsyncIteration): await anext(doubles) + # no more exceptions should be thrown + if is_pypy: + # need to investigate why this is needed with PyPy + await doubles.aclose() # pragma: no cover await doubles.athrow(RuntimeError("no more ouch")) with raises(StopAsyncIteration): diff --git a/tox.ini b/tox.ini index 3a552d0a..04726b40 100644 --- a/tox.ini +++ b/tox.ini @@ -1,13 +1,16 @@ [tox] -envlist = py3{7,8,9,10}, black, flake8, isort, mypy, docs +envlist = py3{7,8,9,10}, pypy39, black, flake8, isort, mypy, docs isolated_build = true [gh-actions] python = + 3: py39 3.7: py37 3.8: py38 3.9: py39 3.10: py310 + pypy3: pypy39 + pypy3.9: pypy39 [testenv:black] basepython = python3.9 From 4d8f6ce61588f34f6c9be881cca77613c1c1c336 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 25 Sep 2022 19:07:14 +0200 Subject: [PATCH 038/230] Restrict Sphinx version used for testing Workaround for incompatibility between the current sphinx_rtd_theme 1.0.0 and Sphinx 5.2.0.post0 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 04726b40..ce02be2d 100644 --- a/tox.ini +++ b/tox.ini @@ -44,7 +44,7 @@ commands = [testenv:docs] basepython = python3.9 deps = - sphinx>=5.1,<6 + sphinx>=5.1,<5.2 sphinx_rtd_theme>=1,<2 commands = sphinx-build -b html -nEW docs docs/_build/html From 1d8513ac659d0ca8a13af0a7a6c56663e743b7ce Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 25 Sep 2022 19:16:58 +0200 Subject: [PATCH 039/230] Restrict Sphinx version used for Read the Docs --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 2f6c4726..85695dd0 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=4.3,<6 +sphinx>=5.1,<5.2 sphinx_rtd_theme>=1,<2 From c4e872d95af09614454fd854bdfabeff6a6569fd Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: 
Sun, 25 Sep 2022 23:33:31 +0200 Subject: [PATCH 040/230] Update Sphinx after new version has been released Sphinx 5.2.1 has been released and works with sphinx_rtd_theme again. --- docs/requirements.txt | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 85695dd0..f4f9b8af 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=5.1,<5.2 +sphinx>=5.2.1,<6 sphinx_rtd_theme>=1,<2 diff --git a/tox.ini b/tox.ini index ce02be2d..ee335970 100644 --- a/tox.ini +++ b/tox.ini @@ -44,7 +44,7 @@ commands = [testenv:docs] basepython = python3.9 deps = - sphinx>=5.1,<5.2 + sphinx>=5.2.1,<6 sphinx_rtd_theme>=1,<2 commands = sphinx-build -b html -nEW docs docs/_build/html From 8ebf563e096f8dc760207b866d5ae7197dddfdfa Mon Sep 17 00:00:00 2001 From: Kevin Le Date: Thu, 29 Sep 2022 11:27:33 -0700 Subject: [PATCH 041/230] Support custom execution contexts in subscriptions (#181) --- src/graphql/execution/subscribe.py | 11 +++++-- tests/execution/test_subscribe.py | 46 +++++++++++++++++++++++++++++- 2 files changed, 54 insertions(+), 3 deletions(-) diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index b40022fe..1ab38989 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -1,5 +1,5 @@ from inspect import isawaitable -from typing import Any, AsyncIterable, AsyncIterator, Dict, Optional, Union +from typing import Any, AsyncIterable, AsyncIterator, Dict, Optional, Type, Union from ..error import GraphQLError, located_error from ..execution.collect_fields import collect_fields @@ -29,6 +29,7 @@ async def subscribe( operation_name: Optional[str] = None, field_resolver: Optional[GraphQLFieldResolver] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, + execution_context_class: Optional[Type["ExecutionContext"]] = None, ) -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: 
"""Create a GraphQL subscription. @@ -57,6 +58,7 @@ async def subscribe( variable_values, operation_name, subscribe_field_resolver, + execution_context_class, ) if isinstance(result_or_stream, ExecutionResult): return result_or_stream @@ -79,6 +81,7 @@ async def map_source_to_response(payload: Any) -> ExecutionResult: variable_values, operation_name, field_resolver, + execution_context_class=execution_context_class, ) return await result if isawaitable(result) else result @@ -94,6 +97,7 @@ async def create_source_event_stream( variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, + execution_context_class: Optional[Type["ExecutionContext"]] = None, ) -> Union[AsyncIterable[Any], ExecutionResult]: """Create source event stream @@ -122,9 +126,12 @@ async def create_source_event_stream( # mistake which should throw an early error. assert_valid_execution_arguments(schema, document, variable_values) + if not execution_context_class: + execution_context_class = ExecutionContext + # If a valid context cannot be created due to incorrect arguments, # a "Response" with only errors is returned. 
- context = ExecutionContext.build( + context = execution_context_class.build( schema, document, root_value, diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 2f1ae39c..564454bb 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -3,7 +3,12 @@ from pytest import mark, raises -from graphql.execution import MapAsyncIterator, create_source_event_stream, subscribe +from graphql.execution import ( + create_source_event_stream, + subscribe, + MapAsyncIterator, + ExecutionContext, +) from graphql.language import parse from graphql.pyutils import SimplePubSub from graphql.type import ( @@ -892,3 +897,42 @@ def resolve_message(message, _info): assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) + + @mark.asyncio + async def should_work_with_custom_execution_contexts(): + class CustomExecutionContext(ExecutionContext): + def build_resolve_info(self, *args, **kwargs): + resolve_info = super().build_resolve_info(*args, **kwargs) + resolve_info.context['foo'] = 'bar' + return resolve_info + + async def generate_messages(_obj, info): + yield info.context['foo'] + + def resolve_message(message, _info): + return message + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=resolve_message, + subscribe=generate_messages, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = await subscribe( + schema, + document, + context_value={}, + execution_context_class=CustomExecutionContext + ) + assert isinstance(subscription, MapAsyncIterator) + + assert await anext(subscription) == ({"newMessage": "bar"}, None) From 1990bdcec2d3509dcd52a42c56f3251c48928d98 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 29 Sep 2022 20:33:07 +0200 Subject: [PATCH 042/230] Fix minor formatting issues --- 
tests/execution/test_subscribe.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 564454bb..c3443f5e 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -4,10 +4,10 @@ from pytest import mark, raises from graphql.execution import ( + ExecutionContext, + MapAsyncIterator, create_source_event_stream, subscribe, - MapAsyncIterator, - ExecutionContext, ) from graphql.language import parse from graphql.pyutils import SimplePubSub @@ -903,11 +903,11 @@ async def should_work_with_custom_execution_contexts(): class CustomExecutionContext(ExecutionContext): def build_resolve_info(self, *args, **kwargs): resolve_info = super().build_resolve_info(*args, **kwargs) - resolve_info.context['foo'] = 'bar' + resolve_info.context["foo"] = "bar" return resolve_info async def generate_messages(_obj, info): - yield info.context['foo'] + yield info.context["foo"] def resolve_message(message, _info): return message @@ -931,7 +931,7 @@ def resolve_message(message, _info): schema, document, context_value={}, - execution_context_class=CustomExecutionContext + execution_context_class=CustomExecutionContext, ) assert isinstance(subscription, MapAsyncIterator) From 1a5aca82eb1d026a8f48dd7c7d8f9c56b8cd651b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 29 Sep 2022 21:37:59 +0200 Subject: [PATCH 043/230] Move customization tests for to a separate module --- tests/execution/test_customize.py | 83 ++++++++++++++++++++++++++++++- tests/execution/test_subscribe.py | 75 +--------------------------- 2 files changed, 83 insertions(+), 75 deletions(-) diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 23a1e9d7..baca20ba 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,8 +1,19 @@ -from graphql.execution import ExecutionContext, execute +from pytest import mark + +from 
graphql.execution import ExecutionContext, MapAsyncIterator, execute, subscribe from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString +try: + anext +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + def describe_customize_execution(): def uses_a_custom_field_resolver(): query = parse("{ foo }") @@ -39,3 +50,73 @@ def execute_field(self, parent_type, source, field_nodes, path): {"foo": "barbar"}, None, ) + + +def describe_customize_subscription(): + @mark.asyncio + async def uses_a_custom_subscribe_field_resolver(): + schema = GraphQLSchema( + query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), + subscription=GraphQLObjectType( + "Subscription", {"foo": GraphQLField(GraphQLString)} + ), + ) + + class Root: + @staticmethod + async def custom_foo(): + yield {"foo": "FooValue"} + + subscription = await subscribe( + schema, + document=parse("subscription { foo }"), + root_value=Root(), + subscribe_field_resolver=lambda root, _info: root.custom_foo(), + ) + assert isinstance(subscription, MapAsyncIterator) + + assert await anext(subscription) == ( + {"foo": "FooValue"}, + None, + ) + + await subscription.aclose() + + @mark.asyncio + async def uses_a_custom_execution_context_class(): + class TestExecutionContext(ExecutionContext): + def build_resolve_info(self, *args, **kwargs): + resolve_info = super().build_resolve_info(*args, **kwargs) + resolve_info.context["foo"] = "bar" + return resolve_info + + async def generate_foo(_obj, info): + yield info.context["foo"] + + def resolve_foo(message, _info): + return message + + schema = GraphQLSchema( + query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), + subscription=GraphQLObjectType( + "Subscription", + { + "foo": GraphQLField( + GraphQLString, + 
resolve=resolve_foo, + subscribe=generate_foo, + ) + }, + ), + ) + + document = parse("subscription { foo }") + subscription = await subscribe( + schema, + document, + context_value={}, + execution_context_class=TestExecutionContext, + ) + assert isinstance(subscription, MapAsyncIterator) + + assert await anext(subscription) == ({"foo": "bar"}, None) diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index c3443f5e..8f3ceb55 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -3,12 +3,7 @@ from pytest import mark, raises -from graphql.execution import ( - ExecutionContext, - MapAsyncIterator, - create_source_event_stream, - subscribe, -) +from graphql.execution import MapAsyncIterator, create_source_event_stream, subscribe from graphql.language import parse from graphql.pyutils import SimplePubSub from graphql.type import ( @@ -214,35 +209,6 @@ async def foo_generator(_obj, _info): await subscription.aclose() - @mark.asyncio - async def uses_a_custom_default_subscribe_field_resolver(): - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", {"foo": GraphQLField(GraphQLString)} - ), - ) - - class Root: - @staticmethod - async def custom_foo(): - yield {"foo": "FooValue"} - - subscription = await subscribe( - schema, - document=parse("subscription { foo }"), - root_value=Root(), - subscribe_field_resolver=lambda root, _info: root.custom_foo(), - ) - assert isinstance(subscription, MapAsyncIterator) - - assert await anext(subscription) == ( - {"foo": "FooValue"}, - None, - ) - - await subscription.aclose() - @mark.asyncio async def should_only_resolve_the_first_field_of_invalid_multi_field(): did_resolve = {"foo": False, "bar": False} @@ -897,42 +863,3 @@ def resolve_message(message, _info): assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) - - @mark.asyncio - async def 
should_work_with_custom_execution_contexts(): - class CustomExecutionContext(ExecutionContext): - def build_resolve_info(self, *args, **kwargs): - resolve_info = super().build_resolve_info(*args, **kwargs) - resolve_info.context["foo"] = "bar" - return resolve_info - - async def generate_messages(_obj, info): - yield info.context["foo"] - - def resolve_message(message, _info): - return message - - schema = GraphQLSchema( - query=QueryType, - subscription=GraphQLObjectType( - "Subscription", - { - "newMessage": GraphQLField( - GraphQLString, - resolve=resolve_message, - subscribe=generate_messages, - ) - }, - ), - ) - - document = parse("subscription { newMessage }") - subscription = await subscribe( - schema, - document, - context_value={}, - execution_context_class=CustomExecutionContext, - ) - assert isinstance(subscription, MapAsyncIterator) - - assert await anext(subscription) == ({"newMessage": "bar"}, None) From b27d6b6e09abcdbd35a5bad5094652dc235320b6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 6 Oct 2022 16:49:51 +0200 Subject: [PATCH 044/230] Update dependencies --- poetry.lock | 303 +++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 241 insertions(+), 62 deletions(-) diff --git a/poetry.lock b/poetry.lock index 131f0946..0e5ab649 100644 --- a/poetry.lock +++ b/poetry.lock @@ -31,6 +31,25 @@ python-versions = ">=3.6" [package.dependencies] pytz = ">=2015.7" +[[package]] +name = "bandit" +version = "1.7.4" +description = "Security oriented static analyser for python code." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +stevedore = ">=1.20.0" + +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] +toml = ["toml"] +yaml = ["PyYAML"] + [[package]] name = "black" version = "22.8.0" @@ -64,7 +83,7 @@ python-versions = ">=3.5" [[package]] name = "certifi" -version = "2022.9.14" +version = "2022.9.24" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false @@ -103,7 +122,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.4.4" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -157,6 +176,56 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.9.0,<2.10.0" pyflakes = ">=2.5.0,<2.6.0" +[[package]] +name = "flake8-bandit" +version = "4.1.1" +description = "Automated security testing with bandit and flake8." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +bandit = ">=1.7.3" +flake8 = ">=5.0.0" + +[[package]] +name = "flake8-bugbear" +version = "22.9.23" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "GitPython" +version = "3.1.27" +description = "GitPython is a python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} + [[package]] name = "idna" version = "3.4" @@ -287,6 +356,14 @@ category = "dev" optional = false python-versions = ">=3.7" +[[package]] +name = "pbr" +version = "5.10.0" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + [[package]] name = "platformdirs" version = "2.5.2" @@ -460,12 +537,20 @@ pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2022.2.1" +version = "2022.4" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" +[[package]] +name = "PyYAML" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "requests" version = "2.28.1" @@ -486,14 +571,14 @@ use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "65.3.0" +version = "65.4.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", 
"sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -505,6 +590,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -631,6 +724,18 @@ python-versions = ">=3.5" lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] +[[package]] +name = "stevedore" +version = "3.5.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + [[package]] name = "tomli" version = "2.0.1" @@ -724,7 +829,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black 
(>= [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "3f2775172e15ba8491dd4c0eb49800e7f6287e901a4863503317f0e8ec3aef15" +content-hash = "135c93a5703fc0dd6506db01a85ed70f027eb3b4de45ddd7b3fa905d270bd46e" [metadata.files] alabaster = [ @@ -739,6 +844,10 @@ Babel = [ {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, ] +bandit = [ + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, +] black = [ {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, @@ -769,8 +878,8 @@ bump2version = [ {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, ] certifi = [ - {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, - {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, ] charset-normalizer = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, @@ -785,56 +894,56 @@ colorama = [ {file = "colorama-0.4.5.tar.gz", hash = 
"sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, ] coverage = [ - {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, - {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, - {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, - {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, - {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, - 
{file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, - {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, - {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, - {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, - 
{file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, - {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, - {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, - {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, - {file = 
"coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, - {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, - {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, - {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, - {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, + {file = 
"coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = 
"coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = 
"coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = 
"coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] distlib = [ {file = 
"distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, @@ -852,6 +961,22 @@ flake8 = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] +flake8-bandit = [ + {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, + {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, +] +flake8-bugbear = [ + {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"}, + {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"}, +] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] +GitPython = [ + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, +] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, @@ -959,6 +1084,10 @@ pathspec = [ {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, 
] +pbr = [ + {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, + {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, +] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, @@ -1015,21 +1144,67 @@ pytest-timeout = [ {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, ] pytz = [ - {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, - {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, + {file = "pytz-2022.4-py2.py3-none-any.whl", hash = "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, + {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, +] +PyYAML = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = 
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] setuptools = [ - {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, - {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, + {file = "setuptools-65.4.1-py3-none-any.whl", hash = 
"sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012"}, + {file = "setuptools-65.4.1.tar.gz", hash = "sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -1066,6 +1241,10 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] +stevedore = [ + {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, + {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, +] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, From 1ce4222ace77b3eea2ee3f2770a676a05b5ea300 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Oct 2022 13:39:00 +0200 Subject: [PATCH 045/230] 
Update black --- poetry.lock | 62 ++++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- tox.ini | 2 +- 3 files changed, 32 insertions(+), 34 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0e5ab649..6ecc7fca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -52,11 +52,11 @@ yaml = ["PyYAML"] [[package]] name = "black" -version = "22.8.0" +version = "22.10.0" description = "The uncompromising code formatter." category = "dev" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" [package.dependencies] click = ">=8.0.0" @@ -216,7 +216,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "GitPython" -version = "3.1.27" +version = "3.1.28" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -777,7 +777,7 @@ python-versions = ">=3.6" [[package]] name = "typing-extensions" -version = "4.3.0" +version = "4.4.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false @@ -829,7 +829,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "135c93a5703fc0dd6506db01a85ed70f027eb3b4de45ddd7b3fa905d270bd46e" +content-hash = "062470ef45f310b2e14b6828b7dc6bcbbaf5f3770ef12b1ee1739f624effd738" [metadata.files] alabaster = [ @@ -849,29 +849,27 @@ bandit = [ {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] black = [ - {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, - {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, - {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, - {file = 
"black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, - {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, - {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, - {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, - {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, - {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, - {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, - {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, - {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, - {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, - {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, - {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, - {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, - {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, - {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, - {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, + {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, + {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, + {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, + {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, + {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, + {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, + {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, + {file = 
"black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, + {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, + {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, + {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, + {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, + {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, + {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, + {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, + {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, + {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, + {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, + {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, + {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, + {file = "black-22.10.0.tar.gz", hash = 
"sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, ] bump2version = [ {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, @@ -974,8 +972,8 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] GitPython = [ - {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, - {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, + {file = "GitPython-3.1.28-py3-none-any.whl", hash = "sha256:77bfbd299d8709f6af7e0c70840ef26e7aff7cf0c1ed53b42dd7fc3a310fcb02"}, + {file = "GitPython-3.1.28.tar.gz", hash = "sha256:6bd3451b8271132f099ceeaf581392eaf6c274af74bb06144307870479d0697c"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, @@ -1280,8 +1278,8 @@ typed-ast = [ {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, ] typing-extensions = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] urllib3 = [ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, diff --git a/pyproject.toml b/pyproject.toml index 661cbbc7..489e127b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 
+60,7 @@ tox = "^3.26" optional = true [tool.poetry.group.lint.dependencies] -black = "22.8.0" +black = "22.10.0" flake8 = "^5.0" flake8-bandit = "^4.1" flake8-bugbear = "22.9.23" diff --git a/tox.ini b/tox.ini index ee335970..7f9d8e88 100644 --- a/tox.ini +++ b/tox.ini @@ -14,7 +14,7 @@ python = [testenv:black] basepython = python3.9 -deps = black==22.8.0 +deps = black==22.10.0 commands = black src tests -t py39 --check From a657792c10f59ed94af3039807ef92318b5c23f9 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Oct 2022 16:10:41 +0200 Subject: [PATCH 046/230] Correct a workaround for PyPy --- src/graphql/pyutils/is_iterable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index 7e0191f6..05d9de32 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -7,7 +7,7 @@ collection_types: Any = [Collection] if not isinstance({}.values(), Collection): # Python < 3.7.2 collection_types.append(ValuesView) -if not isinstance(array, Collection): # PyPy issue 3820 +if not issubclass(array, Collection): # PyPy <= 7.3.9 collection_types.append(array) collection_types = ( collection_types[0] if len(collection_types) == 1 else tuple(collection_types) From dd5a99cdfc7576f48be7342f36c5e23e3daffa64 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 23 Oct 2022 16:14:41 +0200 Subject: [PATCH 047/230] Update dependencies --- poetry.lock | 105 ++++++++++++++++--------------- pyproject.toml | 6 +- src/graphql/execution/execute.py | 8 +-- tox.ini | 6 +- 4 files changed, 63 insertions(+), 62 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6ecc7fca..f152f451 100644 --- a/poetry.lock +++ b/poetry.lock @@ -216,7 +216,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "GitPython" -version = "3.1.28" +version = "3.1.29" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false 
@@ -312,11 +312,11 @@ python-versions = ">=3.6" [[package]] name = "mypy" -version = "0.971" +version = "0.982" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] mypy-extensions = ">=0.4.3" @@ -358,7 +358,7 @@ python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.10.0" +version = "5.11.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -468,7 +468,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. [[package]] name = "pytest-asyncio" -version = "0.19.0" +version = "0.20.1" description = "Pytest support for asyncio" category = "dev" optional = false @@ -500,7 +500,7 @@ histogram = ["pygal", "pygaljs"] [[package]] name = "pytest-cov" -version = "3.0.0" +version = "4.0.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false @@ -537,7 +537,7 @@ pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2022.4" +version = "2022.5" description = "World timezone definitions, modern and historical" category = "dev" optional = false @@ -571,7 +571,7 @@ use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "65.4.1" +version = "65.5.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false @@ -726,7 +726,7 @@ test = ["pytest"] [[package]] name = "stevedore" -version = "3.5.0" +version = "3.5.2" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -816,20 +816,20 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", [[package]] name = "zipp" -version = "3.8.1" +version = "3.9.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift 
(>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "062470ef45f310b2e14b6828b7dc6bcbbaf5f3770ef12b1ee1739f624effd738" +content-hash = "4ba4d3123e8803b62ee1d3f5043a3b6008386d0cec452300ef64e117493a60c4" [metadata.files] alabaster = [ @@ -972,8 +972,8 @@ gitdb = [ {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] GitPython = [ - {file = "GitPython-3.1.28-py3-none-any.whl", hash = "sha256:77bfbd299d8709f6af7e0c70840ef26e7aff7cf0c1ed53b42dd7fc3a310fcb02"}, - {file = "GitPython-3.1.28.tar.gz", hash = "sha256:6bd3451b8271132f099ceeaf581392eaf6c274af74bb06144307870479d0697c"}, + {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, + {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, @@ -1046,29 +1046,30 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mypy = [ - {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, - {file = 
"mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, - {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, - {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, - {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, - {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, - {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, - {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, - {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, - {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, - {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, - {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, - {file = 
"mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, - {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, - {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, - {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, - {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, - {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, - {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, + {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, + {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, + {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, + {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, + {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, + {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, + {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, + {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, + {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, + {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, + {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, + {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, + {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, + {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, + {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, + {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, + {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, + {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, + {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, + {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, + {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, + {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, + {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, + {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1083,8 +1084,8 @@ pathspec = [ {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] pbr = [ - {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, - {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, + {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, + {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, ] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, @@ -1122,16 +1123,16 @@ pytest = [ {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, 
- {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, + {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, + {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, ] pytest-benchmark = [ {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, ] pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, ] pytest-describe = [ {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, @@ -1142,8 +1143,8 @@ pytest-timeout = [ {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, ] pytz = [ - {file = "pytz-2022.4-py2.py3-none-any.whl", hash = "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, - {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, + {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, + {file = "pytz-2022.5.tar.gz", hash = 
"sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, ] PyYAML = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, @@ -1192,8 +1193,8 @@ requests = [ {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] setuptools = [ - {file = "setuptools-65.4.1-py3-none-any.whl", hash = "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012"}, - {file = "setuptools-65.4.1.tar.gz", hash = "sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e"}, + {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, + {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1240,8 +1241,8 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] stevedore = [ - {file = "stevedore-3.5.0-py3-none-any.whl", hash = "sha256:a547de73308fd7e90075bb4d301405bebf705292fa90a90fc3bcf9133f58616c"}, - {file = "stevedore-3.5.0.tar.gz", hash = "sha256:f40253887d8712eaa2bb0ea3830374416736dc8ec0e22f5a65092c1174c44335"}, + {file = "stevedore-3.5.2-py3-none-any.whl", hash = "sha256:fa2630e3d0ad3e22d4914aff2501445815b9a4467a6edc49387c667a38faf5bf"}, + {file = "stevedore-3.5.2.tar.gz", hash = "sha256:cf99f41fc0d5a4f185ca4d3d42b03be9011b0a1ec1a4ea1a282be1b4b306dcc2"}, ] tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, @@ -1290,6 +1291,6 @@ virtualenv = [ {file = "virtualenv-20.16.2.tar.gz", hash = 
"sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, ] zipp = [ - {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, - {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, + {file = "zipp-3.9.0-py3-none-any.whl", hash = "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"}, + {file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"}, ] diff --git a/pyproject.toml b/pyproject.toml index 489e127b..890aaff1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,9 +49,9 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "^7.1" -pytest-asyncio = ">=0.19,<1" +pytest-asyncio = ">=0.20,<1" pytest-benchmark = "^3.4" -pytest-cov = "^3.0" +pytest-cov = "^4.0" pytest-describe = "^2.0" pytest-timeout = "^2.1" tox = "^3.26" @@ -65,7 +65,7 @@ flake8 = "^5.0" flake8-bandit = "^4.1" flake8-bugbear = "22.9.23" isort = "^5.10" -mypy = "0.971" +mypy = "0.982" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 2c2e0b88..49651c1b 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -210,9 +210,9 @@ def __init__( self.context_value = context_value self.operation = operation self.variable_values = variable_values - self.field_resolver = field_resolver # type: ignore - self.type_resolver = type_resolver # type: ignore - self.subscribe_field_resolver = subscribe_field_resolver # type: ignore + self.field_resolver = field_resolver + self.type_resolver = type_resolver + self.subscribe_field_resolver = subscribe_field_resolver self.errors = errors self.middleware_manager = middleware_manager if is_awaitable: @@ -790,7 +790,7 @@ def complete_abstract_value( that value, then complete the value for that type. 
""" resolve_type_fn = return_type.resolve_type or self.type_resolver - runtime_type = resolve_type_fn(result, info, return_type) # type: ignore + runtime_type = resolve_type_fn(result, info, return_type) if self.is_awaitable(runtime_type): runtime_type = cast(Awaitable, runtime_type) diff --git a/tox.ini b/tox.ini index 7f9d8e88..3029f3b4 100644 --- a/tox.ini +++ b/tox.ini @@ -36,7 +36,7 @@ commands = [testenv:mypy] basepython = python3.9 deps = - mypy==0.971 + mypy==0.982 pytest>=7.1,<8 commands = mypy src tests @@ -52,9 +52,9 @@ commands = [testenv] deps = pytest>=7.1,<8 - pytest-asyncio>=0.19,<1 + pytest-asyncio>=0.20,<1 pytest-benchmark>=3.4,<4 - pytest-cov>=3,<4 + pytest-cov>=4,<5 pytest-describe>=2,<3 pytest-timeout>=2,<3 py37: typing-extensions>=4.3,<5 From 0115ed40f418052e09deac899f0ba3b021f6eef4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 30 Oct 2022 21:06:39 +0100 Subject: [PATCH 048/230] Update pytest --- poetry.lock | 62 +++++++++++++++++++++++++++++++------------------- pyproject.toml | 4 ++-- tox.ini | 4 ++-- 3 files changed, 43 insertions(+), 27 deletions(-) diff --git a/poetry.lock b/poetry.lock index f152f451..959c78fb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -114,11 +114,11 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "colorama" -version = "0.4.5" +version = "0.4.6" description = "Cross-platform colored terminal text." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" [[package]] name = "coverage" @@ -150,6 +150,17 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "exceptiongroup" +version = "1.0.0" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=6)"] + [[package]] name = "filelock" version = "3.8.0" @@ -401,8 +412,8 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "py-cpuinfo" -version = "8.0.0" -description = "Get CPU info with pure Python 2 & 3" +version = "9.0.0" +description = "Get CPU info with pure Python" category = "dev" optional = false python-versions = "*" @@ -447,7 +458,7 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.1.3" +version = "7.2.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -456,12 +467,12 @@ python-versions = ">=3.7" [package.dependencies] attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -tomli = ">=1.0.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] @@ -483,11 +494,11 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-benchmark" -version = "3.4.1" +version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. 
It will group the tests into rounds that are calibrated to the chosen timer." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" [package.dependencies] py-cpuinfo = "*" @@ -746,7 +757,7 @@ python-versions = ">=3.7" [[package]] name = "tox" -version = "3.26.0" +version = "3.27.0" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -816,7 +827,7 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", [[package]] name = "zipp" -version = "3.9.0" +version = "3.10.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false @@ -829,7 +840,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "4ba4d3123e8803b62ee1d3f5043a3b6008386d0cec452300ef64e117493a60c4" +content-hash = "9e24c4a6981f196d340d868d48fe7285655f3161fa0eaa14aa6689e3a48ece0c" [metadata.files] alabaster = [ @@ -888,8 +899,8 @@ click = [ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, ] colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] coverage = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, @@ -951,6 +962,10 @@ docutils = [ {file = 
"docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] +exceptiongroup = [ + {file = "exceptiongroup-1.0.0-py3-none-any.whl", hash = "sha256:2ac84b496be68464a2da60da518af3785fff8b7ec0d090a581604bc870bdee41"}, + {file = "exceptiongroup-1.0.0.tar.gz", hash = "sha256:affbabf13fb6e98988c38d9c5650e701569fe3c1de3233cfb61c5f33774690ad"}, +] filelock = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, @@ -1100,7 +1115,8 @@ py = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] py-cpuinfo = [ - {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"}, + {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] pycodestyle = [ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, @@ -1119,16 +1135,16 @@ pyparsing = [ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ - {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, - {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, + {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, + {file 
= "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, ] pytest-benchmark = [ - {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, - {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, + {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, + {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, ] pytest-cov = [ {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, @@ -1249,8 +1265,8 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] tox = [ - {file = "tox-3.26.0-py2.py3-none-any.whl", hash = "sha256:bf037662d7c740d15c9924ba23bb3e587df20598697bb985ac2b49bdc2d847f6"}, - {file = "tox-3.26.0.tar.gz", hash = "sha256:44f3c347c68c2c68799d7d44f1808f9d396fc8a1a500cbc624253375c7ae107e"}, + {file = "tox-3.27.0-py2.py3-none-any.whl", hash = "sha256:89e4bc6df3854e9fc5582462e328dd3660d7d865ba625ae5881bbc63836a6324"}, + {file = "tox-3.27.0.tar.gz", hash = "sha256:d2c945f02a03d4501374a3d5430877380deb69b218b1df9b7f1d2f2a10befaf9"}, ] typed-ast = [ {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, @@ -1291,6 +1307,6 @@ virtualenv = [ {file = "virtualenv-20.16.2.tar.gz", hash = 
"sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, ] zipp = [ - {file = "zipp-3.9.0-py3-none-any.whl", hash = "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"}, - {file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"}, + {file = "zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, + {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, ] diff --git a/pyproject.toml b/pyproject.toml index 890aaff1..2b11664b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,9 +48,9 @@ typing-extensions = [ optional = true [tool.poetry.group.test.dependencies] -pytest = "^7.1" +pytest = "^7.2" pytest-asyncio = ">=0.20,<1" -pytest-benchmark = "^3.4" +pytest-benchmark = "^4.0" pytest-cov = "^4.0" pytest-describe = "^2.0" pytest-timeout = "^2.1" diff --git a/tox.ini b/tox.ini index 3029f3b4..17bb0ae0 100644 --- a/tox.ini +++ b/tox.ini @@ -51,9 +51,9 @@ commands = [testenv] deps = - pytest>=7.1,<8 + pytest>=7.2,<8 pytest-asyncio>=0.20,<1 - pytest-benchmark>=3.4,<4 + pytest-benchmark>=4,<5 pytest-cov>=4,<5 pytest-describe>=2,<3 pytest-timeout>=2,<3 From 4c8ecd484f19efa5b2e52838e226fd4358efd782 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 2 Nov 2022 16:19:03 +0100 Subject: [PATCH 049/230] Solve issues with pickled schemas (#173) --- src/graphql/pyutils/undefined.py | 15 +- src/graphql/type/definition.py | 17 + src/graphql/type/introspection.py | 513 +++++++++-------- src/graphql/type/scalars.py | 4 + src/graphql/type/schema.py | 5 +- src/graphql/utilities/build_ast_schema.py | 6 +- src/graphql/utilities/build_client_schema.py | 58 +- src/graphql/utilities/extend_schema.py | 517 ++++++++++-------- tests/language/test_block_string_fuzz.py | 4 +- tests/language/test_schema_parser.py | 51 +- tests/pyutils/test_undefined.py | 18 +- 
tests/type/test_definition.py | 21 + tests/type/test_scalars.py | 37 ++ tests/type/test_schema.py | 11 +- tests/utilities/test_build_ast_schema.py | 138 ++++- .../test_introspection_from_schema.py | 116 +++- tests/utilities/test_print_schema.py | 2 +- .../test_strip_ignored_characters_fuzz.py | 4 +- tests/utils/__init__.py | 8 +- tox.ini | 2 + 20 files changed, 1018 insertions(+), 529 deletions(-) diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index 8a078eba..a5ab96ec 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -1,4 +1,5 @@ -from typing import Any +import warnings +from typing import Any, Optional __all__ = ["Undefined", "UndefinedType"] @@ -7,6 +8,18 @@ class UndefinedType(ValueError): """Auxiliary class for creating the Undefined singleton.""" + _instance: Optional["UndefinedType"] = None + + def __new__(cls) -> "UndefinedType": + if cls._instance is None: + cls._instance = super().__new__(cls) + else: + warnings.warn("Redefinition of 'Undefined'", RuntimeWarning, stacklevel=2) + return cls._instance + + def __reduce__(self) -> str: + return "Undefined" + def __repr__(self) -> str: return "Undefined" diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 0f5895ac..d9fe289d 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -231,6 +231,23 @@ class GraphQLNamedType(GraphQLType): ast_node: Optional[TypeDefinitionNode] extension_ast_nodes: Tuple[TypeExtensionNode, ...] 
+ reserved_types: Dict[str, "GraphQLNamedType"] = {} + + def __new__(cls, name: str, *_args: Any, **_kwargs: Any) -> "GraphQLNamedType": + if name in cls.reserved_types: + raise TypeError(f"Redefinition of reserved type {name!r}") + return super().__new__(cls) + + def __reduce__(self) -> Tuple[Callable, Tuple]: + return self._get_instance, (self.name, tuple(self.to_kwargs().items())) + + @classmethod + def _get_instance(cls, name: str, args: Tuple) -> "GraphQLNamedType": + try: + return cls.reserved_types[name] + except KeyError: + return cls(**dict(args)) + def __init__( self, name: str, diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 160c582c..17922d21 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -8,6 +8,7 @@ GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLFieldMap, GraphQLList, GraphQLNamedType, GraphQLNonNull, @@ -35,88 +36,105 @@ ] -__Schema: GraphQLObjectType = GraphQLObjectType( +class SchemaFields(GraphQLFieldMap): + def __new__(cls): + return { + "description": GraphQLField(GraphQLString, resolve=cls.description), + "types": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_Type))), + resolve=cls.types, + description="A list of all types supported by this server.", + ), + "queryType": GraphQLField( + GraphQLNonNull(_Type), + resolve=cls.query_type, + description="The type that query operations will be rooted at.", + ), + "mutationType": GraphQLField( + _Type, + resolve=cls.mutation_type, + description="If this server supports mutation, the type that" + " mutation operations will be rooted at.", + ), + "subscriptionType": GraphQLField( + _Type, + resolve=cls.subscription_type, + description="If this server supports subscription, the type that" + " subscription operations will be rooted at.", + ), + "directives": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_Directive))), + resolve=cls.directives, + description="A list of all directives 
supported by this server.", + ), + } + + @staticmethod + def description(schema, _info): + return schema.description + + @staticmethod + def types(schema, _info): + return schema.type_map.values() + + @staticmethod + def query_type(schema, _info): + return schema.query_type + + @staticmethod + def mutation_type(schema, _info): + return schema.mutation_type + + @staticmethod + def subscription_type(schema, _info): + return schema.subscription_type + + @staticmethod + def directives(schema, _info): + return schema.directives + + +_Schema: GraphQLObjectType = GraphQLObjectType( name="__Schema", description="A GraphQL Schema defines the capabilities of a GraphQL" " server. It exposes all available types and directives" " on the server, as well as the entry points for query," " mutation, and subscription operations.", - fields=lambda: { - "description": GraphQLField( - GraphQLString, resolve=lambda schema, _info: schema.description - ), - "types": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__Type))), - resolve=lambda schema, _info: schema.type_map.values(), - description="A list of all types supported by this server.", - ), - "queryType": GraphQLField( - GraphQLNonNull(__Type), - resolve=lambda schema, _info: schema.query_type, - description="The type that query operations will be rooted at.", - ), - "mutationType": GraphQLField( - __Type, - resolve=lambda schema, _info: schema.mutation_type, - description="If this server supports mutation, the type that" - " mutation operations will be rooted at.", - ), - "subscriptionType": GraphQLField( - __Type, - resolve=lambda schema, _info: schema.subscription_type, - description="If this server support subscription, the type that" - " subscription operations will be rooted at.", - ), - "directives": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__Directive))), - resolve=lambda schema, _info: schema.directives, - description="A list of all directives supported by this server.", - ), - }, + 
fields=SchemaFields, ) -__Directive: GraphQLObjectType = GraphQLObjectType( - name="__Directive", - description="A Directive provides a way to describe alternate runtime" - " execution and type validation behavior in a GraphQL" - " document.\n\nIn some cases, you need to provide options" - " to alter GraphQL's execution behavior in ways field" - " arguments will not suffice, such as conditionally including" - " or skipping a field. Directives provide this by describing" - " additional information to the executor.", - fields=lambda: { - # Note: The fields onOperation, onFragment and onField are deprecated - "name": GraphQLField( - GraphQLNonNull(GraphQLString), - resolve=DirectiveResolvers.name, - ), - "description": GraphQLField( - GraphQLString, - resolve=DirectiveResolvers.description, - ), - "isRepeatable": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=DirectiveResolvers.is_repeatable, - ), - "locations": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__DirectiveLocation))), - resolve=DirectiveResolvers.locations, - ), - "args": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__InputValue))), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=DirectiveResolvers.args, - ), - }, -) - +class DirectiveFields(GraphQLFieldMap): + def __new__(cls): + return { + # Note: The fields onOperation, onFragment and onField are deprecated + "name": GraphQLField( + GraphQLNonNull(GraphQLString), + resolve=cls.name, + ), + "description": GraphQLField( + GraphQLString, + resolve=cls.description, + ), + "isRepeatable": GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=cls.is_repeatable, + ), + "locations": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_DirectiveLocation))), + resolve=cls.locations, + ), + "args": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_InputValue))), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) 
+ }, + resolve=cls.args, + ), + } -class DirectiveResolvers: @staticmethod def name(directive, _info): return directive.name @@ -144,7 +162,20 @@ def args(directive, _info, includeDeprecated=False): ) -__DirectiveLocation: GraphQLEnumType = GraphQLEnumType( +_Directive: GraphQLObjectType = GraphQLObjectType( + name="__Directive", + description="A Directive provides a way to describe alternate runtime" + " execution and type validation behavior in a GraphQL" + " document.\n\nIn some cases, you need to provide options" + " to alter GraphQL's execution behavior in ways field" + " arguments will not suffice, such as conditionally including" + " or skipping a field. Directives provide this by describing" + " additional information to the executor.", + fields=DirectiveFields, +) + + +_DirectiveLocation: GraphQLEnumType = GraphQLEnumType( name="__DirectiveLocation", description="A Directive can be adjacent to many parts of the GraphQL" " language, a __DirectiveLocation describes one such possible" @@ -229,65 +260,50 @@ def args(directive, _info, includeDeprecated=False): ) -__Type: GraphQLObjectType = GraphQLObjectType( - name="__Type", - description="The fundamental unit of any GraphQL Schema is the type." - " There are many kinds of types in GraphQL as represented" - " by the `__TypeKind` enum.\n\nDepending on the kind of a" - " type, certain fields describe information about that type." - " Scalar types provide no information beyond a name, description" - " and optional `specifiedByURL`, while Enum types provide their values." - " Object and Interface types provide the fields they describe." - " Abstract types, Union and Interface, provide the Object" - " types possible at runtime. 
List and NonNull types compose" - " other types.", - fields=lambda: { - "kind": GraphQLField(GraphQLNonNull(__TypeKind), resolve=TypeResolvers.kind), - "name": GraphQLField(GraphQLString, resolve=TypeResolvers.name), - "description": GraphQLField(GraphQLString, resolve=TypeResolvers.description), - "specifiedByURL": GraphQLField( - GraphQLString, resolve=TypeResolvers.specified_by_url - ), - "fields": GraphQLField( - GraphQLList(GraphQLNonNull(__Field)), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=TypeResolvers.fields, - ), - "interfaces": GraphQLField( - GraphQLList(GraphQLNonNull(__Type)), resolve=TypeResolvers.interfaces - ), - "possibleTypes": GraphQLField( - GraphQLList(GraphQLNonNull(__Type)), - resolve=TypeResolvers.possible_types, - ), - "enumValues": GraphQLField( - GraphQLList(GraphQLNonNull(__EnumValue)), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=TypeResolvers.enum_values, - ), - "inputFields": GraphQLField( - GraphQLList(GraphQLNonNull(__InputValue)), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=TypeResolvers.input_fields, - ), - "ofType": GraphQLField(__Type, resolve=TypeResolvers.of_type), - }, -) - +class TypeFields(GraphQLFieldMap): + def __new__(cls): + return { + "kind": GraphQLField(GraphQLNonNull(_TypeKind), resolve=cls.kind), + "name": GraphQLField(GraphQLString, resolve=cls.name), + "description": GraphQLField(GraphQLString, resolve=cls.description), + "specifiedByURL": GraphQLField(GraphQLString, resolve=cls.specified_by_url), + "fields": GraphQLField( + GraphQLList(GraphQLNonNull(_Field)), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.fields, + ), + "interfaces": GraphQLField( + GraphQLList(GraphQLNonNull(_Type)), resolve=cls.interfaces + ), + "possibleTypes": GraphQLField( + 
GraphQLList(GraphQLNonNull(_Type)), + resolve=cls.possible_types, + ), + "enumValues": GraphQLField( + GraphQLList(GraphQLNonNull(_EnumValue)), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.enum_values, + ), + "inputFields": GraphQLField( + GraphQLList(GraphQLNonNull(_InputValue)), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.input_fields, + ), + "ofType": GraphQLField(_Type, resolve=cls.of_type), + } -class TypeResolvers: @staticmethod def kind(type_, _info): if is_scalar_type(type_): @@ -370,38 +386,46 @@ def of_type(type_, _info): return getattr(type_, "of_type", None) -__Field: GraphQLObjectType = GraphQLObjectType( - name="__Field", - description="Object and Interface types are described by a list of Fields," - " each of which has a name, potentially a list of arguments," - " and a return type.", - fields=lambda: { - "name": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=FieldResolvers.name - ), - "description": GraphQLField(GraphQLString, resolve=FieldResolvers.description), - "args": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__InputValue))), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=FieldResolvers.args, - ), - "type": GraphQLField(GraphQLNonNull(__Type), resolve=FieldResolvers.type), - "isDeprecated": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=FieldResolvers.is_deprecated, - ), - "deprecationReason": GraphQLField( - GraphQLString, resolve=FieldResolvers.deprecation_reason - ), - }, +_Type: GraphQLObjectType = GraphQLObjectType( + name="__Type", + description="The fundamental unit of any GraphQL Schema is the type." + " There are many kinds of types in GraphQL as represented" + " by the `__TypeKind` enum.\n\nDepending on the kind of a" + " type, certain fields describe information about that type." 
+ " Scalar types provide no information beyond a name, description" + " and optional `specifiedByURL`, while Enum types provide their values." + " Object and Interface types provide the fields they describe." + " Abstract types, Union and Interface, provide the Object" + " types possible at runtime. List and NonNull types compose" + " other types.", + fields=TypeFields, ) -class FieldResolvers: +class FieldFields(GraphQLFieldMap): + def __new__(cls): + return { + "name": GraphQLField(GraphQLNonNull(GraphQLString), resolve=cls.name), + "description": GraphQLField(GraphQLString, resolve=cls.description), + "args": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_InputValue))), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.args, + ), + "type": GraphQLField(GraphQLNonNull(_Type), resolve=cls.type), + "isDeprecated": GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=cls.is_deprecated, + ), + "deprecationReason": GraphQLField( + GraphQLString, resolve=cls.deprecation_reason + ), + } + @staticmethod def name(item, _info): return item[0] @@ -433,39 +457,38 @@ def deprecation_reason(item, _info): return item[1].deprecation_reason -__InputValue: GraphQLObjectType = GraphQLObjectType( - name="__InputValue", - description="Arguments provided to Fields or Directives and the input" - " fields of an InputObject are represented as Input Values" - " which describe their type and optionally a default value.", - fields=lambda: { - "name": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=InputValueFieldResolvers.name - ), - "description": GraphQLField( - GraphQLString, resolve=InputValueFieldResolvers.description - ), - "type": GraphQLField( - GraphQLNonNull(__Type), resolve=InputValueFieldResolvers.type - ), - "defaultValue": GraphQLField( - GraphQLString, - description="A GraphQL-formatted string representing" - " the default value for this input value.", - 
resolve=InputValueFieldResolvers.default_value, - ), - "isDeprecated": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=InputValueFieldResolvers.is_deprecated, - ), - "deprecationReason": GraphQLField( - GraphQLString, resolve=InputValueFieldResolvers.deprecation_reason - ), - }, +_Field: GraphQLObjectType = GraphQLObjectType( + name="__Field", + description="Object and Interface types are described by a list of Fields," + " each of which has a name, potentially a list of arguments," + " and a return type.", + fields=FieldFields, ) -class InputValueFieldResolvers: +class InputValueFields(GraphQLFieldMap): + def __new__(cls): + return { + "name": GraphQLField(GraphQLNonNull(GraphQLString), resolve=cls.name), + "description": GraphQLField( + GraphQLString, resolve=InputValueFields.description + ), + "type": GraphQLField(GraphQLNonNull(_Type), resolve=cls.type), + "defaultValue": GraphQLField( + GraphQLString, + description="A GraphQL-formatted string representing" + " the default value for this input value.", + resolve=cls.default_value, + ), + "isDeprecated": GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=cls.is_deprecated, + ), + "deprecationReason": GraphQLField( + GraphQLString, resolve=cls.deprecation_reason + ), + } + @staticmethod def name(item, _info): return item[0] @@ -495,27 +518,57 @@ def deprecation_reason(item, _info): return item[1].deprecation_reason -__EnumValue: GraphQLObjectType = GraphQLObjectType( +_InputValue: GraphQLObjectType = GraphQLObjectType( + name="__InputValue", + description="Arguments provided to Fields or Directives and the input" + " fields of an InputObject are represented as Input Values" + " which describe their type and optionally a default value.", + fields=InputValueFields, +) + + +class EnumValueFields(GraphQLFieldMap): + def __new__(cls): + return { + "name": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=EnumValueFields.name + ), + "description": GraphQLField( + GraphQLString, 
resolve=EnumValueFields.description + ), + "isDeprecated": GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=EnumValueFields.is_deprecated, + ), + "deprecationReason": GraphQLField( + GraphQLString, resolve=EnumValueFields.deprecation_reason + ), + } + + @staticmethod + def name(item, _info): + return item[0] + + @staticmethod + def description(item, _info): + return item[1].description + + @staticmethod + def is_deprecated(item, _info): + return item[1].deprecation_reason is not None + + @staticmethod + def deprecation_reason(item, _info): + return item[1].deprecation_reason + + +_EnumValue: GraphQLObjectType = GraphQLObjectType( name="__EnumValue", description="One possible value for a given Enum. Enum values are unique" " values, not a placeholder for a string or numeric value." " However an Enum value is returned in a JSON response as a" " string.", - fields=lambda: { - "name": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=lambda item, _info: item[0] - ), - "description": GraphQLField( - GraphQLString, resolve=lambda item, _info: item[1].description - ), - "isDeprecated": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=lambda item, _info: item[1].deprecation_reason is not None, - ), - "deprecationReason": GraphQLField( - GraphQLString, resolve=lambda item, _info: item[1].deprecation_reason - ), - }, + fields=EnumValueFields, ) @@ -530,7 +583,7 @@ class TypeKind(Enum): NON_NULL = "non-null" -__TypeKind: GraphQLEnumType = GraphQLEnumType( +_TypeKind: GraphQLEnumType = GraphQLEnumType( name="__TypeKind", description="An enum describing what kind of type a given `__Type` is.", values={ @@ -575,19 +628,33 @@ class TypeKind(Enum): ) +class MetaFields: + @staticmethod + def schema(_source, info): + return info.schema + + @staticmethod + def type(_source, info, **args): + return info.schema.get_type(args["name"]) + + @staticmethod + def type_name(_source, info, **_args): + return info.parent_type.name + + SchemaMetaFieldDef = GraphQLField( - 
GraphQLNonNull(__Schema), # name = '__schema' + GraphQLNonNull(_Schema), # name = '__schema' description="Access the current type schema of this server.", args={}, - resolve=lambda _source, info: info.schema, + resolve=MetaFields.schema, ) TypeMetaFieldDef = GraphQLField( - __Type, # name = '__type' + _Type, # name = '__type' description="Request the type information of a single type.", args={"name": GraphQLArgument(GraphQLNonNull(GraphQLString))}, - resolve=lambda _source, info, **args: info.schema.get_type(args["name"]), + resolve=MetaFields.type, ) @@ -595,21 +662,21 @@ class TypeKind(Enum): GraphQLNonNull(GraphQLString), # name='__typename' description="The name of the current Object type at runtime.", args={}, - resolve=lambda _source, info, **_args: info.parent_type.name, + resolve=MetaFields.type_name, ) # Since double underscore names are subject to name mangling in Python, # the introspection classes are best imported via this dictionary: introspection_types: Mapping[str, GraphQLNamedType] = { # treat as read-only - "__Schema": __Schema, - "__Directive": __Directive, - "__DirectiveLocation": __DirectiveLocation, - "__Type": __Type, - "__Field": __Field, - "__InputValue": __InputValue, - "__EnumValue": __EnumValue, - "__TypeKind": __TypeKind, + "__Schema": _Schema, + "__Directive": _Directive, + "__DirectiveLocation": _DirectiveLocation, + "__Type": _Type, + "__Field": _Field, + "__InputValue": _InputValue, + "__EnumValue": _EnumValue, + "__TypeKind": _TypeKind, } """A mapping containing all introspection types with their names as keys""" @@ -617,3 +684,7 @@ class TypeKind(Enum): def is_introspection_type(type_: GraphQLNamedType) -> bool: """Check whether the given named GraphQL type is an introspection type.""" return type_.name in introspection_types + + +# register the introspection types to avoid redefinition +GraphQLNamedType.reserved_types.update(introspection_types) diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 
1271e27b..3f7263c1 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -320,3 +320,7 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: def is_specified_scalar_type(type_: GraphQLNamedType) -> bool: """Check whether the given named GraphQL type is a specified scalar type.""" return type_.name in specified_scalar_types + + +# register the scalar types to avoid redefinition +GraphQLNamedType.reserved_types.update(specified_scalar_types) diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 25e18d8b..c910fdc6 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -208,8 +208,8 @@ def __init__( # Provide specified directives (e.g. @include and @skip) by default self.directives = specified_directives if directives is None else directives - # To preserve order of user-provided types, we add first to add them to - # the set of "collected" types, so `collect_referenced_types` ignore them. + # To preserve order of user-provided types, we first add them to the set + # of "collected" types, so `collect_referenced_types` ignores them. if types: all_referenced_types = TypeSet.with_initial_types(types) collect_referenced_types = all_referenced_types.collect_referenced_types @@ -262,6 +262,7 @@ def __init__( "Schema must contain uniquely named types" f" but contains multiple types named '{type_name}'." 
) + type_map[type_name] = named_type if is_interface_type(named_type): diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index d2597e0e..3f4fb804 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -7,7 +7,7 @@ GraphQLSchemaKwargs, specified_directives, ) -from .extend_schema import extend_schema_impl +from .extend_schema import ExtendSchemaImpl __all__ = [ @@ -57,7 +57,9 @@ def build_ast_schema( extension_ast_nodes=(), assume_valid=False, ) - schema_kwargs = extend_schema_impl(empty_schema_kwargs, document_ast, assume_valid) + schema_kwargs = ExtendSchemaImpl.extend_schema_args( + empty_schema_kwargs, document_ast, assume_valid + ) if not schema_kwargs["ast_node"]: for type_ in schema_kwargs["types"] or (): diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index e1c128fb..1f6694b1 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -134,11 +134,15 @@ def build_type(type_: IntrospectionType) -> GraphQLNamedType: def build_scalar_def( scalar_introspection: IntrospectionScalarType, ) -> GraphQLScalarType: - return GraphQLScalarType( - name=scalar_introspection["name"], - description=scalar_introspection.get("description"), - specified_by_url=scalar_introspection.get("specifiedByURL"), - ) + name = scalar_introspection["name"] + try: + return cast(GraphQLScalarType, GraphQLScalarType.reserved_types[name]) + except KeyError: + return GraphQLScalarType( + name=name, + description=scalar_introspection.get("description"), + specified_by_url=scalar_introspection.get("specifiedByURL"), + ) def build_implementations_list( implementing_introspection: Union[ @@ -161,12 +165,16 @@ def build_implementations_list( def build_object_def( object_introspection: IntrospectionObjectType, ) -> GraphQLObjectType: - return GraphQLObjectType( - 
name=object_introspection["name"], - description=object_introspection.get("description"), - interfaces=lambda: build_implementations_list(object_introspection), - fields=lambda: build_field_def_map(object_introspection), - ) + name = object_introspection["name"] + try: + return cast(GraphQLObjectType, GraphQLObjectType.reserved_types[name]) + except KeyError: + return GraphQLObjectType( + name=name, + description=object_introspection.get("description"), + interfaces=lambda: build_implementations_list(object_introspection), + fields=lambda: build_field_def_map(object_introspection), + ) def build_interface_def( interface_introspection: IntrospectionInterfaceType, @@ -200,18 +208,22 @@ def build_enum_def(enum_introspection: IntrospectionEnumType) -> GraphQLEnumType "Introspection result missing enumValues:" f" {inspect(enum_introspection)}." ) - return GraphQLEnumType( - name=enum_introspection["name"], - description=enum_introspection.get("description"), - values={ - value_introspect["name"]: GraphQLEnumValue( - value=value_introspect["name"], - description=value_introspect.get("description"), - deprecation_reason=value_introspect.get("deprecationReason"), - ) - for value_introspect in enum_introspection["enumValues"] - }, - ) + name = enum_introspection["name"] + try: + return cast(GraphQLEnumType, GraphQLEnumType.reserved_types[name]) + except KeyError: + return GraphQLEnumType( + name=name, + description=enum_introspection.get("description"), + values={ + value_introspect["name"]: GraphQLEnumValue( + value=value_introspect["name"], + description=value_introspect.get("description"), + deprecation_reason=value_introspect.get("deprecationReason"), + ) + for value_introspect in enum_introspection["enumValues"] + }, + ) def build_input_object_def( input_object_introspection: IntrospectionInputObjectType, diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 37b9bd98..950b8740 100644 --- 
a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -1,13 +1,14 @@ from collections import defaultdict +from functools import partial from typing import ( Any, - Callable, Collection, DefaultDict, Dict, List, Mapping, Optional, + Tuple, Union, cast, ) @@ -88,7 +89,7 @@ __all__ = [ "extend_schema", - "extend_schema_impl", + "ExtendSchemaImpl", ] @@ -126,153 +127,221 @@ def extend_schema( assert_valid_sdl_extension(document_ast, schema) schema_kwargs = schema.to_kwargs() - extended_kwargs = extend_schema_impl(schema_kwargs, document_ast, assume_valid) + extended_kwargs = ExtendSchemaImpl.extend_schema_args( + schema_kwargs, document_ast, assume_valid + ) return ( schema if schema_kwargs is extended_kwargs else GraphQLSchema(**extended_kwargs) ) -def extend_schema_impl( - schema_kwargs: GraphQLSchemaKwargs, - document_ast: DocumentNode, - assume_valid: bool = False, -) -> GraphQLSchemaKwargs: - """Extend the given schema arguments with extensions from a given document. +class ExtendSchemaImpl: + """Helper class implementing the methods to extend a schema. + + Note: We use a class instead of an implementation with local functions + and lambda functions so that the extended schema can be pickled. For internal use only. """ - # Note: schema_kwargs should become a TypedDict once we require Python 3.8 - - # Collect the type definitions and extensions found in the document. - type_defs: List[TypeDefinitionNode] = [] - type_extensions_map: DefaultDict[str, Any] = defaultdict(list) - - # New directives and types are separate because a directives and types can have the - # same name. For example, a type named "skip". - directive_defs: List[DirectiveDefinitionNode] = [] - - schema_def: Optional[SchemaDefinitionNode] = None - # Schema extensions are collected which may add additional operation types. 
- schema_extensions: List[SchemaExtensionNode] = [] - - for def_ in document_ast.definitions: - if isinstance(def_, SchemaDefinitionNode): - schema_def = def_ - elif isinstance(def_, SchemaExtensionNode): - schema_extensions.append(def_) - elif isinstance(def_, TypeDefinitionNode): - type_defs.append(def_) - elif isinstance(def_, TypeExtensionNode): - extended_type_name = def_.name.value - type_extensions_map[extended_type_name].append(def_) - elif isinstance(def_, DirectiveDefinitionNode): - directive_defs.append(def_) - - # If this document contains no new types, extensions, or directives then return the - # same unmodified GraphQLSchema instance. - if ( - not type_extensions_map - and not type_defs - and not directive_defs - and not schema_extensions - and not schema_def - ): - return schema_kwargs - - # Below are functions used for producing this schema that have closed over this - # scope and have access to the schema, cache, and newly defined types. + + type_map: Dict[str, GraphQLNamedType] + type_extensions_map: Dict[str, Any] + + def __init__(self, type_extensions_map: Dict[str, Any]): + self.type_map = {} + self.type_extensions_map = type_extensions_map + + @classmethod + def extend_schema_args( + cls, + schema_kwargs: GraphQLSchemaKwargs, + document_ast: DocumentNode, + assume_valid: bool = False, + ) -> GraphQLSchemaKwargs: + """Extend the given schema arguments with extensions from a given document. + + For internal use only. + """ + # Note: schema_kwargs should become a TypedDict once we require Python 3.8 + + # Collect the type definitions and extensions found in the document. + type_defs: List[TypeDefinitionNode] = [] + type_extensions_map: DefaultDict[str, Any] = defaultdict(list) + + # New directives and types are separate because a directives and types can have + # the same name. For example, a type named "skip". 
+ directive_defs: List[DirectiveDefinitionNode] = [] + + schema_def: Optional[SchemaDefinitionNode] = None + # Schema extensions are collected which may add additional operation types. + schema_extensions: List[SchemaExtensionNode] = [] + + for def_ in document_ast.definitions: + if isinstance(def_, SchemaDefinitionNode): + schema_def = def_ + elif isinstance(def_, SchemaExtensionNode): + schema_extensions.append(def_) + elif isinstance(def_, TypeDefinitionNode): + type_defs.append(def_) + elif isinstance(def_, TypeExtensionNode): + extended_type_name = def_.name.value + type_extensions_map[extended_type_name].append(def_) + elif isinstance(def_, DirectiveDefinitionNode): + directive_defs.append(def_) + + # If this document contains no new types, extensions, or directives then return + # the same unmodified GraphQLSchema instance. + if ( + not type_extensions_map + and not type_defs + and not directive_defs + and not schema_extensions + and not schema_def + ): + return schema_kwargs + + self = cls(type_extensions_map) + for existing_type in schema_kwargs["types"] or (): + self.type_map[existing_type.name] = self.extend_named_type(existing_type) + for type_node in type_defs: + name = type_node.name.value + self.type_map[name] = std_type_map.get(name) or self.build_type(type_node) + + # Get the extended root operation types. + operation_types: Dict[OperationType, GraphQLNamedType] = {} + for operation_type in OperationType: + original_type = schema_kwargs[operation_type.value] + if original_type: + operation_types[operation_type] = self.replace_named_type(original_type) + # Then, incorporate schema definition and all schema extensions. + if schema_def: + operation_types.update(self.get_operation_types([schema_def])) + if schema_extensions: + operation_types.update(self.get_operation_types(schema_extensions)) + + # Then produce and return the kwargs for a Schema with these types. 
+ get_operation = operation_types.get + return GraphQLSchemaKwargs( + query=get_operation(OperationType.QUERY), # type: ignore + mutation=get_operation(OperationType.MUTATION), # type: ignore + subscription=get_operation(OperationType.SUBSCRIPTION), # type: ignore + types=tuple(self.type_map.values()), + directives=tuple( + self.replace_directive(directive) + for directive in schema_kwargs["directives"] + ) + + tuple(self.build_directive(directive) for directive in directive_defs), + description=schema_def.description.value + if schema_def and schema_def.description + else None, + extensions={}, + ast_node=schema_def or schema_kwargs["ast_node"], + extension_ast_nodes=schema_kwargs["extension_ast_nodes"] + + tuple(schema_extensions), + assume_valid=assume_valid, + ) # noinspection PyTypeChecker,PyUnresolvedReferences - def replace_type(type_: GraphQLType) -> GraphQLType: + def replace_type(self, type_: GraphQLType) -> GraphQLType: if is_list_type(type_): - return GraphQLList(replace_type(type_.of_type)) # type: ignore + return GraphQLList(self.replace_type(type_.of_type)) # type: ignore if is_non_null_type(type_): - return GraphQLNonNull(replace_type(type_.of_type)) # type: ignore - return replace_named_type(type_) # type: ignore + return GraphQLNonNull(self.replace_type(type_.of_type)) # type: ignore + return self.replace_named_type(type_) # type: ignore - def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: + def replace_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system # validation with validate_schema() will produce more actionable results. 
- return type_map[type_.name] + return self.type_map[type_.name] # noinspection PyShadowingNames - def replace_directive(directive: GraphQLDirective) -> GraphQLDirective: + def replace_directive(self, directive: GraphQLDirective) -> GraphQLDirective: kwargs = directive.to_kwargs() return GraphQLDirective( **merge_kwargs( kwargs, - args={name: extend_arg(arg) for name, arg in kwargs["args"].items()}, + args={ + name: self.extend_arg(arg) for name, arg in kwargs["args"].items() + }, ) ) - def extend_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: + def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: if is_introspection_type(type_) or is_specified_scalar_type(type_): # Builtin types are not extended. return type_ if is_scalar_type(type_): type_ = cast(GraphQLScalarType, type_) - return extend_scalar_type(type_) + return self.extend_scalar_type(type_) if is_object_type(type_): type_ = cast(GraphQLObjectType, type_) - return extend_object_type(type_) + return self.extend_object_type(type_) if is_interface_type(type_): type_ = cast(GraphQLInterfaceType, type_) - return extend_interface_type(type_) + return self.extend_interface_type(type_) if is_union_type(type_): type_ = cast(GraphQLUnionType, type_) - return extend_union_type(type_) + return self.extend_union_type(type_) if is_enum_type(type_): type_ = cast(GraphQLEnumType, type_) - return extend_enum_type(type_) + return self.extend_enum_type(type_) if is_input_object_type(type_): type_ = cast(GraphQLInputObjectType, type_) - return extend_input_object_type(type_) + return self.extend_input_object_type(type_) # Not reachable. All possible types have been considered. raise TypeError(f"Unexpected type: {inspect(type_)}.") # pragma: no cover + def extend_input_object_type_fields( + self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
+ ) -> GraphQLInputFieldMap: + return { + **{ + name: GraphQLInputField( + **merge_kwargs( + field.to_kwargs(), + type_=self.replace_type(field.type), + ) + ) + for name, field in kwargs["fields"].items() + }, + **self.build_input_field_map(extensions), + } + # noinspection PyShadowingNames def extend_input_object_type( + self, type_: GraphQLInputObjectType, ) -> GraphQLInputObjectType: kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions_map[kwargs["name"]]) return GraphQLInputObjectType( **merge_kwargs( kwargs, - fields=lambda: { - **{ - name: GraphQLInputField( - **merge_kwargs( - field.to_kwargs(), - type_=replace_type(field.type), - ) - ) - for name, field in kwargs["fields"].items() - }, - **build_input_field_map(extensions), - }, + fields=partial( + self.extend_input_object_type_fields, kwargs, extensions + ), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) - def extend_enum_type(type_: GraphQLEnumType) -> GraphQLEnumType: + def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType: kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions_map[kwargs["name"]]) return GraphQLEnumType( **merge_kwargs( kwargs, - values={**kwargs["values"], **build_enum_value_map(extensions)}, + values={**kwargs["values"], **self.build_enum_value_map(extensions)}, extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) - def extend_scalar_type(type_: GraphQLScalarType) -> GraphQLScalarType: + def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions_map[kwargs["name"]]) specified_by_url = kwargs["specified_by_url"] for extension_node in extensions: @@ -286,120 +355,148 @@ def extend_scalar_type(type_: GraphQLScalarType) -> GraphQLScalarType: ) ) + def 
extend_object_type_interfaces( + self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] + ) -> List[GraphQLInterfaceType]: + return [ + cast(GraphQLInterfaceType, self.replace_named_type(interface)) + for interface in kwargs["interfaces"] + ] + self.build_interfaces(extensions) + + def extend_object_type_fields( + self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] + ) -> GraphQLFieldMap: + return { + **{ + name: self.extend_field(field) + for name, field in kwargs["fields"].items() + }, + **self.build_field_map(extensions), + } + # noinspection PyShadowingNames - def extend_object_type(type_: GraphQLObjectType) -> GraphQLObjectType: + def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions_map[kwargs["name"]]) return GraphQLObjectType( **merge_kwargs( kwargs, - interfaces=lambda: [ - cast(GraphQLInterfaceType, replace_named_type(interface)) - for interface in kwargs["interfaces"] - ] - + build_interfaces(extensions), - fields=lambda: { - **{ - name: extend_field(field) - for name, field in kwargs["fields"].items() - }, - **build_field_map(extensions), - }, + interfaces=partial( + self.extend_object_type_interfaces, kwargs, extensions + ), + fields=partial(self.extend_object_type_fields, kwargs, extensions), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) + def extend_interface_type_interfaces( + self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] + ) -> List[GraphQLInterfaceType]: + return [ + cast(GraphQLInterfaceType, self.replace_named_type(interface)) + for interface in kwargs["interfaces"] + ] + self.build_interfaces(extensions) + + def extend_interface_type_fields( + self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
+ ) -> GraphQLFieldMap: + return { + **{ + name: self.extend_field(field) + for name, field in kwargs["fields"].items() + }, + **self.build_field_map(extensions), + } + # noinspection PyShadowingNames - def extend_interface_type(type_: GraphQLInterfaceType) -> GraphQLInterfaceType: + def extend_interface_type( + self, type_: GraphQLInterfaceType + ) -> GraphQLInterfaceType: kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions_map[kwargs["name"]]) return GraphQLInterfaceType( **merge_kwargs( kwargs, - interfaces=lambda: [ - cast(GraphQLInterfaceType, replace_named_type(interface)) - for interface in kwargs["interfaces"] - ] - + build_interfaces(extensions), - fields=lambda: { - **{ - name: extend_field(field) - for name, field in kwargs["fields"].items() - }, - **build_field_map(extensions), - }, + interfaces=partial( + self.extend_interface_type_interfaces, kwargs, extensions + ), + fields=partial(self.extend_interface_type_fields, kwargs, extensions), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) - def extend_union_type(type_: GraphQLUnionType) -> GraphQLUnionType: + def extend_union_type_types( + self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
+ ) -> List[GraphQLObjectType]: + return [ + cast(GraphQLObjectType, self.replace_named_type(member_type)) + for member_type in kwargs["types"] + ] + self.build_union_types(extensions) + + def extend_union_type(self, type_: GraphQLUnionType) -> GraphQLUnionType: kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions_map[kwargs["name"]]) return GraphQLUnionType( **merge_kwargs( kwargs, - types=lambda: [ - cast(GraphQLObjectType, replace_named_type(member_type)) - for member_type in kwargs["types"] - ] - + build_union_types(extensions), + types=partial(self.extend_union_type_types, kwargs, extensions), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, - ) + ), ) # noinspection PyShadowingNames - def extend_field(field: GraphQLField) -> GraphQLField: + def extend_field(self, field: GraphQLField) -> GraphQLField: return GraphQLField( **merge_kwargs( field.to_kwargs(), - type_=replace_type(field.type), - args={name: extend_arg(arg) for name, arg in field.args.items()}, + type_=self.replace_type(field.type), + args={name: self.extend_arg(arg) for name, arg in field.args.items()}, ) ) - def extend_arg(arg: GraphQLArgument) -> GraphQLArgument: + def extend_arg(self, arg: GraphQLArgument) -> GraphQLArgument: return GraphQLArgument( **merge_kwargs( arg.to_kwargs(), - type_=replace_type(arg.type), + type_=self.replace_type(arg.type), ) ) # noinspection PyShadowingNames def get_operation_types( - nodes: Collection[Union[SchemaDefinitionNode, SchemaExtensionNode]] + self, nodes: Collection[Union[SchemaDefinitionNode, SchemaExtensionNode]] ) -> Dict[OperationType, GraphQLNamedType]: # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system # validation with validate_schema() will produce more actionable results. 
return { - operation_type.operation: get_named_type(operation_type.type) + operation_type.operation: self.get_named_type(operation_type.type) for node in nodes for operation_type in node.operation_types or [] } # noinspection PyShadowingNames - def get_named_type(node: NamedTypeNode) -> GraphQLNamedType: + def get_named_type(self, node: NamedTypeNode) -> GraphQLNamedType: name = node.name.value - type_ = std_type_map.get(name) or type_map.get(name) + type_ = std_type_map.get(name) or self.type_map.get(name) if not type_: raise TypeError(f"Unknown type: '{name}'.") return type_ - def get_wrapped_type(node: TypeNode) -> GraphQLType: + def get_wrapped_type(self, node: TypeNode) -> GraphQLType: if isinstance(node, ListTypeNode): - return GraphQLList(get_wrapped_type(node.type)) + return GraphQLList(self.get_wrapped_type(node.type)) if isinstance(node, NonNullTypeNode): return GraphQLNonNull( - cast(GraphQLNullableType, get_wrapped_type(node.type)) + cast(GraphQLNullableType, self.get_wrapped_type(node.type)) ) - return get_named_type(cast(NamedTypeNode, node)) + return self.get_named_type(cast(NamedTypeNode, node)) - def build_directive(node: DirectiveDefinitionNode) -> GraphQLDirective: + def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective: locations = [DirectiveLocation[node.value] for node in node.locations] return GraphQLDirective( @@ -407,11 +504,12 @@ def build_directive(node: DirectiveDefinitionNode) -> GraphQLDirective: description=node.description.value if node.description else None, locations=locations, is_repeatable=node.repeatable, - args=build_argument_map(node.arguments), + args=self.build_argument_map(node.arguments), ast_node=node, ) def build_field_map( + self, nodes: Collection[ Union[ InterfaceTypeDefinitionNode, @@ -428,15 +526,16 @@ def build_field_map( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. 
field_map[field.name.value] = GraphQLField( - type_=cast(GraphQLOutputType, get_wrapped_type(field.type)), + type_=cast(GraphQLOutputType, self.get_wrapped_type(field.type)), description=field.description.value if field.description else None, - args=build_argument_map(field.arguments), + args=self.build_argument_map(field.arguments), deprecation_reason=get_deprecation_reason(field), ast_node=field, ) return field_map def build_argument_map( + self, args: Optional[Collection[InputValueDefinitionNode]], ) -> GraphQLArgumentMap: arg_map: GraphQLArgumentMap = {} @@ -444,7 +543,7 @@ def build_argument_map( # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. - type_ = cast(GraphQLInputType, get_wrapped_type(arg.type)) + type_ = cast(GraphQLInputType, self.get_wrapped_type(arg.type)) arg_map[arg.name.value] = GraphQLArgument( type_=type_, description=arg.description.value if arg.description else None, @@ -455,6 +554,7 @@ def build_argument_map( return arg_map def build_input_field_map( + self, nodes: Collection[ Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] ], @@ -465,7 +565,7 @@ def build_input_field_map( # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. 
- type_ = cast(GraphQLInputType, get_wrapped_type(field.type)) + type_ = cast(GraphQLInputType, self.get_wrapped_type(field.type)) input_field_map[field.name.value] = GraphQLInputField( type_=type_, description=field.description.value if field.description else None, @@ -475,6 +575,7 @@ def build_input_field_map( ) return input_field_map + @staticmethod def build_enum_value_map( nodes: Collection[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] ) -> GraphQLEnumValueMap: @@ -494,6 +595,7 @@ def build_enum_value_map( return enum_value_map def build_interfaces( + self, nodes: Collection[ Union[ InterfaceTypeDefinitionNode, @@ -503,29 +605,32 @@ def build_interfaces( ] ], ) -> List[GraphQLInterfaceType]: - interfaces: List[GraphQLInterfaceType] = [] - for node in nodes: - for type_ in node.interfaces or []: - # Note: While this could make assertions to get the correctly typed - # value, that would throw immediately while type system validation - # with validate_schema() will produce more actionable results. - interfaces.append(cast(GraphQLInterfaceType, get_named_type(type_))) - return interfaces + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema() will produce more actionable results. + return [ + cast(GraphQLInterfaceType, self.get_named_type(type_)) + for node in nodes + for type_ in node.interfaces or [] + ] def build_union_types( + self, nodes: Collection[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]], ) -> List[GraphQLObjectType]: - types: List[GraphQLObjectType] = [] - for node in nodes: - for type_ in node.types or []: - # Note: While this could make assertions to get the correctly typed - # value, that would throw immediately while type system validation - # with validate_schema() will produce more actionable results. 
- types.append(cast(GraphQLObjectType, get_named_type(type_))) - return types + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema() will produce more actionable results. + return [ + cast(GraphQLObjectType, self.get_named_type(type_)) + for node in nodes + for type_ in node.types or [] + ] - def build_object_type(ast_node: ObjectTypeDefinitionNode) -> GraphQLObjectType: - extension_nodes = type_extensions_map[ast_node.name.value] + def build_object_type( + self, ast_node: ObjectTypeDefinitionNode + ) -> GraphQLObjectType: + extension_nodes = self.type_extensions_map[ast_node.name.value] all_nodes: List[Union[ObjectTypeDefinitionNode, ObjectTypeExtensionNode]] = [ ast_node, *extension_nodes, @@ -533,30 +638,31 @@ def build_object_type(ast_node: ObjectTypeDefinitionNode) -> GraphQLObjectType: return GraphQLObjectType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - interfaces=lambda: build_interfaces(all_nodes), - fields=lambda: build_field_map(all_nodes), + interfaces=partial(self.build_interfaces, all_nodes), + fields=partial(self.build_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) def build_interface_type( + self, ast_node: InterfaceTypeDefinitionNode, ) -> GraphQLInterfaceType: - extension_nodes = type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions_map[ast_node.name.value] all_nodes: List[ Union[InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode] ] = [ast_node, *extension_nodes] return GraphQLInterfaceType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - interfaces=lambda: build_interfaces(all_nodes), - fields=lambda: build_field_map(all_nodes), + interfaces=partial(self.build_interfaces, all_nodes), + fields=partial(self.build_field_map, all_nodes), ast_node=ast_node, 
extension_ast_nodes=extension_nodes, ) - def build_enum_type(ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: - extension_nodes = type_extensions_map[ast_node.name.value] + def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: + extension_nodes = self.type_extensions_map[ast_node.name.value] all_nodes: List[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] = [ ast_node, *extension_nodes, @@ -564,13 +670,13 @@ def build_enum_type(ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: return GraphQLEnumType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - values=build_enum_value_map(all_nodes), + values=self.build_enum_value_map(all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) - def build_union_type(ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: - extension_nodes = type_extensions_map[ast_node.name.value] + def build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: + extension_nodes = self.type_extensions_map[ast_node.name.value] all_nodes: List[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]] = [ ast_node, *extension_nodes, @@ -578,13 +684,15 @@ def build_union_type(ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: return GraphQLUnionType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - types=lambda: build_union_types(all_nodes), + types=partial(self.build_union_types, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) - def build_scalar_type(ast_node: ScalarTypeDefinitionNode) -> GraphQLScalarType: - extension_nodes = type_extensions_map[ast_node.name.value] + def build_scalar_type( + self, ast_node: ScalarTypeDefinitionNode + ) -> GraphQLScalarType: + extension_nodes = self.type_extensions_map[ast_node.name.value] return GraphQLScalarType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, @@ 
-594,83 +702,36 @@ def build_scalar_type(ast_node: ScalarTypeDefinitionNode) -> GraphQLScalarType: ) def build_input_object_type( + self, ast_node: InputObjectTypeDefinitionNode, ) -> GraphQLInputObjectType: - extension_nodes = type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions_map[ast_node.name.value] all_nodes: List[ Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] ] = [ast_node, *extension_nodes] return GraphQLInputObjectType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - fields=lambda: build_input_field_map(all_nodes), + fields=partial(self.build_input_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) - build_type_for_kind = cast( - Dict[str, Callable[[TypeDefinitionNode], GraphQLNamedType]], - { - "object_type_definition": build_object_type, - "interface_type_definition": build_interface_type, - "enum_type_definition": build_enum_type, - "union_type_definition": build_union_type, - "scalar_type_definition": build_scalar_type, - "input_object_type_definition": build_input_object_type, - }, - ) - - def build_type(ast_node: TypeDefinitionNode) -> GraphQLNamedType: + def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: + kind = ast_node.kind try: - # object_type_definition_node is built with _build_object_type etc. - build_function = build_type_for_kind[ast_node.kind] - except KeyError: # pragma: no cover + kind = kind.removesuffix("_definition") + except AttributeError: # pragma: no cover (Python < 3.9) + if kind.endswith("_definition"): + kind = kind[:-11] + try: + build = getattr(self, f"build_{kind}") + except AttributeError: # pragma: no cover # Not reachable. All possible type definition nodes have been considered. raise TypeError( # pragma: no cover f"Unexpected type definition node: {inspect(ast_node)}." 
) - else: - return build_function(ast_node) - - type_map: Dict[str, GraphQLNamedType] = {} - for existing_type in schema_kwargs["types"] or (): - type_map[existing_type.name] = extend_named_type(existing_type) - for type_node in type_defs: - name = type_node.name.value - type_map[name] = std_type_map.get(name) or build_type(type_node) - - # Get the extended root operation types. - operation_types: Dict[OperationType, GraphQLNamedType] = {} - for operation_type in OperationType: - original_type = schema_kwargs[operation_type.value] - if original_type: - operation_types[operation_type] = replace_named_type(original_type) - # Then, incorporate schema definition and all schema extensions. - if schema_def: - operation_types.update(get_operation_types([schema_def])) - if schema_extensions: - operation_types.update(get_operation_types(schema_extensions)) - - # Then produce and return the kwargs for a Schema with these types. - get_operation = operation_types.get - return GraphQLSchemaKwargs( - query=get_operation(OperationType.QUERY), # type: ignore - mutation=get_operation(OperationType.MUTATION), # type: ignore - subscription=get_operation(OperationType.SUBSCRIPTION), # type: ignore - types=tuple(type_map.values()), - directives=tuple( - replace_directive(directive) for directive in schema_kwargs["directives"] - ) - + tuple(build_directive(directive) for directive in directive_defs), - description=schema_def.description.value - if schema_def and schema_def.description - else None, - extensions={}, - ast_node=schema_def or schema_kwargs["ast_node"], - extension_ast_nodes=schema_kwargs["extension_ast_nodes"] - + tuple(schema_extensions), - assume_valid=assume_valid, - ) + return build(ast_node) std_type_map: Mapping[str, Union[GraphQLNamedType, GraphQLObjectType]] = { diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index 8de96b22..e3a38f38 100644 --- a/tests/language/test_block_string_fuzz.py +++ 
b/tests/language/test_block_string_fuzz.py @@ -6,7 +6,7 @@ print_block_string, ) -from ..utils import dedent, gen_fuzz_strings +from ..utils import dedent, gen_fuzz_strings, timeout_factor def lex_value(s: str) -> str: @@ -42,7 +42,7 @@ def assert_non_printable_block_string(test_value: str) -> None: def describe_print_block_string(): @mark.slow - @mark.timeout(20) + @mark.timeout(80 * timeout_factor) def correctly_print_random_strings(): # Testing with length >7 is taking exponentially more time. However, it is # highly recommended testing with increased limit if you make any change. diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index 673f1554..feab6543 100644 --- a/tests/language/test_schema_parser.py +++ b/tests/language/test_schema_parser.py @@ -1,3 +1,5 @@ +import pickle +from copy import deepcopy from textwrap import dedent from typing import List, Optional, Tuple @@ -797,19 +799,36 @@ def directive_with_incorrect_locations(): def parses_kitchen_sink_schema(kitchen_sink_sdl): # noqa: F811 assert parse(kitchen_sink_sdl) - def can_pickle_and_unpickle_kitchen_sink_schema_ast(kitchen_sink_sdl): # noqa: F811 - import pickle - - # create a schema AST from the kitchen sink SDL - doc = parse(kitchen_sink_sdl) - # check that the schema AST can be pickled - # (particularly, there should be no recursion error) - dumped = pickle.dumps(doc) - # check that the pickle size is reasonable - assert len(dumped) < 50 * len(kitchen_sink_sdl) - loaded = pickle.loads(dumped) - # check that the un-pickled schema AST is still the same - assert loaded == doc - # check that pickling again creates the same result - dumped_again = pickle.dumps(doc) - assert dumped_again == dumped + def describe_deepcopy_and_pickle(): + def can_deep_copy_ast(kitchen_sink_sdl): # noqa: F811 + # create a schema AST from the kitchen sink SDL + doc = parse(kitchen_sink_sdl) + # make a deepcopy of the schema AST + copied_doc = deepcopy(doc) + # check that the copied 
AST is equal to the original one + assert copied_doc == doc + + def can_pickle_and_unpickle_ast(kitchen_sink_sdl): # noqa: F811 + # create a schema AST from the kitchen sink SDL + doc = parse(kitchen_sink_sdl) + # check that the schema AST can be pickled + # (particularly, there should be no recursion error) + dumped = pickle.dumps(doc) + # check that the pickle size is reasonable + assert len(dumped) < 50 * len(kitchen_sink_sdl) + loaded = pickle.loads(dumped) + # check that the un-pickled schema AST is still the same + assert loaded == doc + # check that pickling again creates the same result + dumped_again = pickle.dumps(doc) + assert dumped_again == dumped + + def can_deep_copy_pickled_ast(kitchen_sink_sdl): # noqa: F811 + # create a schema AST from the kitchen sink SDL + doc = parse(kitchen_sink_sdl) + # pickle and unpickle the schema AST + loaded_doc = pickle.loads(pickle.dumps(doc)) + # make a deepcopy of this + copied_doc = deepcopy(loaded_doc) + # check that the result is still equal to the original schema AST + assert copied_doc == doc diff --git a/tests/pyutils/test_undefined.py b/tests/pyutils/test_undefined.py index b7ad8cf6..9cd5303f 100644 --- a/tests/pyutils/test_undefined.py +++ b/tests/pyutils/test_undefined.py @@ -1,7 +1,11 @@ -from graphql.pyutils import Undefined +import pickle +from pytest import warns -def describe_invalid(): +from graphql.pyutils import Undefined, UndefinedType + + +def describe_Undefined(): def has_repr(): assert repr(Undefined) == "Undefined" @@ -26,3 +30,13 @@ def only_equal_to_itself(): false_object = False assert Undefined != false_object assert not Undefined == false_object + + def cannot_be_redefined(): + with warns(RuntimeWarning, match="Redefinition of 'Undefined'"): + redefined_undefined = UndefinedType() + assert redefined_undefined is Undefined + + def can_be_pickled(): + pickled_undefined = pickle.dumps(Undefined) + unpickled_undefined = pickle.loads(pickled_undefined) + assert unpickled_undefined is Undefined 
diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 8515de89..24973086 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1,3 +1,4 @@ +import pickle from enum import Enum from math import isnan, nan from typing import Dict, cast @@ -43,6 +44,7 @@ GraphQLScalarType, GraphQLString, GraphQLUnionType, + introspection_types, ) @@ -261,6 +263,17 @@ def rejects_a_scalar_type_with_incorrect_extension_ast_nodes(): " as a collection of ScalarTypeExtensionNode instances." ) + def pickles_a_custom_scalar_type(): + foo_type = GraphQLScalarType("Foo") + cycled_foo_type = pickle.loads(pickle.dumps(foo_type)) + assert cycled_foo_type.name == foo_type.name + assert cycled_foo_type is not foo_type + + def pickles_a_specified_scalar_type(): + cycled_int_type = pickle.loads(pickle.dumps(GraphQLInt)) + assert cycled_int_type.name == "Int" + assert cycled_int_type is GraphQLInt + def describe_type_system_fields(): def defines_a_field(): @@ -1903,3 +1916,11 @@ def fields_have_repr(): repr(GraphQLField(GraphQLList(GraphQLInt))) == ">>" ) + + +def describe_type_system_introspection_types(): + def cannot_redefine_introspection_types(): + for name, introspection_type in introspection_types.items(): + assert introspection_type.name == name + with raises(TypeError, match=f"Redefinition of reserved type '{name}'"): + introspection_type.__class__(**introspection_type.to_kwargs()) diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py index c5413803..f2a45a67 100644 --- a/tests/type/test_scalars.py +++ b/tests/type/test_scalars.py @@ -1,3 +1,4 @@ +import pickle from math import inf, nan, pi from typing import Any @@ -11,6 +12,7 @@ GraphQLFloat, GraphQLID, GraphQLInt, + GraphQLScalarType, GraphQLString, ) @@ -172,6 +174,13 @@ def serializes(): serialize([5]) assert str(exc_info.value) == "Int cannot represent non-integer value: [5]" + def cannot_be_redefined(): + with raises(TypeError, match="Redefinition of reserved type 
'Int'"): + GraphQLScalarType(name="Int") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLInt)) is GraphQLInt + def describe_graphql_float(): def parse_value(): _parse_value = GraphQLFloat.parse_value @@ -295,6 +304,13 @@ def serializes(): str(exc_info.value) == "Float cannot represent non numeric value: [5]" ) + def cannot_be_redefined(): + with raises(TypeError, match="Redefinition of reserved type 'Float'"): + GraphQLScalarType(name="Float") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLFloat)) is GraphQLFloat + def describe_graphql_string(): def parse_value(): _parse_value = GraphQLString.parse_value @@ -401,6 +417,13 @@ def __str__(self): " {'value_of': 'value_of string'}" ) + def cannot_be_redefined(): + with raises(TypeError, match="Redefinition of reserved type 'String'"): + GraphQLScalarType(name="String") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLString)) is GraphQLString + def describe_graphql_boolean(): def parse_value(): _parse_value = GraphQLBoolean.parse_value @@ -543,6 +566,13 @@ def serializes(): "Boolean cannot represent a non boolean value: {}" ) + def cannot_be_redefined(): + with raises(TypeError, match="Redefinition of reserved type 'Boolean'"): + GraphQLScalarType(name="Boolean") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLBoolean)) is GraphQLBoolean + def describe_graphql_id(): def parse_value(): _parse_value = GraphQLID.parse_value @@ -663,3 +693,10 @@ def __str__(self): with raises(GraphQLError) as exc_info: serialize(["abc"]) assert str(exc_info.value) == "ID cannot represent value: ['abc']" + + def cannot_be_redefined(): + with raises(TypeError, match="Redefinition of reserved type 'ID'"): + GraphQLScalarType(name="ID") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLID)) is GraphQLID diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index 8dfc2c48..efd44f86 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -20,6 +20,7 @@ 
GraphQLInt, GraphQLInterfaceType, GraphQLList, + GraphQLNamedType, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, @@ -331,7 +332,15 @@ def check_that_query_mutation_and_subscription_are_graphql_types(): def describe_a_schema_must_contain_uniquely_named_types(): def rejects_a_schema_which_redefines_a_built_in_type(): - FakeString = GraphQLScalarType("String") + # temporarily allow redefinition of the String scalar type + reserved_types = GraphQLNamedType.reserved_types + GraphQLScalarType.reserved_types = {} + try: + # create a redefined String scalar type + FakeString = GraphQLScalarType("String") + finally: + # protect from redefinition again + GraphQLScalarType.reserved_types = reserved_types QueryType = GraphQLObjectType( "Query", diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index 1ae7ffb5..bb0dc561 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -1,4 +1,7 @@ +import pickle +import sys from collections import namedtuple +from copy import deepcopy from typing import Union from pytest import mark, raises @@ -35,7 +38,8 @@ from graphql.utilities import build_ast_schema, build_schema, print_schema, print_type from ..fixtures import big_schema_sdl # noqa: F401 -from ..utils import dedent +from ..star_wars_schema import star_wars_schema +from ..utils import dedent, timeout_factor def cycle_sdl(sdl: str) -> str: @@ -1186,28 +1190,110 @@ def rejects_invalid_ast(): build_ast_schema({}) # type: ignore assert str(exc_info.value) == "Must provide valid Document AST." 
- # This currently does not work because of how extend_schema is implemented - @mark.skip(reason="pickling of schemas is not yet supported") - def can_pickle_and_unpickle_big_schema( - big_schema_sdl, # noqa: F811 - ): # pragma: no cover - import pickle - - # create a schema from the kitchen sink SDL - schema = build_schema(big_schema_sdl, assume_valid_sdl=True) - # check that the schema can be pickled - # (particularly, there should be no recursion error, - # or errors because of trying to pickle lambdas or local functions) - dumped = pickle.dumps(schema) - # check that the pickle size is reasonable - assert len(dumped) < 50 * len(big_schema_sdl) - loaded = pickle.loads(dumped) - - # check that the un-pickled schema is still the same - assert loaded == schema - # check that pickling again creates the same result - dumped_again = pickle.dumps(schema) - assert dumped_again == dumped - - # check that printing the unpickled schema gives the same SDL - assert cycle_sdl(print_schema(schema)) == cycle_sdl(big_schema_sdl) + def describe_deepcopy_and_pickle(): # pragma: no cover + sdl = print_schema(star_wars_schema) + + def can_deep_copy_schema(): + schema = build_schema(sdl, assume_valid_sdl=True) + # create a deepcopy of the schema + copied = deepcopy(schema) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == sdl + + def can_pickle_and_unpickle_star_wars_schema(): + # create a schema from the star wars SDL + schema = build_schema(sdl, assume_valid_sdl=True) + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(schema) + + # check that the pickle size is reasonable + assert len(dumped) < 25 * len(sdl) + loaded = pickle.loads(dumped) + + # check that printing the unpickled schema gives the same SDL + assert print_schema(loaded) == sdl + + # check that pickling again creates the same result + 
dumped = pickle.dumps(schema) + assert len(dumped) < 25 * len(sdl) + loaded = pickle.loads(dumped) + assert print_schema(loaded) == sdl + + def can_deep_copy_pickled_schema(): + # create a schema from the star wars SDL + schema = build_schema(sdl, assume_valid_sdl=True) + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == sdl + + @mark.slow + def describe_deepcopy_and_pickle_big(): # pragma: no cover + @mark.timeout(20 * timeout_factor) + def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 + # use our printing conventions + big_schema_sdl = cycle_sdl(big_schema_sdl) + + # create a schema from the big SDL + schema = build_schema(big_schema_sdl, assume_valid_sdl=True) + # create a deepcopy of the schema + copied = deepcopy(schema) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == big_schema_sdl + + @mark.timeout(60 * timeout_factor) + def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 + # use our printing conventions + big_schema_sdl = cycle_sdl(big_schema_sdl) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # create a schema from the big SDL + schema = build_schema(big_schema_sdl, assume_valid_sdl=True) + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(schema) + + # check that the pickle size is reasonable + assert len(dumped) < 25 * len(big_schema_sdl) + loaded = pickle.loads(dumped) + + # check that printing the unpickled schema gives the same SDL + assert print_schema(loaded) == big_schema_sdl + + # check that pickling again creates the same result + dumped = pickle.dumps(schema) + assert len(dumped) < 25 
* len(big_schema_sdl) + loaded = pickle.loads(dumped) + assert print_schema(loaded) == big_schema_sdl + + finally: + sys.setrecursionlimit(limit) + + @mark.timeout(60 * timeout_factor) + def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 + # use our printing conventions + big_schema_sdl = cycle_sdl(big_schema_sdl) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # create a schema from the big SDL + schema = build_schema(big_schema_sdl, assume_valid_sdl=True) + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == big_schema_sdl + + finally: + sys.setrecursionlimit(limit) diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index 96ec968f..878ac0fb 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -1,12 +1,20 @@ +import pickle +import sys +from copy import deepcopy + +from pytest import mark + from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import ( IntrospectionQuery, build_client_schema, + build_schema, introspection_from_schema, print_schema, ) -from ..utils import dedent +from ..fixtures import big_schema_introspection_result, big_schema_sdl # noqa: F401 +from ..utils import dedent, timeout_factor def introspection_to_sdl(introspection: IntrospectionQuery) -> str: @@ -60,3 +68,109 @@ def converts_a_simple_schema_without_description(): } """ ) + + def describe_deepcopy_and_pickle(): # pragma: no cover + # introspect the schema + introspected_schema = introspection_from_schema(schema) + introspection_size = len(str(introspected_schema)) + + def can_deep_copy_schema(): + # create a deepcopy of the schema + 
copied = deepcopy(schema) + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == introspected_schema + + def can_pickle_and_unpickle_schema(): + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(schema) + + # check that the pickle size is reasonable + assert len(dumped) < 5 * introspection_size + loaded = pickle.loads(dumped) + + # check that introspecting the unpickled schema gives the same result + assert introspection_from_schema(loaded) == introspected_schema + + # check that pickling again creates the same result + dumped = pickle.dumps(schema) + assert len(dumped) < 5 * introspection_size + loaded = pickle.loads(dumped) + assert introspection_from_schema(loaded) == introspected_schema + + def can_deep_copy_pickled_schema(): + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == introspected_schema + + @mark.slow + def describe_deepcopy_and_pickle_big(): # pragma: no cover + @mark.timeout(20 * timeout_factor) + def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 + # introspect the original big schema + big_schema = build_schema(big_schema_sdl) + expected_introspection = introspection_from_schema(big_schema) + + # create a deepcopy of the schema + copied = deepcopy(big_schema) + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == expected_introspection + + @mark.timeout(60 * timeout_factor) + def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 + # introspect the original big schema + big_schema = build_schema(big_schema_sdl) + expected_introspection = 
introspection_from_schema(big_schema) + size_introspection = len(str(expected_introspection)) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(big_schema) + + # check that the pickle size is reasonable + assert len(dumped) < 5 * size_introspection + loaded = pickle.loads(dumped) + + # check that introspecting the pickled schema gives the same result + assert introspection_from_schema(loaded) == expected_introspection + + # check that pickling again creates the same result + dumped = pickle.dumps(loaded) + assert len(dumped) < 5 * size_introspection + loaded = pickle.loads(dumped) + + # check that introspecting the re-pickled schema gives the same result + assert introspection_from_schema(loaded) == expected_introspection + + finally: + sys.setrecursionlimit(limit) + + @mark.timeout(60 * timeout_factor) + def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 + # introspect the original big schema + big_schema = build_schema(big_schema_sdl) + expected_introspection = introspection_from_schema(big_schema) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(big_schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == expected_introspection + + finally: + sys.setrecursionlimit(limit) diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 4bc5a266..1d60aa41 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -691,7 +691,7 @@ def prints_introspection_schema(): 
mutationType: __Type """ - If this server support subscription, the type that subscription operations will be rooted at. + If this server supports subscription, the type that subscription operations will be rooted at. """ subscriptionType: __Type diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 7f75b8eb..5b038ca2 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -7,7 +7,7 @@ from graphql.language import Lexer, Source, TokenKind from graphql.utilities import strip_ignored_characters -from ..utils import dedent, gen_fuzz_strings +from ..utils import dedent, gen_fuzz_strings, timeout_factor ignored_tokens = [ @@ -228,7 +228,7 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): ).to_stay_the_same() @mark.slow - @mark.timeout(20) + @mark.timeout(80 * timeout_factor) def strips_ignored_characters_inside_random_block_strings(): # Testing with length >7 is taking exponentially more time. However it is # highly recommended to test with increased limit if you make any change. 
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index d6392286..7657950a 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,7 +1,13 @@ """Test utilities""" +from platform import python_implementation + from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings -__all__ = ["dedent", "gen_fuzz_strings"] +# some tests can take much longer on PyPy +timeout_factor = 4 if python_implementation() == "PyPy" else 1 + + +__all__ = ["dedent", "gen_fuzz_strings", "timeout_factor"] diff --git a/tox.ini b/tox.ini index 17bb0ae0..29f7de91 100644 --- a/tox.ini +++ b/tox.ini @@ -59,4 +59,6 @@ deps = pytest-timeout>=2,<3 py37: typing-extensions>=4.3,<5 commands = + # to also run the time-consuming tests: tox -e py310 -- --run-slow + # to run the benchmarks: tox -e py310 -- -k benchmarks --benchmark-enable pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From ef0ac89a76114d015eb6b2491a89a626f17773fb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 2 Nov 2022 17:56:38 +0100 Subject: [PATCH 050/230] Support Python 3.11 --- .github/workflows/lint.yml | 4 ++-- .github/workflows/publish.yml | 4 ++-- .github/workflows/test.yml | 2 +- .readthedocs.yaml | 4 ++-- docs/conf.py | 1 + pyproject.toml | 7 ++++--- tests/execution/test_nonnull.py | 3 +++ tests/pyutils/test_is_awaitable.py | 9 +++++++-- tox.ini | 15 ++++++++------- 9 files changed, 30 insertions(+), 19 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 2185a66b..fb2255e7 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,10 +9,10 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: 3.10 - name: Install dependencies run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 8f2ac627..4772236c 100644 --- 
a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,10 +12,10 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: 3.10 - name: Build wheel and source tarball run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7210c219..8686eb1d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', 'pypy3.9'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9'] steps: - uses: actions/checkout@v3 diff --git a/.readthedocs.yaml b/.readthedocs.yaml index bb8e1846..69c62c18 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -5,9 +5,9 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.9" + python: "3.10" sphinx: configuration: docs/conf.py diff --git a/docs/conf.py b/docs/conf.py index b32115c3..f9a807c9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -138,6 +138,7 @@ EnterLeaveVisitor FormattedSourceLocation GraphQLAbstractType +GraphQLErrorExtensions GraphQLOutputType asyncio.events.AbstractEventLoop graphql.execution.map_async_iterator.MapAsyncIterator diff --git a/pyproject.toml b/pyproject.toml index 2b11664b..4466070a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,8 @@ classifiers = [ "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10" + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11" ] packages = [ { include = "graphql", from = "src" }, @@ -80,7 +81,7 @@ sphinx_rtd_theme = ">=1,<2" exclude_dirs = ["tests"] [tool.black] -target-version = ["py37", "py38", "py39", "py310"] +target-version = ["py37", "py38", "py39", "py310", "py311"] [tool.coverage.run] branch = true @@ -119,7 +120,7 @@ force_single_line = 
false lines_after_imports = 2 [tool.mypy] -python_version = 3.9 +python_version = "3.10" check_untyped_defs = true no_implicit_optional = true strict_optional = true diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index cee46c0c..6d0f2993 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -1,3 +1,4 @@ +import asyncio import re from typing import Any, Awaitable, cast @@ -482,6 +483,7 @@ def describe_nulls_the_top_level_if_non_nullable_field(): @mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) + await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 assert result == ( None, [ @@ -497,6 +499,7 @@ async def returns_null(): @mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) + await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 assert result == ( None, [ diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index 896697d5..dd82a3dc 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -1,5 +1,6 @@ import asyncio from inspect import isawaitable +from sys import version_info as python_version from pytest import mark @@ -75,8 +76,12 @@ async def some_coroutine(): assert isawaitable(some_coroutine()) assert is_awaitable(some_coroutine()) - @mark.filterwarnings("ignore::Warning") # Deprecation and Runtime - def recognizes_an_old_style_coroutine(): + @mark.filterwarnings("ignore::Warning") # Deprecation and Runtime warnings + @mark.skipif( + python_version >= (3, 11), + reason="Generator-based coroutines not supported any more since Python 3.11", + ) + def recognizes_an_old_style_coroutine(): # pragma: no cover @asyncio.coroutine def some_old_style_coroutine(): yield False # pragma: no cover diff --git a/tox.ini b/tox.ini index 29f7de91..b2723428 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = 
py3{7,8,9,10}, pypy39, black, flake8, isort, mypy, docs +envlist = py3{7,8,9,10,11}, pypy39, black, flake8, isort, mypy, docs isolated_build = true [gh-actions] @@ -8,18 +8,19 @@ python = 3.7: py37 3.8: py38 3.9: py39 - 3.10: py310 + 3.10: py310=5,<6 flake8-bandit>=4.1,<6 @@ -28,13 +29,13 @@ commands = flake8 src tests [testenv:isort] -basepython = python3.9 +basepython = python3.10 deps = isort>=5.10,<6 commands = isort src tests --check-only [testenv:mypy] -basepython = python3.9 +basepython = python3.10 deps = mypy==0.982 pytest>=7.1,<8 @@ -42,7 +43,7 @@ commands = mypy src tests [testenv:docs] -basepython = python3.9 +basepython = python3.10 deps = sphinx>=5.2.1,<6 sphinx_rtd_theme>=1,<2 From 12e29fe1b4da2410b2b4a75c61f66d823d52d25f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 2 Nov 2022 19:58:25 +0100 Subject: [PATCH 051/230] Fix GitHub actions --- .github/workflows/lint.yml | 2 +- .github/workflows/publish.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index fb2255e7..106d22bd 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.10 + python-version: '3.10' - name: Install dependencies run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4772236c..f1f39421 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -15,7 +15,7 @@ jobs: - name: Set up Python 3.10 uses: actions/setup-python@v4 with: - python-version: 3.10 + python-version: '3.10' - name: Build wheel and source tarball run: | From e4c26df19eb5d03b9ddbb95220fb6e7a0413a084 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 2 Nov 2022 21:53:25 +0100 Subject: [PATCH 052/230] Use type guards (#183) --- pyproject.toml | 4 +- src/graphql/execution/collect_fields.py | 5 +- src/graphql/execution/execute.py | 22 
+++--- src/graphql/execution/values.py | 4 +- src/graphql/language/parser.py | 5 +- src/graphql/language/predicates.py | 30 +++++--- src/graphql/language/source.py | 8 +- src/graphql/pyutils/is_awaitable.py | 12 ++- src/graphql/pyutils/is_iterable.py | 10 ++- src/graphql/type/definition.py | 77 ++++++++++--------- src/graphql/type/directives.py | 8 +- src/graphql/type/scalars.py | 8 +- src/graphql/type/schema.py | 42 +++------- src/graphql/type/validate.py | 7 +- src/graphql/utilities/ast_from_value.py | 10 +-- src/graphql/utilities/build_client_schema.py | 5 -- src/graphql/utilities/coerce_input_value.py | 6 -- src/graphql/utilities/extend_schema.py | 8 +- .../utilities/find_breaking_changes.py | 65 ++++++++-------- .../utilities/lexicographic_sort_schema.py | 9 +-- src/graphql/utilities/print_schema.py | 11 +-- .../utilities/strip_ignored_characters.py | 3 +- src/graphql/utilities/type_comparators.py | 30 ++------ src/graphql/utilities/type_info.py | 40 +++------- src/graphql/utilities/value_from_ast.py | 6 -- .../validation/rules/custom/no_deprecated.py | 10 +-- .../rules/fields_on_correct_type.py | 14 +--- .../validation/rules/known_type_names.py | 14 +++- .../rules/overlapping_fields_can_be_merged.py | 12 +-- .../rules/possible_fragment_spreads.py | 10 +-- .../rules/unique_directives_per_location.py | 3 - .../rules/unique_enum_value_names.py | 9 +-- .../rules/values_of_correct_type.py | 3 - .../rules/variables_in_allowed_position.py | 5 +- tests/execution/test_lists.py | 3 +- .../test_strip_ignored_characters.py | 15 ++++ tox.ini | 6 +- 37 files changed, 233 insertions(+), 306 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4466070a..d7727970 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ Changelog = "https://github.com/graphql-python/graphql-core/releases" [tool.poetry.dependencies] python = "^3.7" typing-extensions = [ - { version = "^4.3", python = "<3.8" } + { version = "^4.4", python = "<3.10" } ] 
[tool.poetry.group.test] @@ -138,7 +138,7 @@ module = [ disallow_untyped_defs = false [tool.pytest.ini_options] -minversion = "7.1" +minversion = "7.2" # Only run benchmarks as tests. # To actually run the benchmarks, use --benchmark-enable on the command line. # To run the slow tests (fuzzing), add --run-slow on the command line. diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 1b5934ec..04eefe21 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -1,5 +1,5 @@ from collections import defaultdict -from typing import Any, Dict, List, Set, Union, cast +from typing import Any, Dict, List, Set, Union from ..language import ( FieldNode, @@ -9,7 +9,6 @@ SelectionSetNode, ) from ..type import ( - GraphQLAbstractType, GraphQLIncludeDirective, GraphQLObjectType, GraphQLSchema, @@ -166,7 +165,7 @@ def does_fragment_condition_match( if conditional_type is type_: return True if is_abstract_type(conditional_type): - return schema.is_sub_type(cast(GraphQLAbstractType, conditional_type), type_) + return schema.is_sub_type(conditional_type, type_) return False diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 49651c1b..08d7df05 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -21,6 +21,10 @@ from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard from ..error import GraphQLError, GraphQLFormattedError, located_error from ..language import ( @@ -39,7 +43,6 @@ GraphQLFieldResolver, GraphQLLeafType, GraphQLList, - GraphQLNonNull, GraphQLObjectType, GraphQLOutputType, GraphQLResolveInfo, @@ -187,7 +190,9 @@ class ExecutionContext: errors: List[GraphQLError] middleware_manager: Optional[MiddlewareManager] - is_awaitable = 
staticmethod(default_is_awaitable) + is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( + default_is_awaitable # type: ignore + ) def __init__( self, @@ -607,7 +612,7 @@ def complete_value( # result is null. if is_non_null_type(return_type): completed = self.complete_value( - cast(GraphQLNonNull, return_type).of_type, + return_type.of_type, field_nodes, info, path, @@ -627,25 +632,25 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - cast(GraphQLList, return_type), field_nodes, info, path, result + return_type, field_nodes, info, path, result ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, # returning null if serialization is not possible. if is_leaf_type(return_type): - return self.complete_leaf_value(cast(GraphQLLeafType, return_type), result) + return self.complete_leaf_value(return_type, result) # If field type is an abstract type, Interface or Union, determine the runtime # Object type and complete for that type. if is_abstract_type(return_type): return self.complete_abstract_value( - cast(GraphQLAbstractType, return_type), field_nodes, info, path, result + return_type, field_nodes, info, path, result ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - cast(GraphQLObjectType, return_type), field_nodes, info, path, result + return_type, field_nodes, info, path, result ) # Not reachable. All possible output types have been considered. @@ -684,7 +689,6 @@ async def async_iterable_to_list( "Expected Iterable, but did not find one for field" f" '{info.parent_type.name}.{info.field_name}'." 
) - result = cast(Iterable[Any], result) # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine @@ -876,8 +880,6 @@ def ensure_valid_runtime_type( field_nodes, ) - runtime_type = cast(GraphQLObjectType, runtime_type) - if not self.schema.is_sub_type(return_type, runtime_type): raise GraphQLError( f"Runtime Object type '{runtime_type.name}' is not a possible" diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 11dc5638..2053d61f 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Collection, Dict, List, Optional, Union, cast +from typing import Any, Callable, Collection, Dict, List, Optional, Union from ..error import GraphQLError from ..language import ( @@ -21,7 +21,6 @@ from ..type import ( GraphQLDirective, GraphQLField, - GraphQLInputType, GraphQLSchema, is_input_type, is_non_null_type, @@ -92,7 +91,6 @@ def coerce_variable_values( ) continue - var_type = cast(GraphQLInputType, var_type) if var_name not in inputs: if var_def_node.default_value: coerced_values[var_name] = value_from_ast( diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 7cd8e8c2..72c1be7c 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -201,9 +201,8 @@ def __init__( no_location: bool = False, allow_legacy_fragment_variables: bool = False, ): - source = ( - cast(Source, source) if is_source(source) else Source(cast(str, source)) - ) + if not is_source(source): + source = Source(cast(str, source)) self._lexer = Lexer(source) self._no_location = no_location diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index ebd9e5ea..be365003 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -1,3 +1,5 @@ +from typing import Union + from .ast import ( 
DefinitionNode, ExecutableDefinitionNode, @@ -15,6 +17,12 @@ ) +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + __all__ = [ "is_definition_node", "is_executable_definition_node", @@ -29,27 +37,27 @@ ] -def is_definition_node(node: Node) -> bool: +def is_definition_node(node: Node) -> TypeGuard[DefinitionNode]: """Check whether the given node represents a definition.""" return isinstance(node, DefinitionNode) -def is_executable_definition_node(node: Node) -> bool: +def is_executable_definition_node(node: Node) -> TypeGuard[ExecutableDefinitionNode]: """Check whether the given node represents an executable definition.""" return isinstance(node, ExecutableDefinitionNode) -def is_selection_node(node: Node) -> bool: +def is_selection_node(node: Node) -> TypeGuard[SelectionNode]: """Check whether the given node represents a selection.""" return isinstance(node, SelectionNode) -def is_value_node(node: Node) -> bool: +def is_value_node(node: Node) -> TypeGuard[ValueNode]: """Check whether the given node represents a value.""" return isinstance(node, ValueNode) -def is_const_value_node(node: Node) -> bool: +def is_const_value_node(node: Node) -> TypeGuard[ValueNode]: """Check whether the given node represents a constant value.""" return is_value_node(node) and ( any(is_const_value_node(value) for value in node.values) @@ -60,26 +68,28 @@ def is_const_value_node(node: Node) -> bool: ) -def is_type_node(node: Node) -> bool: +def is_type_node(node: Node) -> TypeGuard[TypeNode]: """Check whether the given node represents a type.""" return isinstance(node, TypeNode) -def is_type_system_definition_node(node: Node) -> bool: +def is_type_system_definition_node(node: Node) -> TypeGuard[TypeSystemDefinitionNode]: """Check whether the given node represents a type system definition.""" return isinstance(node, TypeSystemDefinitionNode) -def is_type_definition_node(node: Node) -> bool: +def 
is_type_definition_node(node: Node) -> TypeGuard[TypeDefinitionNode]: """Check whether the given node represents a type definition.""" return isinstance(node, TypeDefinitionNode) -def is_type_system_extension_node(node: Node) -> bool: +def is_type_system_extension_node( + node: Node, +) -> TypeGuard[Union[SchemaExtensionNode, TypeExtensionNode]]: """Check whether the given node represents a type system extension.""" return isinstance(node, (SchemaExtensionNode, TypeExtensionNode)) -def is_type_extension_node(node: Node) -> bool: +def is_type_extension_node(node: Node) -> TypeGuard[TypeExtensionNode]: """Check whether the given node represents a type extension.""" return isinstance(node, TypeExtensionNode) diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index 1a71b67d..10f0d05d 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -3,6 +3,12 @@ from .location import SourceLocation +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + __all__ = ["Source", "is_source"] DEFAULT_NAME = "GraphQL request" @@ -66,7 +72,7 @@ def __ne__(self, other: Any) -> bool: return not self == other -def is_source(source: Any) -> bool: +def is_source(source: Any) -> TypeGuard[Source]: """Test if the given value is a Source object. For internal use only. 
diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index 5427ab65..cc927f7f 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -1,6 +1,12 @@ import inspect from types import CoroutineType, GeneratorType -from typing import Any +from typing import Any, Awaitable + + +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard __all__ = ["is_awaitable"] @@ -8,7 +14,7 @@ CO_ITERABLE_COROUTINE = inspect.CO_ITERABLE_COROUTINE -def is_awaitable(value: Any) -> bool: +def is_awaitable(value: Any) -> TypeGuard[Awaitable]: """Return true if object can be passed to an ``await`` expression. Instead of testing if the object is an instance of abc.Awaitable, it checks @@ -18,7 +24,7 @@ def is_awaitable(value: Any) -> bool: # check for coroutine objects isinstance(value, CoroutineType) # check for old-style generator based coroutine objects - or isinstance(value, GeneratorType) + or isinstance(value, GeneratorType) # for Python < 3.11 and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) # check for other awaitables (e.g. 
futures) or hasattr(value, "__await__") diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index 05d9de32..f2f04bc9 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -2,6 +2,12 @@ from typing import Any, ByteString, Collection, Iterable, Mapping, Text, ValuesView +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + __all__ = ["is_collection", "is_iterable"] collection_types: Any = [Collection] @@ -16,14 +22,14 @@ not_iterable_types: Any = (ByteString, Mapping, Text) -def is_collection(value: Any) -> bool: +def is_collection(value: Any) -> TypeGuard[Collection]: """Check if value is a collection, but not a string or a mapping.""" return isinstance(value, collection_types) and not isinstance( value, not_iterable_types ) -def is_iterable(value: Any) -> bool: +def is_iterable(value: Any) -> TypeGuard[Iterable]: """Check if value is an iterable, but not a string or a mapping.""" return isinstance(value, iterable_types) and not isinstance( value, not_iterable_types diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index d9fe289d..464aada7 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -65,10 +65,15 @@ from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard if TYPE_CHECKING: from .schema import GraphQLSchema # noqa: F401 + __all__ = [ "is_type", "is_scalar_type", @@ -171,14 +176,14 @@ class GraphQLType: # There are predicates for each kind of GraphQL type. 
-def is_type(type_: Any) -> bool: +def is_type(type_: Any) -> TypeGuard[GraphQLType]: return isinstance(type_, GraphQLType) def assert_type(type_: Any) -> GraphQLType: if not is_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL type.") - return cast(GraphQLType, type_) + return type_ # These types wrap and modify other types @@ -196,20 +201,20 @@ def __init__(self, type_: GT) -> None: raise TypeError( f"Can only create a wrapper for a GraphQLType, but got: {type_}." ) - self.of_type = type_ + self.of_type = cast(GT, type_) def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.of_type!r}>" -def is_wrapping_type(type_: Any) -> bool: +def is_wrapping_type(type_: Any) -> TypeGuard[GraphQLWrappingType]: return isinstance(type_, GraphQLWrappingType) def assert_wrapping_type(type_: Any) -> GraphQLWrappingType: if not is_wrapping_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL wrapping type.") - return cast(GraphQLWrappingType, type_) + return type_ class GraphQLNamedTypeKwargs(TypedDict, total=False): @@ -475,14 +480,14 @@ def __copy__(self) -> GraphQLScalarType: # pragma: no cover return self.__class__(**self.to_kwargs()) -def is_scalar_type(type_: Any) -> bool: +def is_scalar_type(type_: Any) -> TypeGuard[GraphQLScalarType]: return isinstance(type_, GraphQLScalarType) def assert_scalar_type(type_: Any) -> GraphQLScalarType: if not is_scalar_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL Scalar type.") - return cast(GraphQLScalarType, type_) + return type_ GraphQLArgumentMap = Dict[str, "GraphQLArgument"] @@ -872,14 +877,14 @@ def interfaces(self) -> Tuple["GraphQLInterfaceType", ...]: return tuple(interfaces) -def is_object_type(type_: Any) -> bool: +def is_object_type(type_: Any) -> TypeGuard[GraphQLObjectType]: return isinstance(type_, GraphQLObjectType) def assert_object_type(type_: Any) -> GraphQLObjectType: if not is_object_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL Object type.") - 
return cast(GraphQLObjectType, type_) + return type_ class GraphQLInterfaceTypeKwargs(GraphQLNamedTypeKwargs, total=False): @@ -1006,14 +1011,14 @@ def interfaces(self) -> Tuple["GraphQLInterfaceType", ...]: return tuple(interfaces) -def is_interface_type(type_: Any) -> bool: +def is_interface_type(type_: Any) -> TypeGuard[GraphQLInterfaceType]: return isinstance(type_, GraphQLInterfaceType) def assert_interface_type(type_: Any) -> GraphQLInterfaceType: if not is_interface_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL Interface type.") - return cast(GraphQLInterfaceType, type_) + return type_ class GraphQLUnionTypeKwargs(GraphQLNamedTypeKwargs, total=False): @@ -1106,14 +1111,14 @@ def types(self) -> Tuple[GraphQLObjectType, ...]: return tuple(types) -def is_union_type(type_: Any) -> bool: +def is_union_type(type_: Any) -> TypeGuard[GraphQLUnionType]: return isinstance(type_, GraphQLUnionType) def assert_union_type(type_: Any) -> GraphQLUnionType: if not is_union_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL Union type.") - return cast(GraphQLUnionType, type_) + return type_ GraphQLEnumValueMap = Dict[str, "GraphQLEnumValue"] @@ -1293,14 +1298,14 @@ def parse_literal( ) -def is_enum_type(type_: Any) -> bool: +def is_enum_type(type_: Any) -> TypeGuard[GraphQLEnumType]: return isinstance(type_, GraphQLEnumType) def assert_enum_type(type_: Any) -> GraphQLEnumType: if not is_enum_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL Enum type.") - return cast(GraphQLEnumType, type_) + return type_ def did_you_mean_enum_value(enum_type: GraphQLEnumType, unknown_value_str: str) -> str: @@ -1500,14 +1505,14 @@ def fields(self) -> GraphQLInputFieldMap: } -def is_input_object_type(type_: Any) -> bool: +def is_input_object_type(type_: Any) -> TypeGuard[GraphQLInputObjectType]: return isinstance(type_, GraphQLInputObjectType) def assert_input_object_type(type_: Any) -> GraphQLInputObjectType: if not is_input_object_type(type_): 
raise TypeError(f"Expected {type_} to be a GraphQL Input Object type.") - return cast(GraphQLInputObjectType, type_) + return type_ class GraphQLInputFieldKwargs(TypedDict, total=False): @@ -1626,14 +1631,14 @@ def __str__(self) -> str: return f"[{self.of_type}]" -def is_list_type(type_: Any) -> bool: +def is_list_type(type_: Any) -> TypeGuard[GraphQLList]: return isinstance(type_, GraphQLList) def assert_list_type(type_: Any) -> GraphQLList: if not is_list_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL List type.") - return cast(GraphQLList, type_) + return type_ GNT = TypeVar("GNT", bound="GraphQLNullableType") @@ -1671,14 +1676,14 @@ def __str__(self) -> str: return f"{self.of_type}!" -def is_non_null_type(type_: Any) -> bool: +def is_non_null_type(type_: Any) -> TypeGuard[GraphQLNonNull]: return isinstance(type_, GraphQLNonNull) def assert_non_null_type(type_: Any) -> GraphQLNonNull: if not is_non_null_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL Non-Null type.") - return cast(GraphQLNonNull, type_) + return type_ # These types can all accept null as a value. 
@@ -1704,14 +1709,14 @@ def assert_non_null_type(type_: Any) -> GraphQLNonNull: ] -def is_nullable_type(type_: Any) -> bool: +def is_nullable_type(type_: Any) -> TypeGuard[GraphQLNullableType]: return isinstance(type_, graphql_nullable_types) def assert_nullable_type(type_: Any) -> GraphQLNullableType: if not is_nullable_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL nullable type.") - return cast(GraphQLNullableType, type_) + return type_ @overload @@ -1734,7 +1739,6 @@ def get_nullable_type( ) -> Optional[GraphQLNullableType]: """Unwrap possible non-null type""" if is_non_null_type(type_): - type_ = cast(GraphQLNonNull, type_) type_ = type_.of_type return cast(Optional[GraphQLNullableType], type_) @@ -1748,7 +1752,7 @@ def get_nullable_type( ] -def is_input_type(type_: Any) -> bool: +def is_input_type(type_: Any) -> TypeGuard[GraphQLInputType]: return isinstance(type_, graphql_input_types) or ( isinstance(type_, GraphQLWrappingType) and is_input_type(type_.of_type) ) @@ -1757,7 +1761,7 @@ def is_input_type(type_: Any) -> bool: def assert_input_type(type_: Any) -> GraphQLInputType: if not is_input_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL input type.") - return cast(GraphQLInputType, type_) + return type_ # These types may be used as output types as the result of fields. @@ -1780,7 +1784,7 @@ def assert_input_type(type_: Any) -> GraphQLInputType: ] -def is_output_type(type_: Any) -> bool: +def is_output_type(type_: Any) -> TypeGuard[GraphQLOutputType]: return isinstance(type_, graphql_output_types) or ( isinstance(type_, GraphQLWrappingType) and is_output_type(type_.of_type) ) @@ -1789,7 +1793,7 @@ def is_output_type(type_: Any) -> bool: def assert_output_type(type_: Any) -> GraphQLOutputType: if not is_output_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL output type.") - return cast(GraphQLOutputType, type_) + return type_ # These named types do not include modifiers like List or NonNull. 
@@ -1807,14 +1811,14 @@ def assert_output_type(type_: Any) -> GraphQLOutputType: ] -def is_named_type(type_: Any) -> bool: +def is_named_type(type_: Any) -> TypeGuard[GraphQLNamedType]: return isinstance(type_, GraphQLNamedType) def assert_named_type(type_: Any) -> GraphQLNamedType: if not is_named_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL named type.") - return cast(GraphQLNamedType, type_) + return type_ @overload @@ -1832,7 +1836,6 @@ def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: if type_: unwrapped_type = type_ while is_wrapping_type(unwrapped_type): - unwrapped_type = cast(GraphQLWrappingType, unwrapped_type) unwrapped_type = unwrapped_type.of_type return cast(GraphQLNamedType, unwrapped_type) return None @@ -1845,14 +1848,14 @@ def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: GraphQLLeafType = Union[GraphQLScalarType, GraphQLEnumType] -def is_leaf_type(type_: Any) -> bool: +def is_leaf_type(type_: Any) -> TypeGuard[GraphQLLeafType]: return isinstance(type_, graphql_leaf_types) def assert_leaf_type(type_: Any) -> GraphQLLeafType: if not is_leaf_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL leaf type.") - return cast(GraphQLLeafType, type_) + return type_ # These types may describe the parent context of a selection set. @@ -1862,14 +1865,14 @@ def assert_leaf_type(type_: Any) -> GraphQLLeafType: GraphQLCompositeType = Union[GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType] -def is_composite_type(type_: Any) -> bool: +def is_composite_type(type_: Any) -> TypeGuard[GraphQLCompositeType]: return isinstance(type_, graphql_composite_types) def assert_composite_type(type_: Any) -> GraphQLType: if not is_composite_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL composite type.") - return cast(GraphQLType, type_) + return type_ # These types may describe abstract types. 
@@ -1879,11 +1882,11 @@ def assert_composite_type(type_: Any) -> GraphQLType: GraphQLAbstractType = Union[GraphQLInterfaceType, GraphQLUnionType] -def is_abstract_type(type_: Any) -> bool: +def is_abstract_type(type_: Any) -> TypeGuard[GraphQLAbstractType]: return isinstance(type_, graphql_abstract_types) def assert_abstract_type(type_: Any) -> GraphQLAbstractType: if not is_abstract_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL composite type.") - return cast(GraphQLAbstractType, type_) + return type_ diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 714b5b67..a460bb16 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -13,6 +13,10 @@ from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard __all__ = [ "is_directive", @@ -150,7 +154,7 @@ def __copy__(self) -> GraphQLDirective: # pragma: no cover return self.__class__(**self.to_kwargs()) -def is_directive(directive: Any) -> bool: +def is_directive(directive: Any) -> TypeGuard[GraphQLDirective]: """Test if the given value is a GraphQL directive.""" return isinstance(directive, GraphQLDirective) @@ -158,7 +162,7 @@ def is_directive(directive: Any) -> bool: def assert_directive(directive: Any) -> GraphQLDirective: if not is_directive(directive): raise TypeError(f"Expected {inspect(directive)} to be a GraphQL directive.") - return cast(GraphQLDirective, directive) + return directive # Used to conditionally include fields or fragments. 
diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 3f7263c1..67f0b6b7 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -14,6 +14,12 @@ from .definition import GraphQLNamedType, GraphQLScalarType +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + __all__ = [ "is_specified_scalar_type", "specified_scalar_types", @@ -317,7 +323,7 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: } -def is_specified_scalar_type(type_: GraphQLNamedType) -> bool: +def is_specified_scalar_type(type_: GraphQLNamedType) -> TypeGuard[GraphQLScalarType]: """Check whether the given named GraphQL type is a specified scalar type.""" return type_.name in specified_scalar_types diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index c910fdc6..fe857a37 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -1,31 +1,17 @@ from __future__ import annotations # Python < 3.10 from copy import copy, deepcopy -from typing import ( - Any, - Collection, - Dict, - List, - NamedTuple, - Optional, - Set, - Tuple, - Union, - cast, -) +from typing import Any, Collection, Dict, List, NamedTuple, Optional, Set, Tuple, cast from ..error import GraphQLError from ..language import OperationType, ast from ..pyutils import inspect, is_collection, is_description from .definition import ( GraphQLAbstractType, - GraphQLInputObjectType, GraphQLInterfaceType, GraphQLNamedType, GraphQLObjectType, GraphQLType, - GraphQLUnionType, - GraphQLWrappingType, get_named_type, is_input_object_type, is_interface_type, @@ -41,6 +27,11 @@ from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + __all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "is_schema", "assert_schema"] @@ -266,11 
+257,9 @@ def __init__( type_map[type_name] = named_type if is_interface_type(named_type): - named_type = cast(GraphQLInterfaceType, named_type) # Store implementations by interface. for iface in named_type.interfaces: if is_interface_type(iface): - iface = cast(GraphQLInterfaceType, iface) if iface.name in implementations_map: implementations = implementations_map[iface.name] else: @@ -280,11 +269,9 @@ def __init__( implementations.interfaces.append(named_type) elif is_object_type(named_type): - named_type = cast(GraphQLObjectType, named_type) # Store implementations by objects. for iface in named_type.interfaces: if is_interface_type(iface): - iface = cast(GraphQLInterfaceType, iface) if iface.name in implementations_map: implementations = implementations_map[iface.name] else: @@ -356,7 +343,7 @@ def get_possible_types( ) -> List[GraphQLObjectType]: """Get list of all possible concrete types for given abstract type.""" return ( - cast(GraphQLUnionType, abstract_type).types + abstract_type.types if is_union_type(abstract_type) else self.get_implementations( cast(GraphQLInterfaceType, abstract_type) @@ -381,7 +368,7 @@ def is_sub_type( types = set() add = types.add if is_union_type(abstract_type): - for type_ in cast(GraphQLUnionType, abstract_type).types: + for type_ in abstract_type.types: add(type_.name) else: implementations = self.get_implementations( @@ -423,13 +410,9 @@ def collect_referenced_types(self, type_: GraphQLType) -> None: collect_referenced_types = self.collect_referenced_types if is_union_type(named_type): - named_type = cast(GraphQLUnionType, named_type) for member_type in named_type.types: collect_referenced_types(member_type) elif is_object_type(named_type) or is_interface_type(named_type): - named_type = cast( - Union[GraphQLObjectType, GraphQLInterfaceType], named_type - ) for interface_type in named_type.interfaces: collect_referenced_types(interface_type) @@ -438,12 +421,11 @@ def collect_referenced_types(self, type_: GraphQLType) -> None: 
for arg in field.args.values(): collect_referenced_types(arg.type) elif is_input_object_type(named_type): - named_type = cast(GraphQLInputObjectType, named_type) for field in named_type.fields.values(): collect_referenced_types(field.type) -def is_schema(schema: Any) -> bool: +def is_schema(schema: Any) -> TypeGuard[GraphQLSchema]: """Test if the given value is a GraphQL schema.""" return isinstance(schema, GraphQLSchema) @@ -451,13 +433,12 @@ def is_schema(schema: Any) -> bool: def assert_schema(schema: Any) -> GraphQLSchema: if not is_schema(schema): raise TypeError(f"Expected {inspect(schema)} to be a GraphQL schema.") - return cast(GraphQLSchema, schema) + return schema def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: """Get a copy of the given type that uses this type map.""" if is_wrapping_type(type_): - type_ = cast(GraphQLWrappingType, type_) return type_.__class__(remapped_type(type_.of_type, type_map)) type_ = cast(GraphQLNamedType, type_) return type_map.get(type_.name, type_) @@ -466,12 +447,10 @@ def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: """Change all references in the given named type to use this type map.""" if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) type_.types = [ type_map.get(member_type.name, member_type) for member_type in type_.types ] elif is_object_type(type_) or is_interface_type(type_): - type_ = cast(Union[GraphQLObjectType, GraphQLInterfaceType], type_) type_.interfaces = [ type_map.get(interface_type.name, interface_type) for interface_type in type_.interfaces @@ -487,7 +466,6 @@ def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: args[arg_name] = arg fields[field_name] = field elif is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) fields = type_.fields for field_name, field in fields.items(): field = copy(field) diff --git 
a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 2cb2d93d..1f6603d8 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -210,29 +210,24 @@ def validate_types(self) -> None: self.validate_name(type_) if is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) # Ensure fields are valid self.validate_fields(type_) # Ensure objects implement the interfaces they claim to. self.validate_interfaces(type_) elif is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) # Ensure fields are valid. self.validate_fields(type_) # Ensure interfaces implement the interfaces they claim to. self.validate_interfaces(type_) elif is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) # Ensure Unions include valid member types. self.validate_union_members(type_) elif is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) # Ensure Enums have valid values. self.validate_enum_values(type_) elif is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) # Ensure Input Object fields are valid. 
self.validate_input_fields(type_) @@ -534,7 +529,7 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: if is_non_null_type(field.type) and is_input_object_type( field.type.of_type ): - field_type = cast(GraphQLInputObjectType, field.type.of_type) + field_type = field.type.of_type cycle_index = self.field_path_index_by_type_name.get(field_type.name) self.field_path.append((field_name, field)) diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index f604c1c5..c8e75996 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -1,6 +1,6 @@ import re from math import isfinite -from typing import Any, Mapping, Optional, cast +from typing import Any, Mapping, Optional from ..language import ( BooleanValueNode, @@ -18,10 +18,7 @@ from ..pyutils import Undefined, inspect, is_iterable from ..type import ( GraphQLID, - GraphQLInputObjectType, GraphQLInputType, - GraphQLList, - GraphQLNonNull, is_enum_type, is_input_object_type, is_leaf_type, @@ -60,7 +57,6 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: """ if is_non_null_type(type_): - type_ = cast(GraphQLNonNull, type_) ast_value = ast_from_value(value, type_.of_type) if isinstance(ast_value, NullValueNode): return None @@ -77,7 +73,6 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: # Convert Python list to GraphQL list. If the GraphQLType is a list, but the value # is not a list, convert the value using the list's item type. 
if is_list_type(type_): - type_ = cast(GraphQLList, type_) item_type = type_.of_type if is_iterable(value): maybe_value_nodes = (ast_from_value(item, item_type) for item in value) @@ -90,7 +85,6 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: if is_input_object_type(type_): if value is None or not isinstance(value, Mapping): return None - type_ = cast(GraphQLInputObjectType, type_) field_items = ( (field_name, ast_from_value(value[field_name], field.type)) for field_name, field in type_.fields.items() @@ -106,7 +100,7 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: if is_leaf_type(type_): # Since value is an internally represented value, it must be serialized to an # externally represented value before converting into an AST. - serialized = type_.serialize(value) # type: ignore + serialized = type_.serialize(value) if serialized is None or serialized is Undefined: return None diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index 1f6694b1..75efdb78 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -11,13 +11,11 @@ GraphQLField, GraphQLInputField, GraphQLInputObjectType, - GraphQLInputType, GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, GraphQLObjectType, - GraphQLOutputType, GraphQLScalarType, GraphQLSchema, GraphQLType, @@ -270,7 +268,6 @@ def build_field(field_introspection: IntrospectionField) -> GraphQLField: "Introspection must provide output type for fields," f" but received: {inspect(type_)}." ) - type_ = cast(GraphQLOutputType, type_) args_introspection = field_introspection.get("args") if args_introspection is None: @@ -304,7 +301,6 @@ def build_argument( "Introspection must provide input type for arguments," f" but received: {inspect(type_)}." 
) - type_ = cast(GraphQLInputType, type_) default_value_introspection = argument_introspection.get("defaultValue") default_value = ( @@ -339,7 +335,6 @@ def build_input_value( "Introspection must provide input type for input fields," f" but received: {inspect(type_)}." ) - type_ = cast(GraphQLInputType, type_) default_value_introspection = input_value_introspection.get("defaultValue") default_value = ( diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index 8b4cbbd8..42a31120 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -11,10 +11,7 @@ suggestion_list, ) from ..type import ( - GraphQLInputObjectType, GraphQLInputType, - GraphQLList, - GraphQLNonNull, GraphQLScalarType, is_input_object_type, is_leaf_type, @@ -48,7 +45,6 @@ def coerce_input_value( """Coerce a Python value given a GraphQL Input Type.""" if is_non_null_type(type_): if input_value is not None and input_value is not Undefined: - type_ = cast(GraphQLNonNull, type_) return coerce_input_value(input_value, type_.of_type, on_error, path) on_error( path.as_list() if path else [], @@ -64,7 +60,6 @@ def coerce_input_value( return None if is_list_type(type_): - type_ = cast(GraphQLList, type_) item_type = type_.of_type if is_iterable(input_value): coerced_list: List[Any] = [] @@ -80,7 +75,6 @@ def coerce_input_value( return [coerce_input_value(input_value, item_type, on_error, path)] if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) if not isinstance(input_value, dict): on_error( path.as_list() if path else [], diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 950b8740..8cf9b614 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -244,7 +244,7 @@ def extend_schema_args( # noinspection PyTypeChecker,PyUnresolvedReferences def replace_type(self, type_: GraphQLType) -> 
GraphQLType: if is_list_type(type_): - return GraphQLList(self.replace_type(type_.of_type)) # type: ignore + return GraphQLList(self.replace_type(type_.of_type)) if is_non_null_type(type_): return GraphQLNonNull(self.replace_type(type_.of_type)) # type: ignore return self.replace_named_type(type_) # type: ignore @@ -272,22 +272,16 @@ def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: # Builtin types are not extended. return type_ if is_scalar_type(type_): - type_ = cast(GraphQLScalarType, type_) return self.extend_scalar_type(type_) if is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) return self.extend_object_type(type_) if is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) return self.extend_interface_type(type_) if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) return self.extend_union_type(type_) if is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) return self.extend_enum_type(type_) if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) return self.extend_input_object_type(type_) # Not reachable. All possible types have been considered. 
diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index bb89de3e..8c515291 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -1,16 +1,15 @@ from enum import Enum -from typing import Any, Collection, Dict, List, NamedTuple, Union, cast +from typing import Any, Collection, Dict, List, NamedTuple, Union from ..language import print_ast from ..pyutils import Undefined, inspect from ..type import ( GraphQLEnumType, GraphQLField, + GraphQLInputObjectType, GraphQLInputType, GraphQLInterfaceType, - GraphQLList, GraphQLNamedType, - GraphQLNonNull, GraphQLObjectType, GraphQLSchema, GraphQLType, @@ -223,8 +222,8 @@ def find_type_changes( def find_input_object_type_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], - new_type: Union[GraphQLObjectType, GraphQLInterfaceType], + old_type: GraphQLInputObjectType, + new_type: GraphQLInputObjectType, ) -> List[Change]: schema_changes: List[Change] = [] fields_diff = dict_diff(old_type.fields, new_type.fields) @@ -466,14 +465,12 @@ def is_change_safe_for_object_or_interface_field( # if they're both lists, make sure underlying types are compatible is_list_type(new_type) and is_change_safe_for_object_or_interface_field( - cast(GraphQLList, old_type).of_type, cast(GraphQLList, new_type).of_type + old_type.of_type, new_type.of_type ) ) or ( # moving from nullable to non-null of same underlying type is safe is_non_null_type(new_type) - and is_change_safe_for_object_or_interface_field( - old_type, cast(GraphQLNonNull, new_type).of_type - ) + and is_change_safe_for_object_or_interface_field(old_type, new_type.of_type) ) if is_non_null_type(old_type): @@ -481,22 +478,22 @@ def is_change_safe_for_object_or_interface_field( return is_non_null_type( new_type ) and is_change_safe_for_object_or_interface_field( - cast(GraphQLNonNull, old_type).of_type, - cast(GraphQLNonNull, new_type).of_type, + 
old_type.of_type, new_type.of_type ) - return ( - # if they're both named types, see if their names are equivalent - is_named_type(new_type) - and cast(GraphQLNamedType, old_type).name - == cast(GraphQLNamedType, new_type).name - ) or ( - # moving from nullable to non-null of same underlying type is safe - is_non_null_type(new_type) - and is_change_safe_for_object_or_interface_field( - old_type, cast(GraphQLNonNull, new_type).of_type + if is_named_type(old_type): + return ( + # if they're both named types, see if their names are equivalent + is_named_type(new_type) + and old_type.name == new_type.name + ) or ( + # moving from nullable to non-null of same underlying type is safe + is_non_null_type(new_type) + and is_change_safe_for_object_or_interface_field(old_type, new_type.of_type) ) - ) + + # Not reachable. All possible output types have been considered. + raise TypeError(f"Unexpected type {inspect(old_type)}") def is_change_safe_for_input_object_field_or_field_arg( @@ -508,7 +505,7 @@ def is_change_safe_for_input_object_field_or_field_arg( # if they're both lists, make sure underlying types are compatible new_type ) and is_change_safe_for_input_object_field_or_field_arg( - cast(GraphQLList, old_type).of_type, cast(GraphQLList, new_type).of_type + old_type.of_type, new_type.of_type ) if is_non_null_type(old_type): @@ -516,23 +513,25 @@ def is_change_safe_for_input_object_field_or_field_arg( # if they're both non-null, make sure the underlying types are compatible is_non_null_type(new_type) and is_change_safe_for_input_object_field_or_field_arg( - cast(GraphQLNonNull, old_type).of_type, - cast(GraphQLNonNull, new_type).of_type, + old_type.of_type, new_type.of_type ) ) or ( # moving from non-null to nullable of same underlying type is safe not is_non_null_type(new_type) and is_change_safe_for_input_object_field_or_field_arg( - cast(GraphQLNonNull, old_type).of_type, new_type + old_type.of_type, new_type ) ) - return ( - # if they're both named types, see if their 
names are equivalent - is_named_type(new_type) - and cast(GraphQLNamedType, old_type).name - == cast(GraphQLNamedType, new_type).name - ) + if is_named_type(old_type): + return ( + # if they're both named types, see if their names are equivalent + is_named_type(new_type) + and old_type.name == new_type.name + ) + + # Not reachable. All possible output types have been considered. + raise TypeError(f"Unexpected type {inspect(old_type)}") def type_kind_name(type_: GraphQLNamedType) -> str: @@ -550,7 +549,7 @@ def type_kind_name(type_: GraphQLNamedType) -> str: return "an Input type" # Not reachable. All possible output types have been considered. - raise TypeError(f"Unexpected type {inspect(type)}") + raise TypeError(f"Unexpected type {inspect(type_)}") def stringify_value(value: Any, type_: GraphQLInputType) -> str: diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index f8362489..b6f6cc8f 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -43,9 +43,9 @@ def replace_type( type_: Union[GraphQLList, GraphQLNonNull, GraphQLNamedType] ) -> Union[GraphQLList, GraphQLNonNull, GraphQLNamedType]: if is_list_type(type_): - return GraphQLList(replace_type(cast(GraphQLList, type_).of_type)) + return GraphQLList(replace_type(type_.of_type)) if is_non_null_type(type_): - return GraphQLNonNull(replace_type(cast(GraphQLNonNull, type_).of_type)) + return GraphQLNonNull(replace_type(type_.of_type)) return replace_named_type(cast(GraphQLNamedType, type_)) def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: @@ -112,7 +112,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: if is_scalar_type(type_) or is_introspection_type(type_): return type_ if is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) return GraphQLObjectType( **merge_kwargs( type_.to_kwargs(), @@ -121,7 +120,6 @@ def 
sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) ) if is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) return GraphQLInterfaceType( **merge_kwargs( type_.to_kwargs(), @@ -130,12 +128,10 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) ) if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) return GraphQLUnionType( **merge_kwargs(type_.to_kwargs(), types=lambda: sort_types(type_.types)) ) if is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) return GraphQLEnumType( **merge_kwargs( type_.to_kwargs(), @@ -151,7 +147,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) ) if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) return GraphQLInputObjectType( **merge_kwargs( type_.to_kwargs(), diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 218f3548..00faad69 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -1,4 +1,4 @@ -from typing import Any, Callable, Dict, List, Optional, Union, cast +from typing import Any, Callable, Dict, List, Optional, Union from ..language import StringValueNode, print_ast from ..language.block_string import is_printable_as_block_string @@ -24,7 +24,6 @@ is_object_type, is_scalar_type, is_specified_directive, - is_specified_scalar_type, is_union_type, ) from .ast_from_value import ast_from_value @@ -44,7 +43,7 @@ def print_introspection_schema(schema: GraphQLSchema) -> str: def is_defined_type(type_: GraphQLNamedType) -> bool: - return not is_specified_scalar_type(type_) and not is_introspection_type(type_) + return type_.name not in GraphQLNamedType.reserved_types def print_filtered_schema( @@ -114,22 +113,16 @@ def is_schema_of_common_names(schema: GraphQLSchema) -> bool: def print_type(type_: GraphQLNamedType) -> str: if is_scalar_type(type_): - type_ = cast(GraphQLScalarType, type_) return print_scalar(type_) if 
is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) return print_object(type_) if is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) return print_interface(type_) if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) return print_union(type_) if is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) return print_enum(type_) if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) return print_input_object(type_) # Not reachable. All possible types have been considered. diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 7c212733..ce21678d 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -65,7 +65,8 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: """Type description""" type Foo{"""Field description""" bar:String} ''' - source = cast(Source, source) if is_source(source) else Source(cast(str, source)) + if not is_source(source): + source = Source(cast(str, source)) body = source.body lexer = Lexer(source) diff --git a/src/graphql/utilities/type_comparators.py b/src/graphql/utilities/type_comparators.py index f0360d91..c40a7e70 100644 --- a/src/graphql/utilities/type_comparators.py +++ b/src/graphql/utilities/type_comparators.py @@ -1,11 +1,5 @@ -from typing import cast - from ..type import ( - GraphQLAbstractType, GraphQLCompositeType, - GraphQLList, - GraphQLNonNull, - GraphQLObjectType, GraphQLSchema, GraphQLType, is_abstract_type, @@ -30,12 +24,12 @@ def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool: # If either type is non-null, the other must also be non-null. 
if is_non_null_type(type_a) and is_non_null_type(type_b): # noinspection PyUnresolvedReferences - return is_equal_type(type_a.of_type, type_b.of_type) # type:ignore + return is_equal_type(type_a.of_type, type_b.of_type) # If either type is a list, the other must also be a list. if is_list_type(type_a) and is_list_type(type_b): # noinspection PyUnresolvedReferences - return is_equal_type(type_a.of_type, type_b.of_type) # type:ignore + return is_equal_type(type_a.of_type, type_b.of_type) # Otherwise the types are not equal. return False @@ -57,24 +51,18 @@ def is_type_sub_type_of( if is_non_null_type(super_type): if is_non_null_type(maybe_subtype): return is_type_sub_type_of( - schema, - cast(GraphQLNonNull, maybe_subtype).of_type, - cast(GraphQLNonNull, super_type).of_type, + schema, maybe_subtype.of_type, super_type.of_type ) return False elif is_non_null_type(maybe_subtype): # If super_type is nullable, maybe_subtype may be non-null or nullable. - return is_type_sub_type_of( - schema, cast(GraphQLNonNull, maybe_subtype).of_type, super_type - ) + return is_type_sub_type_of(schema, maybe_subtype.of_type, super_type) # If super_type type is a list, maybeSubType type must also be a list. 
if is_list_type(super_type): if is_list_type(maybe_subtype): return is_type_sub_type_of( - schema, - cast(GraphQLList, maybe_subtype).of_type, - cast(GraphQLList, super_type).of_type, + schema, maybe_subtype.of_type, super_type.of_type ) return False elif is_list_type(maybe_subtype): @@ -86,10 +74,7 @@ def is_type_sub_type_of( return ( is_abstract_type(super_type) and (is_interface_type(maybe_subtype) or is_object_type(maybe_subtype)) - and schema.is_sub_type( - cast(GraphQLAbstractType, super_type), - cast(GraphQLObjectType, maybe_subtype), - ) + and schema.is_sub_type(super_type, maybe_subtype) ) @@ -111,11 +96,9 @@ def do_types_overlap( return True if is_abstract_type(type_a): - type_a = cast(GraphQLAbstractType, type_a) if is_abstract_type(type_b): # If both types are abstract, then determine if there is any intersection # between possible concrete types of each. - type_b = cast(GraphQLAbstractType, type_b) return any( schema.is_sub_type(type_b, type_) for type_ in schema.get_possible_types(type_a) @@ -125,7 +108,6 @@ def do_types_overlap( if is_abstract_type(type_b): # Determine if former type is a possible concrete type of the latter. - type_b = cast(GraphQLAbstractType, type_b) return schema.is_sub_type(type_b, type_a) # Otherwise the types do not overlap. 
diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 11bdea58..6aa5e086 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,6 +1,6 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, Callable, List, Optional, Union, cast +from typing import Any, Callable, List, Optional from ..language import ( ArgumentNode, @@ -21,14 +21,9 @@ GraphQLArgument, GraphQLCompositeType, GraphQLDirective, - GraphQLEnumType, GraphQLEnumValue, GraphQLField, - GraphQLInputObjectType, GraphQLInputType, - GraphQLInterfaceType, - GraphQLList, - GraphQLObjectType, GraphQLOutputType, GraphQLSchema, GraphQLType, @@ -91,11 +86,11 @@ def __init__( self._get_field_def: GetFieldDefFn = get_field_def_fn or get_field_def if initial_type: if is_input_type(initial_type): - self._input_type_stack.append(cast(GraphQLInputType, initial_type)) + self._input_type_stack.append(initial_type) if is_composite_type(initial_type): - self._parent_type_stack.append(cast(GraphQLCompositeType, initial_type)) + self._parent_type_stack.append(initial_type) if is_output_type(initial_type): - self._type_stack.append(cast(GraphQLOutputType, initial_type)) + self._type_stack.append(initial_type) def get_type(self) -> Optional[GraphQLOutputType]: if self._type_stack: @@ -150,9 +145,7 @@ def leave(self, node: Node) -> None: def enter_selection_set(self, node: SelectionSetNode) -> None: named_type = get_named_type(self.get_type()) self._parent_type_stack.append( - cast(GraphQLCompositeType, named_type) - if is_composite_type(named_type) - else None + named_type if is_composite_type(named_type) else None ) def enter_field(self, node: FieldNode) -> None: @@ -179,19 +172,13 @@ def enter_inline_fragment(self, node: InlineFragmentNode) -> None: if type_condition_ast else get_named_type(self.get_type()) ) - self._type_stack.append( - cast(GraphQLOutputType, output_type) - if is_output_type(output_type) - else None - ) + 
self._type_stack.append(output_type if is_output_type(output_type) else None) enter_fragment_definition = enter_inline_fragment def enter_variable_definition(self, node: VariableDefinitionNode) -> None: input_type = type_from_ast(self._schema, node.type) - self._input_type_stack.append( - cast(GraphQLInputType, input_type) if is_input_type(input_type) else None - ) + self._input_type_stack.append(input_type if is_input_type(input_type) else None) def enter_argument(self, node: ArgumentNode) -> None: field_or_directive = self.get_directive() or self.get_field_def() @@ -209,11 +196,7 @@ def enter_argument(self, node: ArgumentNode) -> None: # noinspection PyUnusedLocal def enter_list_value(self, node: ListValueNode) -> None: list_type = get_nullable_type(self.get_input_type()) # type: ignore - item_type = ( - cast(GraphQLList, list_type).of_type - if is_list_type(list_type) - else list_type - ) + item_type = list_type.of_type if is_list_type(list_type) else list_type # List positions never have a default value. 
self._default_value_stack.append(Undefined) self._input_type_stack.append(item_type if is_input_type(item_type) else None) @@ -221,9 +204,7 @@ def enter_list_value(self, node: ListValueNode) -> None: def enter_object_field(self, node: ObjectFieldNode) -> None: object_type = get_named_type(self.get_input_type()) if is_input_object_type(object_type): - input_field = cast(GraphQLInputObjectType, object_type).fields.get( - node.name.value - ) + input_field = object_type.fields.get(node.name.value) input_field_type = input_field.type if input_field else None else: input_field = input_field_type = None @@ -237,7 +218,7 @@ def enter_object_field(self, node: ObjectFieldNode) -> None: def enter_enum_value(self, node: EnumValueNode) -> None: enum_type = get_named_type(self.get_input_type()) if is_enum_type(enum_type): - enum_value = cast(GraphQLEnumType, enum_type).values.get(node.value) + enum_value = enum_type.values.get(node.value) else: enum_value = None self._enum_value = enum_value @@ -293,7 +274,6 @@ def get_field_def( if name == "__typename" and is_composite_type(parent_type): return TypeNameMetaFieldDef if is_object_type(parent_type) or is_interface_type(parent_type): - parent_type = cast(Union[GraphQLObjectType, GraphQLInterfaceType], parent_type) return parent_type.fields.get(name) return None diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index 6fac2a8e..c5c3224a 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -9,10 +9,7 @@ ) from ..pyutils import Undefined, inspect from ..type import ( - GraphQLInputObjectType, GraphQLInputType, - GraphQLList, - GraphQLNonNull, GraphQLScalarType, is_input_object_type, is_leaf_type, @@ -70,14 +67,12 @@ def value_from_ast( if is_non_null_type(type_): if isinstance(value_node, NullValueNode): return Undefined - type_ = cast(GraphQLNonNull, type_) return value_from_ast(value_node, type_.of_type, variables) if isinstance(value_node, 
NullValueNode): return None # This is explicitly returning the value None. if is_list_type(type_): - type_ = cast(GraphQLList, type_) item_type = type_.of_type if isinstance(value_node, ListValueNode): coerced_values: List[Any] = [] @@ -103,7 +98,6 @@ def value_from_ast( if is_input_object_type(type_): if not isinstance(value_node, ObjectValueNode): return Undefined - type_ = cast(GraphQLInputObjectType, type_) coerced_obj: Dict[str, Any] = {} fields = type_.fields field_nodes = {field.name.value: field for field in value_node.fields} diff --git a/src/graphql/validation/rules/custom/no_deprecated.py b/src/graphql/validation/rules/custom/no_deprecated.py index 14e9d0e0..94d76221 100644 --- a/src/graphql/validation/rules/custom/no_deprecated.py +++ b/src/graphql/validation/rules/custom/no_deprecated.py @@ -1,8 +1,8 @@ -from typing import Any, cast +from typing import Any from ....error import GraphQLError from ....language import ArgumentNode, EnumValueNode, FieldNode, ObjectFieldNode -from ....type import GraphQLInputObjectType, get_named_type, is_input_object_type +from ....type import get_named_type, is_input_object_type from .. 
import ValidationRule @@ -69,14 +69,12 @@ def enter_object_field(self, node: ObjectFieldNode, *_args: Any) -> None: context = self.context input_object_def = get_named_type(context.get_parent_input_type()) if is_input_object_type(input_object_def): - input_field_def = cast(GraphQLInputObjectType, input_object_def).fields.get( - node.name.value - ) + input_field_def = input_object_def.fields.get(node.name.value) if input_field_def: deprecation_reason = input_field_def.deprecation_reason if deprecation_reason is not None: field_name = node.name.value - input_object_name = input_object_def.name # type: ignore + input_object_name = input_object_def.name self.report_error( GraphQLError( f"The input field {input_object_name}.{field_name}" diff --git a/src/graphql/validation/rules/fields_on_correct_type.py b/src/graphql/validation/rules/fields_on_correct_type.py index a333a901..3af8b4ca 100644 --- a/src/graphql/validation/rules/fields_on_correct_type.py +++ b/src/graphql/validation/rules/fields_on_correct_type.py @@ -1,12 +1,11 @@ from collections import defaultdict from functools import cmp_to_key -from typing import Any, Dict, List, Union, cast +from typing import Any, Dict, List, Union from ...error import GraphQLError from ...language import FieldNode from ...pyutils import did_you_mean, natural_comparison_key, suggestion_list from ...type import ( - GraphQLAbstractType, GraphQLInterfaceType, GraphQLObjectType, GraphQLOutputType, @@ -74,7 +73,6 @@ def get_suggested_type_names( # Must be an Object type, which does not have possible fields. 
return [] - type_ = cast(GraphQLAbstractType, type_) # Use a dict instead of a set for stable sorting when usage counts are the same suggested_types: Dict[Union[GraphQLObjectType, GraphQLInterfaceType], None] = {} usage_count: Dict[str, int] = defaultdict(int) @@ -104,13 +102,9 @@ def cmp( return usage_count_diff # Suggest super types first followed by subtypes - if is_interface_type(type_a) and schema.is_sub_type( - cast(GraphQLInterfaceType, type_a), type_b - ): + if is_interface_type(type_a) and schema.is_sub_type(type_a, type_b): return -1 - if is_interface_type(type_b) and schema.is_sub_type( - cast(GraphQLInterfaceType, type_b), type_a - ): + if is_interface_type(type_b) and schema.is_sub_type(type_b, type_a): return 1 name_a = natural_comparison_key(type_a.name) @@ -131,7 +125,7 @@ def get_suggested_field_names(type_: GraphQLOutputType, field_name: str) -> List be the result of a typo. """ if is_object_type(type_) or is_interface_type(type_): - possible_field_names = list(type_.fields) # type: ignore + possible_field_names = list(type_.fields) return suggestion_list(field_name, possible_field_names) # Otherwise, must be a Union type, which does not define fields. return [] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index eb601203..ead8161a 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -4,7 +4,8 @@ from ...language import ( NamedTypeNode, Node, - TypeDefinitionNode, + TypeSystemDefinitionNode, + TypeSystemExtensionNode, is_type_definition_node, is_type_system_definition_node, is_type_system_extension_node, @@ -14,6 +15,12 @@ from . 
import ASTValidationRule, SDLValidationContext, ValidationContext +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + __all__ = ["KnownTypeNamesRule"] @@ -34,7 +41,6 @@ def __init__(self, context: Union[ValidationContext, SDLValidationContext]): defined_types = [] for def_ in context.document.definitions: if is_type_definition_node(def_): - def_ = cast(TypeDefinitionNode, def_) defined_types.append(def_.name.value) self.defined_types = set(defined_types) @@ -78,7 +84,9 @@ def enter_named_type( standard_type_names = set(specified_scalar_types).union(introspection_types) -def is_sdl_node(value: Union[Node, Collection[Node], None]) -> bool: +def is_sdl_node( + value: Union[Node, Collection[Node], None] +) -> TypeGuard[Union[TypeSystemDefinitionNode, TypeSystemExtensionNode]]: return ( value is not None and not isinstance(value, list) diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 8eda47a2..4e1acf5e 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -15,9 +15,7 @@ from ...type import ( GraphQLCompositeType, GraphQLField, - GraphQLList, GraphQLNamedType, - GraphQLNonNull, GraphQLOutputType, get_named_type, is_interface_type, @@ -602,9 +600,7 @@ def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> boo """ if is_list_type(type1): return ( - do_types_conflict( - cast(GraphQLList, type1).of_type, cast(GraphQLList, type2).of_type - ) + do_types_conflict(type1.of_type, type2.of_type) if is_list_type(type2) else True ) @@ -612,9 +608,7 @@ def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> boo return True if is_non_null_type(type1): return ( - do_types_conflict( - cast(GraphQLNonNull, type1).of_type, cast(GraphQLNonNull, type2).of_type - ) + 
do_types_conflict(type1.of_type, type2.of_type) if is_non_null_type(type2) else True ) @@ -681,7 +675,7 @@ def collect_fields_and_fragment_names( if isinstance(selection, FieldNode): field_name = selection.name.value field_def = ( - parent_type.fields.get(field_name) # type: ignore + parent_type.fields.get(field_name) if is_object_type(parent_type) or is_interface_type(parent_type) else None ) diff --git a/src/graphql/validation/rules/possible_fragment_spreads.py b/src/graphql/validation/rules/possible_fragment_spreads.py index 944c1d31..d59f0716 100644 --- a/src/graphql/validation/rules/possible_fragment_spreads.py +++ b/src/graphql/validation/rules/possible_fragment_spreads.py @@ -1,4 +1,4 @@ -from typing import Any, Optional, cast +from typing import Any, Optional from ...error import GraphQLError from ...language import FragmentSpreadNode, InlineFragmentNode @@ -25,11 +25,7 @@ def enter_inline_fragment(self, node: InlineFragmentNode, *_args: Any) -> None: if ( is_composite_type(frag_type) and is_composite_type(parent_type) - and not do_types_overlap( - context.schema, - cast(GraphQLCompositeType, frag_type), - cast(GraphQLCompositeType, parent_type), - ) + and not do_types_overlap(context.schema, frag_type, parent_type) ): context.report_error( GraphQLError( @@ -63,5 +59,5 @@ def get_fragment_type(self, name: str) -> Optional[GraphQLCompositeType]: if frag: type_ = type_from_ast(context.schema, frag.type_condition) if is_composite_type(type_): - return cast(GraphQLCompositeType, type_) + return type_ return None diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index 0baeac4b..2f7ba6ec 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -8,8 +8,6 @@ Node, SchemaDefinitionNode, SchemaExtensionNode, - TypeDefinitionNode, - TypeExtensionNode, is_type_definition_node, 
is_type_extension_node, ) @@ -64,7 +62,6 @@ def enter(self, node: Node, *_args: Any) -> None: if isinstance(node, (SchemaDefinitionNode, SchemaExtensionNode)): seen_directives = self.schema_directives elif is_type_definition_node(node) or is_type_extension_node(node): - node = cast(Union[TypeDefinitionNode, TypeExtensionNode], node) type_name = node.name.value seen_directives = self.type_directives_map[type_name] else: diff --git a/src/graphql/validation/rules/unique_enum_value_names.py b/src/graphql/validation/rules/unique_enum_value_names.py index cf2e42bb..e680ce2c 100644 --- a/src/graphql/validation/rules/unique_enum_value_names.py +++ b/src/graphql/validation/rules/unique_enum_value_names.py @@ -1,9 +1,9 @@ from collections import defaultdict -from typing import Any, Dict, cast +from typing import Any, Dict from ...error import GraphQLError from ...language import SKIP, EnumTypeDefinitionNode, NameNode, VisitorAction -from ...type import GraphQLEnumType, is_enum_type +from ...type import is_enum_type from . 
import SDLValidationContext, SDLValidationRule @@ -33,10 +33,7 @@ def check_value_uniqueness( value_name = value_def.name.value existing_type = existing_type_map.get(type_name) - if ( - is_enum_type(existing_type) - and value_name in cast(GraphQLEnumType, existing_type).values - ): + if is_enum_type(existing_type) and value_name in existing_type.values: self.report_error( GraphQLError( f"Enum value '{type_name}.{value_name}'" diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 982b87ed..29a081e3 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -18,7 +18,6 @@ ) from ...pyutils import Undefined, did_you_mean, suggestion_list from ...type import ( - GraphQLInputObjectType, GraphQLScalarType, get_named_type, get_nullable_type, @@ -57,7 +56,6 @@ def enter_object_value(self, node: ObjectValueNode, *_args: Any) -> VisitorActio if not is_input_object_type(type_): self.is_valid_value_node(node) return SKIP # Don't traverse further. - type_ = cast(GraphQLInputObjectType, type_) # Ensure every required field exists. 
field_node_map = {field.name.value: field for field in node.fields} for field_name, field_def in type_.fields.items(): @@ -77,7 +75,6 @@ def enter_object_field(self, node: ObjectFieldNode, *_args: Any) -> None: parent_type = get_named_type(self.context.get_parent_input_type()) field_type = self.context.get_input_type() if not field_type and is_input_object_type(parent_type): - parent_type = cast(GraphQLInputObjectType, parent_type) suggestions = suggestion_list(node.name.value, list(parent_type.fields)) self.report_error( GraphQLError( diff --git a/src/graphql/validation/rules/variables_in_allowed_position.py b/src/graphql/validation/rules/variables_in_allowed_position.py index c2d488b6..312a11bc 100644 --- a/src/graphql/validation/rules/variables_in_allowed_position.py +++ b/src/graphql/validation/rules/variables_in_allowed_position.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, cast +from typing import Any, Dict, Optional from ...error import GraphQLError from ...language import ( @@ -8,7 +8,7 @@ VariableDefinitionNode, ) from ...pyutils import Undefined -from ...type import GraphQLNonNull, GraphQLSchema, GraphQLType, is_non_null_type +from ...type import GraphQLSchema, GraphQLType, is_non_null_type from ...utilities import is_type_sub_type_of, type_from_ast from . 
import ValidationContext, ValidationRule @@ -88,7 +88,6 @@ def allowed_variable_usage( has_location_default_value = location_default_value is not Undefined if not has_non_null_variable_default_value and not has_location_default_value: return False - location_type = cast(GraphQLNonNull, location_type) nullable_location_type = location_type.of_type return is_type_sub_type_of(schema, var_type, nullable_location_type) return is_type_sub_type_of(schema, var_type, location_type) diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 5a3b5ad6..98d57c12 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,4 +1,4 @@ -from typing import Any, Awaitable, cast +from typing import Any from pytest import mark @@ -224,7 +224,6 @@ async def _complete(list_field): Data(list_field), ) assert is_awaitable(result) - result = cast(Awaitable, result) return await result @mark.asyncio diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 0c9de42f..2e026af8 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -73,6 +73,21 @@ def strips_ignored_characters_from_graphql_sdl_document(): '"""Type description""" type Foo{"""Field description""" bar:String}' ) + def strips_ignored_characters_from_source(): + source = Source( + dedent( + """ + { + foo { + bar + } + } + """ + ) + ) + + assert strip_ignored_characters(source) == "{foo{bar}}" + def report_document_with_invalid_token(): with raises(GraphQLSyntaxError) as exc_info: strip_ignored_characters('{ foo(arg: "\n"') diff --git a/tox.ini b/tox.ini index b2723428..0002f4be 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = basepython = python3.10 deps = black==22.10.0 commands = - black src tests -t py39 --check + black src tests -t py310 --check [testenv:flake8] basepython = python3.10 @@ -38,7 +38,7 @@ commands = basepython = python3.10 deps = 
mypy==0.982 - pytest>=7.1,<8 + pytest>=7.2,<8 commands = mypy src tests @@ -58,7 +58,7 @@ deps = pytest-cov>=4,<5 pytest-describe>=2,<3 pytest-timeout>=2,<3 - py37: typing-extensions>=4.3,<5 + py37,py38,py39,pypy39: typing-extensions>=4.4,<5 commands = # to also run the time-consuming tests: tox -e py310 -- --run-slow # to run the benchmarks: tox -e py310 -- -k benchmarks --benchmark-enable From 2c215fcde0aafbcec2b5ae8247982951885ceb39 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 2 Nov 2022 23:03:35 +0100 Subject: [PATCH 053/230] Use some more postponed evaluation of annotations --- docs/conf.py | 8 ++++ src/graphql/error/graphql_error.py | 2 +- src/graphql/error/located_error.py | 2 +- src/graphql/error/syntax_error.py | 2 +- src/graphql/execution/execute.py | 8 ++-- src/graphql/execution/subscribe.py | 4 +- src/graphql/language/ast.py | 52 ++++++++++++------------- src/graphql/language/location.py | 2 +- src/graphql/pyutils/undefined.py | 6 ++- src/graphql/type/definition.py | 38 +++++++++--------- tests/execution/test_union_interface.py | 18 +++++---- 11 files changed, 78 insertions(+), 64 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f9a807c9..96ba12f4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -120,9 +120,12 @@ # We need to give autodoc a little help in this cases, too: graphql_classes = { 'GraphQLAbstractType': 'type', + 'GraphQLFieldResolver': 'type', 'GraphQLObjectType': 'type', 'GraphQLOutputType': 'type', 'GraphQLTypeResolver': 'type', + 'AwaitableOrValue': 'execution', + 'Middleware': 'execution', 'Node': 'language', 'Source': 'language', 'SourceLocation': 'language' @@ -135,13 +138,18 @@ traceback types.TracebackType TypeMap +AwaitableOrValue EnterLeaveVisitor FormattedSourceLocation GraphQLAbstractType GraphQLErrorExtensions +GraphQLFieldResolver +GraphQLTypeResolver GraphQLOutputType +Middleware asyncio.events.AbstractEventLoop graphql.execution.map_async_iterator.MapAsyncIterator +graphql.execution.Middleware 
graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor graphql.type.schema.InterfaceImplementations diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index 0fa6c170..a9a85bc4 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -149,7 +149,7 @@ def __init__( positions = [loc.start for loc in node_locations] self.positions = positions or None if positions and source: - locations: Optional[List["SourceLocation"]] = [ + locations: Optional[List[SourceLocation]] = [ source.get_location(pos) for pos in positions ] else: diff --git a/src/graphql/error/located_error.py b/src/graphql/error/located_error.py index cabd737c..8f08dcf9 100644 --- a/src/graphql/error/located_error.py +++ b/src/graphql/error/located_error.py @@ -12,7 +12,7 @@ def located_error( original_error: Exception, - nodes: Optional[Union["None", Collection["Node"]]] = None, + nodes: Optional[Union[None, Collection["Node"]]] = None, path: Optional[Collection[Union[str, int]]] = None, ) -> GraphQLError: """Located GraphQL Error diff --git a/src/graphql/error/syntax_error.py b/src/graphql/error/syntax_error.py index 2b24879d..c3d95020 100644 --- a/src/graphql/error/syntax_error.py +++ b/src/graphql/error/syntax_error.py @@ -14,7 +14,7 @@ class GraphQLSyntaxError(GraphQLError): """A GraphQLError representing a syntax error.""" - def __init__(self, source: Source, position: int, description: str) -> None: + def __init__(self, source: "Source", position: int, description: str) -> None: super().__init__( f"Syntax Error: {description}", source=source, positions=[position] ) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 08d7df05..f4fedecf 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from asyncio import ensure_future, gather from collections.abc import Mapping from 
inspect import isawaitable @@ -238,7 +240,7 @@ def build( subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, middleware: Optional[Middleware] = None, is_awaitable: Optional[Callable[[Any], bool]] = None, - ) -> Union[List[GraphQLError], "ExecutionContext"]: + ) -> Union[List[GraphQLError], ExecutionContext]: """Build an execution context Constructs a ExecutionContext object from the arguments passed to execute, which @@ -972,7 +974,7 @@ def execute( type_resolver: Optional[GraphQLTypeResolver] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type["ExecutionContext"]] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, is_awaitable: Optional[Callable[[Any], bool]] = None, ) -> AwaitableOrValue[ExecutionResult]: """Execute a GraphQL operation. @@ -1060,7 +1062,7 @@ def execute_sync( field_resolver: Optional[GraphQLFieldResolver] = None, type_resolver: Optional[GraphQLTypeResolver] = None, middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type["ExecutionContext"]] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, check_sync: bool = False, ) -> ExecutionResult: """Execute a GraphQL operation synchronously. diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index 1ab38989..e803ba03 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -29,7 +29,7 @@ async def subscribe( operation_name: Optional[str] = None, field_resolver: Optional[GraphQLFieldResolver] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type["ExecutionContext"]] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, ) -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: """Create a GraphQL subscription. 
@@ -97,7 +97,7 @@ async def create_source_event_stream( variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type["ExecutionContext"]] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, ) -> Union[AsyncIterable[Any], ExecutionResult]: """Create source event stream diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index aa4fe9e7..95d75d3d 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -93,8 +93,8 @@ class Token: value: Optional[str] # Tokens exist as nodes in a double-linked-list amongst all tokens including # ignored tokens. is always the first node and the last. - prev: Optional["Token"] - next: Optional["Token"] + prev: Optional[Token] + next: Optional[Token] def __init__( self, @@ -413,7 +413,7 @@ class NameNode(Node): class DocumentNode(Node): __slots__ = ("definitions",) - definitions: Tuple["DefinitionNode", ...] + definitions: Tuple[DefinitionNode, ...] class DefinitionNode(Node): @@ -424,9 +424,9 @@ class ExecutableDefinitionNode(DefinitionNode): __slots__ = "name", "directives", "variable_definitions", "selection_set" name: Optional[NameNode] - directives: Tuple["DirectiveNode", ...] - variable_definitions: Tuple["VariableDefinitionNode", ...] - selection_set: "SelectionSetNode" + directives: Tuple[DirectiveNode, ...] + variable_definitions: Tuple[VariableDefinitionNode, ...] + selection_set: SelectionSetNode class OperationDefinitionNode(ExecutableDefinitionNode): @@ -438,22 +438,22 @@ class OperationDefinitionNode(ExecutableDefinitionNode): class VariableDefinitionNode(Node): __slots__ = "variable", "type", "default_value", "directives" - variable: "VariableNode" - type: "TypeNode" - default_value: Optional["ConstValueNode"] - directives: Tuple["ConstDirectiveNode", ...] 
+ variable: VariableNode + type: TypeNode + default_value: Optional[ConstValueNode] + directives: Tuple[ConstDirectiveNode, ...] class SelectionSetNode(Node): __slots__ = ("selections",) - selections: Tuple["SelectionNode", ...] + selections: Tuple[SelectionNode, ...] class SelectionNode(Node): __slots__ = ("directives",) - directives: Tuple["DirectiveNode", ...] + directives: Tuple[DirectiveNode, ...] class FieldNode(SelectionNode): @@ -461,7 +461,7 @@ class FieldNode(SelectionNode): alias: Optional[NameNode] name: NameNode - arguments: Tuple["ArgumentNode", ...] + arguments: Tuple[ArgumentNode, ...] selection_set: Optional[SelectionSetNode] @@ -469,12 +469,12 @@ class ArgumentNode(Node): __slots__ = "name", "value" name: NameNode - value: "ValueNode" + value: ValueNode class ConstArgumentNode(ArgumentNode): - value: "ConstValueNode" + value: ConstValueNode # Fragments @@ -489,7 +489,7 @@ class FragmentSpreadNode(SelectionNode): class InlineFragmentNode(SelectionNode): __slots__ = "type_condition", "selection_set" - type_condition: "NamedTypeNode" + type_condition: NamedTypeNode selection_set: SelectionSetNode @@ -497,7 +497,7 @@ class FragmentDefinitionNode(ExecutableDefinitionNode): __slots__ = ("type_condition",) name: NameNode - type_condition: "NamedTypeNode" + type_condition: NamedTypeNode # Values @@ -556,18 +556,18 @@ class ListValueNode(ValueNode): class ConstListValueNode(ListValueNode): - values: Tuple["ConstValueNode", ...] + values: Tuple[ConstValueNode, ...] class ObjectValueNode(ValueNode): __slots__ = ("fields",) - fields: Tuple["ObjectFieldNode", ...] + fields: Tuple[ObjectFieldNode, ...] class ConstObjectValueNode(ObjectValueNode): - fields: Tuple["ConstObjectFieldNode", ...] + fields: Tuple[ConstObjectFieldNode, ...] 
class ObjectFieldNode(Node): @@ -579,7 +579,7 @@ class ObjectFieldNode(Node): class ConstObjectFieldNode(ObjectFieldNode): - value: "ConstValueNode" + value: ConstValueNode ConstValueNode = Union[ @@ -646,7 +646,7 @@ class SchemaDefinitionNode(TypeSystemDefinitionNode): description: Optional[StringValueNode] directives: Tuple[ConstDirectiveNode, ...] - operation_types: Tuple["OperationTypeDefinitionNode", ...] + operation_types: Tuple[OperationTypeDefinitionNode, ...] class OperationTypeDefinitionNode(Node): @@ -678,7 +678,7 @@ class ObjectTypeDefinitionNode(TypeDefinitionNode): interfaces: Tuple[NamedTypeNode, ...] directives: Tuple[ConstDirectiveNode, ...] - fields: Tuple["FieldDefinitionNode", ...] + fields: Tuple[FieldDefinitionNode, ...] class FieldDefinitionNode(DefinitionNode): @@ -687,7 +687,7 @@ class FieldDefinitionNode(DefinitionNode): description: Optional[StringValueNode] name: NameNode directives: Tuple[ConstDirectiveNode, ...] - arguments: Tuple["InputValueDefinitionNode", ...] + arguments: Tuple[InputValueDefinitionNode, ...] type: TypeNode @@ -704,7 +704,7 @@ class InputValueDefinitionNode(DefinitionNode): class InterfaceTypeDefinitionNode(TypeDefinitionNode): __slots__ = "fields", "interfaces" - fields: Tuple["FieldDefinitionNode", ...] + fields: Tuple[FieldDefinitionNode, ...] directives: Tuple[ConstDirectiveNode, ...] interfaces: Tuple[NamedTypeNode, ...] @@ -720,7 +720,7 @@ class EnumTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("values",) directives: Tuple[ConstDirectiveNode, ...] - values: Tuple["EnumValueDefinitionNode", ...] + values: Tuple[EnumValueDefinitionNode, ...] 
class EnumValueDefinitionNode(DefinitionNode): diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 897a8595..529f2caf 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -40,7 +40,7 @@ def __ne__(self, other: Any) -> bool: return not self == other -def get_location(source: Source, position: int) -> SourceLocation: +def get_location(source: "Source", position: int) -> SourceLocation: """Get the line and column for a character position in the source. Takes a Source and a UTF-8 character offset, and returns the corresponding line and diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index a5ab96ec..e573227e 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + import warnings from typing import Any, Optional @@ -8,9 +10,9 @@ class UndefinedType(ValueError): """Auxiliary class for creating the Undefined singleton.""" - _instance: Optional["UndefinedType"] = None + _instance: Optional[UndefinedType] = None - def __new__(cls) -> "UndefinedType": + def __new__(cls) -> UndefinedType: if cls._instance is None: cls._instance = super().__new__(cls) else: diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 464aada7..c3157b69 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -236,9 +236,9 @@ class GraphQLNamedType(GraphQLType): ast_node: Optional[TypeDefinitionNode] extension_ast_nodes: Tuple[TypeExtensionNode, ...] 
- reserved_types: Dict[str, "GraphQLNamedType"] = {} + reserved_types: Dict[str, GraphQLNamedType] = {} - def __new__(cls, name: str, *_args: Any, **_kwargs: Any) -> "GraphQLNamedType": + def __new__(cls, name: str, *_args: Any, **_kwargs: Any) -> GraphQLNamedType: if name in cls.reserved_types: raise TypeError(f"Redefinition of reserved type {name!r}") return super().__new__(cls) @@ -247,7 +247,7 @@ def __reduce__(self) -> Tuple[Callable, Tuple]: return self._get_instance, (self.name, tuple(self.to_kwargs().items())) @classmethod - def _get_instance(cls, name: str, args: Tuple) -> "GraphQLNamedType": + def _get_instance(cls, name: str, args: Tuple) -> GraphQLNamedType: try: return cls.reserved_types[name] except KeyError: @@ -496,8 +496,8 @@ def assert_scalar_type(type_: Any) -> GraphQLScalarType: class GraphQLFieldKwargs(TypedDict, total=False): type_: GraphQLOutputType args: Optional[GraphQLArgumentMap] - resolve: Optional["GraphQLFieldResolver"] - subscribe: Optional["GraphQLFieldResolver"] + resolve: Optional[GraphQLFieldResolver] + subscribe: Optional[GraphQLFieldResolver] description: Optional[str] deprecation_reason: Optional[str] extensions: Dict[str, Any] @@ -509,8 +509,8 @@ class GraphQLField: type: GraphQLOutputType args: GraphQLArgumentMap - resolve: Optional["GraphQLFieldResolver"] - subscribe: Optional["GraphQLFieldResolver"] + resolve: Optional[GraphQLFieldResolver] + subscribe: Optional[GraphQLFieldResolver] description: Optional[str] deprecation_reason: Optional[str] extensions: Dict[str, Any] @@ -520,8 +520,8 @@ def __init__( self, type_: GraphQLOutputType, args: Optional[GraphQLArgumentMap] = None, - resolve: Optional["GraphQLFieldResolver"] = None, - subscribe: Optional["GraphQLFieldResolver"] = None, + resolve: Optional[GraphQLFieldResolver] = None, + subscribe: Optional[GraphQLFieldResolver] = None, description: Optional[str] = None, deprecation_reason: Optional[str] = None, extensions: Optional[Dict[str, Any]] = None, @@ -621,7 +621,7 @@ 
class GraphQLResolveInfo(NamedTuple): return_type: GraphQLOutputType parent_type: GraphQLObjectType path: Path - schema: GraphQLSchema + schema: "GraphQLSchema" fragments: Dict[str, FragmentDefinitionNode] root_value: Any operation: OperationDefinitionNode @@ -742,7 +742,7 @@ def is_required_argument(arg: GraphQLArgument) -> bool: class GraphQLObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): fields: GraphQLFieldMap - interfaces: Tuple["GraphQLInterfaceType", ...] + interfaces: Tuple[GraphQLInterfaceType, ...] is_type_of: Optional[GraphQLIsTypeOfFn] @@ -782,7 +782,7 @@ def __init__( self, name: str, fields: ThunkMapping[GraphQLField], - interfaces: Optional[ThunkCollection["GraphQLInterfaceType"]] = None, + interfaces: Optional[ThunkCollection[GraphQLInterfaceType]] = None, is_type_of: Optional[GraphQLIsTypeOfFn] = None, extensions: Optional[Dict[str, Any]] = None, description: Optional[str] = None, @@ -856,10 +856,10 @@ def fields(self) -> GraphQLFieldMap: } @cached_property - def interfaces(self) -> Tuple["GraphQLInterfaceType", ...]: + def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: """Get provided interfaces.""" try: - interfaces: Collection["GraphQLInterfaceType"] = resolve_thunk( + interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) except Exception as error: @@ -889,7 +889,7 @@ def assert_object_type(type_: Any) -> GraphQLObjectType: class GraphQLInterfaceTypeKwargs(GraphQLNamedTypeKwargs, total=False): fields: GraphQLFieldMap - interfaces: Tuple["GraphQLInterfaceType", ...] + interfaces: Tuple[GraphQLInterfaceType, ...] 
resolve_type: Optional[GraphQLTypeResolver] @@ -916,7 +916,7 @@ def __init__( self, name: str, fields: ThunkMapping[GraphQLField], - interfaces: Optional[ThunkCollection["GraphQLInterfaceType"]] = None, + interfaces: Optional[ThunkCollection[GraphQLInterfaceType]] = None, resolve_type: Optional[GraphQLTypeResolver] = None, description: Optional[str] = None, extensions: Optional[Dict[str, Any]] = None, @@ -990,10 +990,10 @@ def fields(self) -> GraphQLFieldMap: } @cached_property - def interfaces(self) -> Tuple["GraphQLInterfaceType", ...]: + def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: """Get provided interfaces.""" try: - interfaces: Collection["GraphQLInterfaceType"] = resolve_thunk( + interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) except Exception as error: @@ -1421,7 +1421,7 @@ class GeoPoint(GraphQLInputObjectType): def __init__( self, name: str, - fields: ThunkMapping["GraphQLInputField"], + fields: ThunkMapping[GraphQLInputField], description: Optional[str] = None, out_type: Optional[GraphQLInputFieldOutType] = None, extensions: Optional[Dict[str, Any]] = None, diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py index 0a1cb299..280199e4 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -1,3 +1,5 @@ +from __future__ import annotations # Python < 3.10 + from typing import List, Optional, Union from graphql.execution import execute_sync @@ -18,9 +20,9 @@ class Dog: name: str barks: bool - mother: Optional["Dog"] - father: Optional["Dog"] - progeny: List["Dog"] + mother: Optional[Dog] + father: Optional[Dog] + progeny: List[Dog] def __init__(self, name: str, barks: bool): self.name = name @@ -34,9 +36,9 @@ class Cat: name: str meows: bool - mother: Optional["Cat"] - father: Optional["Cat"] - progeny: List["Cat"] + mother: Optional[Cat] + father: Optional[Cat] + progeny: List[Cat] def __init__(self, name: str, 
meows: bool): self.name = name @@ -50,13 +52,13 @@ class Person: name: str pets: Optional[List[Union[Dog, Cat]]] - friends: Optional[List[Union[Dog, Cat, "Person"]]] + friends: Optional[List[Union[Dog, Cat, Person]]] def __init__( self, name: str, pets: Optional[List[Union[Dog, Cat]]] = None, - friends: Optional[List[Union[Dog, Cat, "Person"]]] = None, + friends: Optional[List[Union[Dog, Cat, Person]]] = None, ): self.name = name self.pets = pets From 55547d8cb4e4abba9e45a8f8e603f9303e351aa3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 2 Nov 2022 23:51:53 +0100 Subject: [PATCH 054/230] Use type aliases for more clarity --- src/graphql/error/graphql_error.py | 6 ++- src/graphql/execution/execute.py | 6 +-- src/graphql/execution/middleware.py | 8 ++- src/graphql/execution/values.py | 10 +++- src/graphql/language/ast.py | 10 +++- src/graphql/language/parser.py | 8 ++- src/graphql/language/printer.py | 8 ++- src/graphql/language/visitor.py | 10 +++- src/graphql/pyutils/awaitable_or_value.py | 8 ++- src/graphql/type/definition.py | 54 ++++++++++--------- src/graphql/type/schema.py | 6 +-- src/graphql/utilities/coerce_input_value.py | 8 ++- .../utilities/find_breaking_changes.py | 8 ++- .../utilities/get_introspection_query.py | 15 ++++-- src/graphql/utilities/separate_operations.py | 8 ++- src/graphql/utilities/type_info.py | 8 ++- .../rules/overlapping_fields_can_be_merged.py | 18 ++++--- src/graphql/validation/validation_context.py | 8 ++- tests/execution/test_middleware.py | 6 +-- tests/execution/test_subscribe.py | 15 +++++- tests/language/test_lexer.py | 8 ++- tests/language/test_parser.py | 8 ++- tests/language/test_schema_parser.py | 8 ++- tests/test_docs.py | 8 ++- tests/utilities/test_build_ast_schema.py | 10 +++- tests/utilities/test_extend_schema.py | 10 +++- 26 files changed, 211 insertions(+), 69 deletions(-) diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index a9a85bc4..6896f4f7 100644 --- 
a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -6,6 +6,10 @@ from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias if TYPE_CHECKING: from ..language.ast import Node # noqa: F401 @@ -19,7 +23,7 @@ # Custom extensions -GraphQLErrorExtensions = Dict[str, Any] +GraphQLErrorExtensions: TypeAlias = Dict[str, Any] # Use a unique identifier name for your extension, for example the name of # your library or project. Do not use a shortened identifier as this increases # the risk of conflicts. We recommend you add at most one extension key, diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index f4fedecf..5509f1e2 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -24,9 +24,9 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict try: - from typing import TypeGuard + from typing import TypeAlias, TypeGuard except ImportError: # Python < 3.10 - from typing_extensions import TypeGuard + from typing_extensions import TypeAlias, TypeGuard from ..error import GraphQLError, GraphQLFormattedError, located_error from ..language import ( @@ -170,7 +170,7 @@ def __ne__(self, other: Any) -> bool: return not self == other -Middleware = Optional[Union[Tuple, List, MiddlewareManager]] +Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] class ExecutionContext: diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index 1db54f09..cb455faf 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -3,9 +3,15 @@ from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = 
["MiddlewareManager"] -GraphQLFieldResolver = Callable[..., Any] +GraphQLFieldResolver: TypeAlias = Callable[..., Any] class MiddlewareManager: diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 2053d61f..9ae9c453 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -30,10 +30,16 @@ from ..utilities.value_from_ast import value_from_ast +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["get_argument_values", "get_directive_values", "get_variable_values"] -CoercedVariableValues = Union[List[GraphQLError], Dict[str, Any]] +CoercedVariableValues: TypeAlias = Union[List[GraphQLError], Dict[str, Any]] def get_variable_values( @@ -209,7 +215,7 @@ def get_argument_values( return coerced_values -NodeWithDirective = Union[ +NodeWithDirective: TypeAlias = Union[ EnumValueDefinitionNode, ExecutableDefinitionNode, FieldDefinitionNode, diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index 95d75d3d..cfbfe62a 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -9,6 +9,12 @@ from .token_kind import TokenKind +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = [ "Location", "Token", @@ -582,7 +588,7 @@ class ConstObjectFieldNode(ObjectFieldNode): value: ConstValueNode -ConstValueNode = Union[ +ConstValueNode: TypeAlias = Union[ IntValueNode, FloatValueNode, StringValueNode, @@ -771,7 +777,7 @@ class TypeExtensionNode(TypeSystemDefinitionNode): directives: Tuple[ConstDirectiveNode, ...] 
-TypeSystemExtensionNode = Union[SchemaExtensionNode, TypeExtensionNode] +TypeSystemExtensionNode: TypeAlias = Union[SchemaExtensionNode, TypeExtensionNode] class ScalarTypeExtensionNode(TypeExtensionNode): diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 72c1be7c..d0b009ed 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -64,11 +64,17 @@ from .token_kind import TokenKind +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["parse", "parse_type", "parse_value", "parse_const_value"] T = TypeVar("T") -SourceType = Union[Source, str] +SourceType: TypeAlias = Union[Source, str] def parse( diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 53fd5656..72cb4c4e 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -6,12 +6,18 @@ from .visitor import Visitor, visit +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["print_ast"] MAX_LINE_LENGTH = 80 -Strings = Collection[str] +Strings: TypeAlias = Collection[str] class PrintedNode: diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index e8039933..6ad71da5 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -17,6 +17,12 @@ from .ast import QUERY_DOCUMENT_KEYS, Node +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = [ "Visitor", "ParallelVisitor", @@ -41,7 +47,7 @@ class VisitorActionEnum(Enum): REMOVE = Ellipsis -VisitorAction = Optional[VisitorActionEnum] +VisitorAction: TypeAlias = Optional[VisitorActionEnum] # Note that in GraphQL.js these are defined differently: # BREAK = {}, SKIP = false, REMOVE = null, IDLE = undefined @@ -51,7 +57,7 @@ class VisitorActionEnum(Enum): REMOVE = 
VisitorActionEnum.REMOVE IDLE = None -VisitorKeyMap = Dict[str, Tuple[str, ...]] +VisitorKeyMap: TypeAlias = Dict[str, Tuple[str, ...]] class EnterLeaveVisitor(NamedTuple): diff --git a/src/graphql/pyutils/awaitable_or_value.py b/src/graphql/pyutils/awaitable_or_value.py index 071b1fe2..dcd34ffc 100644 --- a/src/graphql/pyutils/awaitable_or_value.py +++ b/src/graphql/pyutils/awaitable_or_value.py @@ -1,9 +1,15 @@ from typing import Awaitable, TypeVar, Union +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["AwaitableOrValue"] T = TypeVar("T") -AwaitableOrValue = Union[Awaitable[T], T] +AwaitableOrValue: TypeAlias = Union[Awaitable[T], T] diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index c3157b69..09345f67 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -66,9 +66,9 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict try: - from typing import TypeGuard + from typing import TypeAlias, TypeGuard except ImportError: # Python < 3.10 - from typing_extensions import TypeGuard + from typing_extensions import TypeAlias, TypeGuard if TYPE_CHECKING: from .schema import GraphQLSchema # noqa: F401 @@ -311,9 +311,9 @@ def __copy__(self) -> GraphQLNamedType: # pragma: no cover T = TypeVar("T") -ThunkCollection = Union[Callable[[], Collection[T]], Collection[T]] -ThunkMapping = Union[Callable[[], Mapping[str, T]], Mapping[str, T]] -Thunk = Union[Callable[[], T], T] +ThunkCollection: TypeAlias = Union[Callable[[], Collection[T]], Collection[T]] +ThunkMapping: TypeAlias = Union[Callable[[], Mapping[str, T]], Mapping[str, T]] +Thunk: TypeAlias = Union[Callable[[], T], T] def resolve_thunk(thunk: Thunk[T]) -> T: @@ -325,9 +325,11 @@ def resolve_thunk(thunk: Thunk[T]) -> T: return thunk() if callable(thunk) else thunk -GraphQLScalarSerializer = Callable[[Any], Any] -GraphQLScalarValueParser = Callable[[Any], Any] 
-GraphQLScalarLiteralParser = Callable[[ValueNode, Optional[Dict[str, Any]]], Any] +GraphQLScalarSerializer: TypeAlias = Callable[[Any], Any] +GraphQLScalarValueParser: TypeAlias = Callable[[Any], Any] +GraphQLScalarLiteralParser: TypeAlias = Callable[ + [ValueNode, Optional[Dict[str, Any]]], Any +] class GraphQLScalarTypeKwargs(GraphQLNamedTypeKwargs, total=False): @@ -490,7 +492,7 @@ def assert_scalar_type(type_: Any) -> GraphQLScalarType: return type_ -GraphQLArgumentMap = Dict[str, "GraphQLArgument"] +GraphQLArgumentMap: TypeAlias = Dict[str, "GraphQLArgument"] class GraphQLFieldKwargs(TypedDict, total=False): @@ -633,23 +635,25 @@ class GraphQLResolveInfo(NamedTuple): # Note: Contrary to the Javascript implementation of GraphQLFieldResolver, # the context is passed as part of the GraphQLResolveInfo and any arguments # are passed individually as keyword arguments. -GraphQLFieldResolverWithoutArgs = Callable[[Any, GraphQLResolveInfo], Any] +GraphQLFieldResolverWithoutArgs: TypeAlias = Callable[[Any, GraphQLResolveInfo], Any] # Unfortunately there is currently no syntax to indicate optional or keyword # arguments in Python, so we also allow any other Callable as a workaround: -GraphQLFieldResolver = Callable[..., Any] +GraphQLFieldResolver: TypeAlias = Callable[..., Any] # Note: Contrary to the Javascript implementation of GraphQLTypeResolver, # the context is passed as part of the GraphQLResolveInfo: -GraphQLTypeResolver = Callable[ +GraphQLTypeResolver: TypeAlias = Callable[ [Any, GraphQLResolveInfo, "GraphQLAbstractType"], AwaitableOrValue[Optional[str]], ] # Note: Contrary to the Javascript implementation of GraphQLIsTypeOfFn, # the context is passed as part of the GraphQLResolveInfo: -GraphQLIsTypeOfFn = Callable[[Any, GraphQLResolveInfo], AwaitableOrValue[bool]] +GraphQLIsTypeOfFn: TypeAlias = Callable[ + [Any, GraphQLResolveInfo], AwaitableOrValue[bool] +] -GraphQLFieldMap = Dict[str, GraphQLField] +GraphQLFieldMap: TypeAlias = Dict[str, GraphQLField] 
class GraphQLArgumentKwargs(TypedDict, total=False): @@ -1121,7 +1125,7 @@ def assert_union_type(type_: Any) -> GraphQLUnionType: return type_ -GraphQLEnumValueMap = Dict[str, "GraphQLEnumValue"] +GraphQLEnumValueMap: TypeAlias = Dict[str, "GraphQLEnumValue"] class GraphQLEnumTypeKwargs(GraphQLNamedTypeKwargs, total=False): @@ -1381,7 +1385,7 @@ def __copy__(self) -> GraphQLEnumValue: # pragma: no cover return self.__class__(**self.to_kwargs()) -GraphQLInputFieldMap = Dict[str, "GraphQLInputField"] +GraphQLInputFieldMap: TypeAlias = Dict[str, "GraphQLInputField"] GraphQLInputFieldOutType = Callable[[Dict[str, Any]], Any] @@ -1698,7 +1702,7 @@ def assert_non_null_type(type_: Any) -> GraphQLNonNull: GraphQLList, ) -GraphQLNullableType = Union[ +GraphQLNullableType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, @@ -1747,7 +1751,7 @@ def get_nullable_type( graphql_input_types = (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) -GraphQLInputType = Union[ +GraphQLInputType: TypeAlias = Union[ GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType, GraphQLWrappingType ] @@ -1774,7 +1778,7 @@ def assert_input_type(type_: Any) -> GraphQLInputType: GraphQLEnumType, ) -GraphQLOutputType = Union[ +GraphQLOutputType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, @@ -1798,11 +1802,11 @@ def assert_output_type(type_: Any) -> GraphQLOutputType: # These named types do not include modifiers like List or NonNull. 
-GraphQLNamedInputType = Union[ +GraphQLNamedInputType: TypeAlias = Union[ GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType ] -GraphQLNamedOutputType = Union[ +GraphQLNamedOutputType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, @@ -1845,7 +1849,7 @@ def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: graphql_leaf_types = (GraphQLScalarType, GraphQLEnumType) -GraphQLLeafType = Union[GraphQLScalarType, GraphQLEnumType] +GraphQLLeafType: TypeAlias = Union[GraphQLScalarType, GraphQLEnumType] def is_leaf_type(type_: Any) -> TypeGuard[GraphQLLeafType]: @@ -1862,7 +1866,9 @@ def assert_leaf_type(type_: Any) -> GraphQLLeafType: graphql_composite_types = (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) -GraphQLCompositeType = Union[GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType] +GraphQLCompositeType: TypeAlias = Union[ + GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType +] def is_composite_type(type_: Any) -> TypeGuard[GraphQLCompositeType]: @@ -1879,7 +1885,7 @@ def assert_composite_type(type_: Any) -> GraphQLType: graphql_abstract_types = (GraphQLInterfaceType, GraphQLUnionType) -GraphQLAbstractType = Union[GraphQLInterfaceType, GraphQLUnionType] +GraphQLAbstractType: TypeAlias = Union[GraphQLInterfaceType, GraphQLUnionType] def is_abstract_type(type_: Any) -> TypeGuard[GraphQLAbstractType]: diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index fe857a37..dad93295 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -28,15 +28,15 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict try: - from typing import TypeGuard + from typing import TypeAlias, TypeGuard except ImportError: # Python < 3.10 - from typing_extensions import TypeGuard + from typing_extensions import TypeAlias, TypeGuard __all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "is_schema", "assert_schema"] -TypeMap = Dict[str, 
GraphQLNamedType] +TypeMap: TypeAlias = Dict[str, GraphQLNamedType] class InterfaceImplementations(NamedTuple): diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index 42a31120..31237d1e 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -20,10 +20,16 @@ ) +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["coerce_input_value"] -OnErrorCB = Callable[[List[Union[str, int]], Any, GraphQLError], None] +OnErrorCB: TypeAlias = Callable[[List[Union[str, int]], Any, GraphQLError], None] def default_on_error( diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index 8c515291..5dce9959 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -31,6 +31,12 @@ from .ast_from_value import ast_from_value +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = [ "BreakingChange", "BreakingChangeType", @@ -79,7 +85,7 @@ class DangerousChange(NamedTuple): description: str -Change = Union[BreakingChange, DangerousChange] +Change: TypeAlias = Union[BreakingChange, DangerousChange] def find_breaking_changes( diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index c1b839f0..93478dc2 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -8,6 +8,11 @@ from typing import Literal, TypedDict except ImportError: # Python < 3.8 from typing_extensions import Literal, TypedDict # type: ignore +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + __all__ = [ "get_introspection_query", @@ -158,7 +163,7 @@ def 
input_deprecation(string: str) -> Optional[str]: # - no generic typed dicts, see https://github.com/python/mypy/issues/3863 # simplified IntrospectionNamedType to avoids cycles -SimpleIntrospectionType = Dict[str, Any] +SimpleIntrospectionType: TypeAlias = Dict[str, Any] class MaybeWithDescription(TypedDict, total=False): @@ -238,7 +243,7 @@ class IntrospectionInputObjectType(WithName): inputFields: List[IntrospectionInputValue] -IntrospectionType = Union[ +IntrospectionType: TypeAlias = Union[ IntrospectionScalarType, IntrospectionObjectType, IntrospectionInterfaceType, @@ -248,7 +253,7 @@ class IntrospectionInputObjectType(WithName): ] -IntrospectionOutputType = Union[ +IntrospectionOutputType: TypeAlias = Union[ IntrospectionScalarType, IntrospectionObjectType, IntrospectionInterfaceType, @@ -257,7 +262,7 @@ class IntrospectionInputObjectType(WithName): ] -IntrospectionInputType = Union[ +IntrospectionInputType: TypeAlias = Union[ IntrospectionScalarType, IntrospectionEnumType, IntrospectionInputObjectType ] @@ -272,7 +277,7 @@ class IntrospectionNonNullType(TypedDict): ofType: SimpleIntrospectionType # should be IntrospectionType -IntrospectionTypeRef = Union[ +IntrospectionTypeRef: TypeAlias = Union[ IntrospectionType, IntrospectionListType, IntrospectionNonNullType ] diff --git a/src/graphql/utilities/separate_operations.py b/src/graphql/utilities/separate_operations.py index 48ce555e..bc3ac7d2 100644 --- a/src/graphql/utilities/separate_operations.py +++ b/src/graphql/utilities/separate_operations.py @@ -11,10 +11,16 @@ ) +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["separate_operations"] -DepGraph = Dict[str, List[str]] +DepGraph: TypeAlias = Dict[str, List[str]] def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 6aa5e086..db740f8a 100644 --- 
a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -44,10 +44,16 @@ from .type_from_ast import type_from_ast +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["TypeInfo", "TypeInfoVisitor"] -GetFieldDefFn = Callable[ +GetFieldDefFn: TypeAlias = Callable[ [GraphQLSchema, GraphQLType, FieldNode], Optional[GraphQLField] ] diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 4e1acf5e..d853c669 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -29,6 +29,12 @@ from . import ValidationContext, ValidationRule +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + MYPY = False __all__ = ["OverlappingFieldsCanBeMergedRule"] @@ -85,18 +91,18 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N ) -Conflict = Tuple["ConflictReason", List[FieldNode], List[FieldNode]] +Conflict: TypeAlias = Tuple["ConflictReason", List[FieldNode], List[FieldNode]] # Field name and reason. -ConflictReason = Tuple[str, "ConflictReasonMessage"] +ConflictReason: TypeAlias = Tuple[str, "ConflictReasonMessage"] # Reason is a string, or a nested list of conflicts. if MYPY: # recursive types not fully supported yet (/python/mypy/issues/731) - ConflictReasonMessage = Union[str, List] + ConflictReasonMessage: TypeAlias = Union[str, List] else: - ConflictReasonMessage = Union[str, List[ConflictReason]] + ConflictReasonMessage: TypeAlias = Union[str, List[ConflictReason]] # Tuple defining a field node in a context. -NodeAndDef = Tuple[GraphQLCompositeType, FieldNode, Optional[GraphQLField]] +NodeAndDef: TypeAlias = Tuple[GraphQLCompositeType, FieldNode, Optional[GraphQLField]] # Dictionary of lists of those. 
-NodeAndDefCollection = Dict[str, List[NodeAndDef]] +NodeAndDefCollection: TypeAlias = Dict[str, List[NodeAndDef]] # Algorithm: diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index b3dadc3f..c9930188 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -25,6 +25,12 @@ from ..utilities import TypeInfo, TypeInfoVisitor +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = [ "ASTValidationContext", "SDLValidationContext", @@ -33,7 +39,7 @@ "VariableUsageVisitor", ] -NodeWithSelectionSet = Union[OperationDefinitionNode, FragmentDefinitionNode] +NodeWithSelectionSet: TypeAlias = Union[OperationDefinitionNode, FragmentDefinitionNode] class VariableUsage(NamedTuple): diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 6db8bdab..86436c27 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -1,8 +1,8 @@ -from typing import Awaitable +from typing import Awaitable, cast from pytest import mark, raises -from graphql.execution import MiddlewareManager, execute +from graphql.execution import Middleware, MiddlewareManager, execute from graphql.language.parser import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -286,7 +286,7 @@ def bad_middleware_object(): GraphQLSchema(test_type), doc, None, - middleware={"bad": "value"}, # type: ignore + middleware=cast(Middleware, {"bad": "value"}), ) assert str(exc_info.value) == ( diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 8f3ceb55..faf10f64 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -18,6 +18,11 @@ ) +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict + try: anext except 
NameError: # pragma: no cover (Python < 3.10) @@ -27,7 +32,15 @@ async def anext(iterator): return await iterator.__anext__() -Email = Dict # should become a TypedDict once we require Python 3.8 +Email = TypedDict( + "Email", + { + "from": str, + "subject": str, + "message": str, + "unread": bool, + }, +) EmailType = GraphQLObjectType( "Email", diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 81759768..2be6af4e 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -10,7 +10,13 @@ from ..utils import dedent -Location = Optional[Tuple[int, int]] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +Location: TypeAlias = Optional[Tuple[int, int]] def lex_one(s: str) -> Token: diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index a98d4d69..cc807bb4 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -37,7 +37,13 @@ from ..utils import dedent -Location = Optional[Tuple[int, int]] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +Location: TypeAlias = Optional[Tuple[int, int]] def assert_syntax_error(text: str, message: str, location: Location) -> None: diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index feab6543..5faa5c52 100644 --- a/tests/language/test_schema_parser.py +++ b/tests/language/test_schema_parser.py @@ -40,7 +40,13 @@ from ..fixtures import kitchen_sink_sdl # noqa: F401 -Location = Optional[Tuple[int, int]] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +Location: TypeAlias = Optional[Tuple[int, int]] def assert_syntax_error(text: str, message: str, location: Location) -> None: diff --git a/tests/test_docs.py b/tests/test_docs.py index a85ea6aa..cfe419f0 100644 --- a/tests/test_docs.py +++ 
b/tests/test_docs.py @@ -6,7 +6,13 @@ from .utils import dedent -Scope = Dict[str, Any] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +Scope: TypeAlias = Dict[str, Any] def get_snippets(source, indent=4): diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index bb0dc561..b3ceffce 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -42,6 +42,12 @@ from ..utils import dedent, timeout_factor +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + def cycle_sdl(sdl: str) -> str: """Full cycle test. @@ -54,11 +60,11 @@ def cycle_sdl(sdl: str) -> str: return print_schema(schema) -TypeWithAstNode = Union[ +TypeWithAstNode: TypeAlias = Union[ GraphQLArgument, GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLNamedType ] -TypeWithExtensionAstNodes = GraphQLNamedType +TypeWithExtensionAstNodes: TypeAlias = GraphQLNamedType def expect_ast_node(obj: TypeWithAstNode, expected: str) -> None: diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 84391714..db6a1c69 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -30,7 +30,13 @@ from ..utils import dedent -TypeWithAstNode = Union[ +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +TypeWithAstNode: TypeAlias = Union[ GraphQLArgument, GraphQLEnumValue, GraphQLField, @@ -39,7 +45,7 @@ GraphQLSchema, ] -TypeWithExtensionAstNodes = Union[ +TypeWithExtensionAstNodes: TypeAlias = Union[ GraphQLNamedType, GraphQLSchema, ] From 71763a3feff5b7a47c01478ae7f3243f1fb81775 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 3 Nov 2022 15:31:36 +0100 Subject: [PATCH 055/230] Refinement of non null types Also removed internal tuples of 
types, they could be confusing. Roughly replicates graphql/graphql-js@c52676722ddecad34029a0148b5da89eab3115e9 --- src/graphql/__init__.py | 4 + src/graphql/type/__init__.py | 4 + src/graphql/type/definition.py | 176 ++++++++++-------- src/graphql/utilities/type_info.py | 2 +- .../rules/values_of_correct_type.py | 2 +- 5 files changed, 104 insertions(+), 84 deletions(-) diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 07ff4d96..0327396a 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -325,6 +325,8 @@ GraphQLAbstractType, GraphQLWrappingType, GraphQLNullableType, + GraphQLNullableInputType, + GraphQLNullableOutputType, GraphQLNamedType, GraphQLNamedInputType, GraphQLNamedOutputType, @@ -543,6 +545,8 @@ "GraphQLAbstractType", "GraphQLWrappingType", "GraphQLNullableType", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", "GraphQLNamedType", "GraphQLNamedInputType", "GraphQLNamedOutputType", diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index 6a86c0f7..569e4f52 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -82,6 +82,8 @@ GraphQLAbstractType, GraphQLWrappingType, GraphQLNullableType, + GraphQLNullableInputType, + GraphQLNullableOutputType, GraphQLNamedType, GraphQLNamedInputType, GraphQLNamedOutputType, @@ -234,6 +236,8 @@ "GraphQLAbstractType", "GraphQLWrappingType", "GraphQLNullableType", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", "GraphQLNamedType", "GraphQLNamedInputType", "GraphQLNamedOutputType", diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 09345f67..feaab760 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -144,6 +144,8 @@ "GraphQLNamedInputType", "GraphQLNamedOutputType", "GraphQLNullableType", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", "GraphQLNonNull", "GraphQLResolveInfo", "GraphQLScalarType", @@ -169,7 +171,7 @@ class GraphQLType: """Base 
class for all GraphQL types""" # Note: We don't use slots for GraphQLType objects because memory considerations - # are not really important for the schema definition and it would make caching + # are not really important for the schema definition, and it would make caching # properties slower or more complicated. @@ -188,7 +190,7 @@ def assert_type(type_: Any) -> GraphQLType: # These types wrap and modify other types -GT = TypeVar("GT", bound=GraphQLType) +GT = TypeVar("GT", bound=GraphQLType, covariant=True) class GraphQLWrappingType(GraphQLType, Generic[GT]): @@ -1609,7 +1611,7 @@ def is_required_input_field(field: GraphQLInputField) -> bool: # Wrapper types -class GraphQLList(Generic[GT], GraphQLWrappingType[GT]): +class GraphQLList(GraphQLWrappingType[GT]): """List Type Wrapper A list is a wrapping type which points to another type. Lists are often created @@ -1645,10 +1647,10 @@ def assert_list_type(type_: Any) -> GraphQLList: return type_ -GNT = TypeVar("GNT", bound="GraphQLNullableType") +GNT = TypeVar("GNT", bound="GraphQLNullableType", covariant=True) -class GraphQLNonNull(GraphQLWrappingType[GNT], Generic[GNT]): +class GraphQLNonNull(GraphQLWrappingType[GNT]): """Non-Null Type Wrapper A non-null is a wrapping type which points to another type. Non-null types enforce @@ -1680,19 +1682,9 @@ def __str__(self) -> str: return f"{self.of_type}!" -def is_non_null_type(type_: Any) -> TypeGuard[GraphQLNonNull]: - return isinstance(type_, GraphQLNonNull) - - -def assert_non_null_type(type_: Any) -> GraphQLNonNull: - if not is_non_null_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Non-Null type.") - return type_ - - # These types can all accept null as a value. 
-graphql_nullable_types = ( +GraphQLNullableType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, @@ -1700,21 +1692,98 @@ def assert_non_null_type(type_: Any) -> GraphQLNonNull: GraphQLEnumType, GraphQLInputObjectType, GraphQLList, -) +] -GraphQLNullableType: TypeAlias = Union[ + +# These types may be used as input types for arguments and directives. + +GraphQLNullableInputType: TypeAlias = Union[ + GraphQLScalarType, + GraphQLEnumType, + GraphQLInputObjectType, + # actually GraphQLList[GraphQLInputType], but we can't recurse + GraphQLList, +] + +GraphQLInputType: TypeAlias = Union[ + GraphQLNullableInputType, GraphQLNonNull[GraphQLNullableInputType] +] + + +# These types may be used as output types as the result of fields. + +GraphQLNullableOutputType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType, GraphQLEnumType, - GraphQLInputObjectType, + # actually GraphQLList[GraphQLOutputType], but we can't recurse GraphQLList, ] +GraphQLOutputType: TypeAlias = Union[ + GraphQLNullableOutputType, GraphQLNonNull[GraphQLNullableOutputType] +] + + +# Predicates and Assertions + + +def is_input_type(type_: Any) -> TypeGuard[GraphQLInputType]: + return isinstance( + type_, (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) + ) or (isinstance(type_, GraphQLWrappingType) and is_input_type(type_.of_type)) + + +def assert_input_type(type_: Any) -> GraphQLInputType: + if not is_input_type(type_): + raise TypeError(f"Expected {type_} to be a GraphQL input type.") + return type_ + + +def is_output_type(type_: Any) -> TypeGuard[GraphQLOutputType]: + return isinstance( + type_, + ( + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, + GraphQLEnumType, + ), + ) or (isinstance(type_, GraphQLWrappingType) and is_output_type(type_.of_type)) + + +def assert_output_type(type_: Any) -> GraphQLOutputType: + if not is_output_type(type_): + raise TypeError(f"Expected {type_} 
to be a GraphQL output type.") + return type_ + + +def is_non_null_type(type_: Any) -> TypeGuard[GraphQLNonNull]: + return isinstance(type_, GraphQLNonNull) + + +def assert_non_null_type(type_: Any) -> GraphQLNonNull: + if not is_non_null_type(type_): + raise TypeError(f"Expected {type_} to be a GraphQL Non-Null type.") + return type_ + def is_nullable_type(type_: Any) -> TypeGuard[GraphQLNullableType]: - return isinstance(type_, graphql_nullable_types) + return isinstance( + type_, + ( + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, + GraphQLEnumType, + GraphQLInputObjectType, + GraphQLList, + ), + ) def assert_nullable_type(type_: Any) -> GraphQLNullableType: @@ -1747,59 +1816,6 @@ def get_nullable_type( return cast(Optional[GraphQLNullableType], type_) -# These types may be used as input types for arguments and directives. - -graphql_input_types = (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) - -GraphQLInputType: TypeAlias = Union[ - GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType, GraphQLWrappingType -] - - -def is_input_type(type_: Any) -> TypeGuard[GraphQLInputType]: - return isinstance(type_, graphql_input_types) or ( - isinstance(type_, GraphQLWrappingType) and is_input_type(type_.of_type) - ) - - -def assert_input_type(type_: Any) -> GraphQLInputType: - if not is_input_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL input type.") - return type_ - - -# These types may be used as output types as the result of fields. 
- -graphql_output_types = ( - GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, - GraphQLEnumType, -) - -GraphQLOutputType: TypeAlias = Union[ - GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, - GraphQLEnumType, - GraphQLWrappingType, -] - - -def is_output_type(type_: Any) -> TypeGuard[GraphQLOutputType]: - return isinstance(type_, graphql_output_types) or ( - isinstance(type_, GraphQLWrappingType) and is_output_type(type_.of_type) - ) - - -def assert_output_type(type_: Any) -> GraphQLOutputType: - if not is_output_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL output type.") - return type_ - - # These named types do not include modifiers like List or NonNull. GraphQLNamedInputType: TypeAlias = Union[ @@ -1847,13 +1863,11 @@ def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: # These types may describe types which may be leaf values. -graphql_leaf_types = (GraphQLScalarType, GraphQLEnumType) - GraphQLLeafType: TypeAlias = Union[GraphQLScalarType, GraphQLEnumType] def is_leaf_type(type_: Any) -> TypeGuard[GraphQLLeafType]: - return isinstance(type_, graphql_leaf_types) + return isinstance(type_, (GraphQLScalarType, GraphQLEnumType)) def assert_leaf_type(type_: Any) -> GraphQLLeafType: @@ -1864,15 +1878,15 @@ def assert_leaf_type(type_: Any) -> GraphQLLeafType: # These types may describe the parent context of a selection set. 
-graphql_composite_types = (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) - GraphQLCompositeType: TypeAlias = Union[ GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType ] def is_composite_type(type_: Any) -> TypeGuard[GraphQLCompositeType]: - return isinstance(type_, graphql_composite_types) + return isinstance( + type_, (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) + ) def assert_composite_type(type_: Any) -> GraphQLType: @@ -1883,13 +1897,11 @@ def assert_composite_type(type_: Any) -> GraphQLType: # These types may describe abstract types. -graphql_abstract_types = (GraphQLInterfaceType, GraphQLUnionType) - GraphQLAbstractType: TypeAlias = Union[GraphQLInterfaceType, GraphQLUnionType] def is_abstract_type(type_: Any) -> TypeGuard[GraphQLAbstractType]: - return isinstance(type_, graphql_abstract_types) + return isinstance(type_, (GraphQLInterfaceType, GraphQLUnionType)) def assert_abstract_type(type_: Any) -> GraphQLAbstractType: diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index db740f8a..584b468e 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -201,7 +201,7 @@ def enter_argument(self, node: ArgumentNode) -> None: # noinspection PyUnusedLocal def enter_list_value(self, node: ListValueNode) -> None: - list_type = get_nullable_type(self.get_input_type()) # type: ignore + list_type = get_nullable_type(self.get_input_type()) item_type = list_type.of_type if is_list_type(list_type) else list_type # List positions never have a default value. 
self._default_value_stack.append(Undefined) diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 29a081e3..bc390f9d 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -45,7 +45,7 @@ class ValuesOfCorrectTypeRule(ValidationRule): def enter_list_value(self, node: ListValueNode, *_args: Any) -> VisitorAction: # Note: TypeInfo will traverse into a list's item type, so look to the parent # input type to check if it is a list. - type_ = get_nullable_type(self.context.get_parent_input_type()) # type: ignore + type_ = get_nullable_type(self.context.get_parent_input_type()) if not is_list_type(type_): self.is_valid_value_node(node) return SKIP # Don't traverse further. From aa770826dacfdcfc0858ced7313e6624aa7734f3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 3 Nov 2022 15:41:41 +0100 Subject: [PATCH 056/230] benchmark: test performance cost of re-creating schema Replicates graphql/graphql-js@c7d7026982ceee536900a24ae31235127560297a --- tests/benchmarks/test_graphql_schema.py | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 tests/benchmarks/test_graphql_schema.py diff --git a/tests/benchmarks/test_graphql_schema.py b/tests/benchmarks/test_graphql_schema.py new file mode 100644 index 00000000..b7b6ed46 --- /dev/null +++ b/tests/benchmarks/test_graphql_schema.py @@ -0,0 +1,11 @@ +from graphql import GraphQLSchema, build_schema, print_schema + +from ..fixtures import big_schema_sdl # noqa: F401 + + +def test_recreate_a_graphql_schema(benchmark, big_schema_sdl): # noqa: F811 + schema = build_schema(big_schema_sdl, assume_valid=True) + recreated_schema: GraphQLSchema = benchmark( + lambda: GraphQLSchema(**schema.to_kwargs()) + ) + assert print_schema(schema) == print_schema(recreated_schema) From 2792f599c65c55f452884efdcfd2914023ea56d7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke 
Date: Thu, 3 Nov 2022 20:13:10 +0100 Subject: [PATCH 057/230] Add new GraphQLSchema.get_field method Replicates graphql/graphql-js@69e155474f06b7715e172d5c75c0f3e5ad819e21 --- src/graphql/execution/execute.py | 32 +---------- src/graphql/execution/subscribe.py | 5 +- src/graphql/type/schema.py | 38 ++++++++++++- src/graphql/utilities/type_info.py | 25 ++------- tests/type/test_schema.py | 58 ++++++++++++++++++++ tests/utilities/test_type_info.py | 86 +++++++++++++++++++++++++++++- 6 files changed, 187 insertions(+), 57 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 5509f1e2..60ad8211 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -50,9 +50,6 @@ GraphQLResolveInfo, GraphQLSchema, GraphQLTypeResolver, - SchemaMetaFieldDef, - TypeMetaFieldDef, - TypeNameMetaFieldDef, assert_valid_schema, is_abstract_type, is_leaf_type, @@ -71,7 +68,6 @@ "default_type_resolver", "execute", "execute_sync", - "get_field_def", "ExecutionResult", "ExecutionContext", "FormattedExecutionResult", @@ -501,7 +497,8 @@ def execute_field( calling its resolve function, then calls complete_value to await coroutine objects, serialize scalars, or execute the sub-selection-set for objects. """ - field_def = get_field_def(self.schema, parent_type, field_nodes[0]) + field_name = field_nodes[0].name.value + field_def = self.schema.get_field(parent_type, field_name) if not field_def: return Undefined @@ -1130,31 +1127,6 @@ def assert_valid_execution_arguments( ) -def get_field_def( - schema: GraphQLSchema, parent_type: GraphQLObjectType, field_node: FieldNode -) -> GraphQLField: - """Get field definition. - - This method looks up the field on the given type definition. It has special casing - for the three introspection fields, ``__schema``, ``__type`, and ``__typename``. 
- ``__typename`` is special because it can always be queried as a field, even in - situations where no other fields are allowed, like on a Union. ``__schema`` and - ``__type`` could get automatically added to the query type, but that would require - mutating type definitions, which would cause issues. - - For internal use only. - """ - field_name = field_node.name.value - - if field_name == "__schema" and schema.query_type == parent_type: - return SchemaMetaFieldDef - elif field_name == "__type" and schema.query_type == parent_type: - return TypeMetaFieldDef - elif field_name == "__typename": - return TypeNameMetaFieldDef - return parent_type.fields.get(field_name) - - def invalid_return_type_error( return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode] ) -> GraphQLError: diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index e803ba03..992fb051 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -8,7 +8,6 @@ ExecutionResult, assert_valid_execution_arguments, execute, - get_field_def, ) from ..execution.values import get_argument_values from ..language import DocumentNode @@ -179,10 +178,10 @@ async def execute_subscription(context: ExecutionContext) -> AsyncIterable[Any]: context.operation.selection_set, ) response_name, field_nodes = next(iter(root_fields.items())) - field_def = get_field_def(schema, root_type, field_nodes[0]) + field_name = field_nodes[0].name.value + field_def = schema.get_field(root_type, field_name) if not field_def: - field_name = field_nodes[0].name.value raise GraphQLError( f"The subscription field '{field_name}' is not defined.", field_nodes ) diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index dad93295..a3673d8b 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -8,6 +8,8 @@ from ..pyutils import inspect, is_collection, is_description from .definition import ( GraphQLAbstractType, + 
GraphQLCompositeType, + GraphQLField, GraphQLInterfaceType, GraphQLNamedType, GraphQLObjectType, @@ -20,7 +22,12 @@ is_wrapping_type, ) from .directives import GraphQLDirective, is_directive, specified_directives -from .introspection import introspection_types +from .introspection import ( + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, + introspection_types, +) try: @@ -387,6 +394,35 @@ def get_directive(self, name: str) -> Optional[GraphQLDirective]: return directive return None + def get_field( + self, parent_type: GraphQLCompositeType, field_name: str + ) -> Optional[GraphQLField]: + """Get field of a given type with the given name. + + This method looks up the field on the given type definition. + It has special casing for the three introspection fields, `__schema`, + `__type` and `__typename`. + + `__typename` is special because it can always be queried as a field, even + in situations where no other fields are allowed, like on a Union. + + `__schema` and `__type` could get automatically added to the query type, + but that would require mutating type definitions, which would cause issues. 
+ """ + if field_name == "__schema": + return SchemaMetaFieldDef if self.query_type is parent_type else None + if field_name == "__type": + return TypeMetaFieldDef if self.query_type is parent_type else None + if field_name == "__typename": + return TypeNameMetaFieldDef + + # this function is part of a "hot" path inside executor and to assume presence + # of 'fields' is faster than to use `not is_union_type` + try: + return parent_type.fields[field_name] # type: ignore + except (AttributeError, KeyError): + return None + @property def validation_errors(self) -> Optional[List[GraphQLError]]: return self._validation_errors diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 584b468e..eeba22f9 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -27,16 +27,12 @@ GraphQLOutputType, GraphQLSchema, GraphQLType, - SchemaMetaFieldDef, - TypeMetaFieldDef, - TypeNameMetaFieldDef, get_named_type, get_nullable_type, is_composite_type, is_enum_type, is_input_object_type, is_input_type, - is_interface_type, is_list_type, is_object_type, is_output_type, @@ -54,7 +50,7 @@ GetFieldDefFn: TypeAlias = Callable[ - [GraphQLSchema, GraphQLType, FieldNode], Optional[GraphQLField] + [GraphQLSchema, GraphQLCompositeType, FieldNode], Optional[GraphQLField] ] @@ -264,24 +260,9 @@ def leave_enum_value(self) -> None: def get_field_def( - schema: GraphQLSchema, parent_type: GraphQLType, field_node: FieldNode + schema: GraphQLSchema, parent_type: GraphQLCompositeType, field_node: FieldNode ) -> Optional[GraphQLField]: - """Get field definition. - - Not exactly the same as the executor's definition of - :func:`graphql.execution.get_field_def`, in this statically evaluated environment - we do not always have an Object type, and need to handle Interface and Union types. 
- """ - name = field_node.name.value - if name == "__schema" and schema.query_type is parent_type: - return SchemaMetaFieldDef - if name == "__type" and schema.query_type is parent_type: - return TypeMetaFieldDef - if name == "__typename" and is_composite_type(parent_type): - return TypeNameMetaFieldDef - if is_object_type(parent_type) or is_interface_type(parent_type): - return parent_type.fields.get(name) - return None + return schema.get_field(parent_type, field_node.name.value) class TypeInfoVisitor(Visitor): diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index efd44f86..bcf5975d 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -26,6 +26,10 @@ GraphQLSchema, GraphQLString, GraphQLType, + GraphQLUnionType, + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, specified_directives, ) from graphql.utilities import build_schema, lexicographic_sort_schema, print_schema @@ -293,6 +297,60 @@ def preserves_the_order_of_user_provided_types(): copy_schema = GraphQLSchema(**schema.to_kwargs()) assert list(copy_schema.type_map) == type_names + def describe_get_field(): + pet_type = GraphQLInterfaceType("Pet", {"name": GraphQLField(GraphQLString)}) + cat_type = GraphQLObjectType( + "Cat", {"name": GraphQLField(GraphQLString)}, [pet_type] + ) + dog_type = GraphQLObjectType( + "Dog", {"name": GraphQLField(GraphQLString)}, [pet_type] + ) + cat_or_dog = GraphQLUnionType("CatOrDog", [cat_type, dog_type]) + query_type = GraphQLObjectType("Query", {"catOrDog": GraphQLField(cat_or_dog)}) + mutation_type = GraphQLObjectType("Mutation", {}) + subscription_type = GraphQLObjectType("Subscription", {}) + schema = GraphQLSchema(query_type, mutation_type, subscription_type) + + _get_field = schema.get_field + + def returns_known_field(): + assert _get_field(pet_type, "name") == pet_type.fields["name"] + assert _get_field(cat_type, "name") == cat_type.fields["name"] + + assert _get_field(query_type, "catOrDog") == 
query_type.fields["catOrDog"] + + def returns_none_for_unknown_fields(): + assert _get_field(cat_or_dog, "name") is None + + assert _get_field(query_type, "unknown") is None + assert _get_field(pet_type, "unknown") is None + assert _get_field(cat_type, "unknown") is None + assert _get_field(cat_or_dog, "unknown") is None + + def handles_introspection_fields(): + assert _get_field(query_type, "__typename") == TypeNameMetaFieldDef + assert _get_field(mutation_type, "__typename") == TypeNameMetaFieldDef + assert _get_field(subscription_type, "__typename") == TypeNameMetaFieldDef + + assert _get_field(pet_type, "__typename") is TypeNameMetaFieldDef + assert _get_field(cat_type, "__typename") is TypeNameMetaFieldDef + assert _get_field(dog_type, "__typename") is TypeNameMetaFieldDef + assert _get_field(cat_or_dog, "__typename") is TypeNameMetaFieldDef + + assert _get_field(query_type, "__type") is TypeMetaFieldDef + assert _get_field(query_type, "__schema") is SchemaMetaFieldDef + + def returns_non_for_introspection_fields_in_wrong_location(): + assert _get_field(pet_type, "__type") is None + assert _get_field(dog_type, "__type") is None + assert _get_field(mutation_type, "__type") is None + assert _get_field(subscription_type, "__type") is None + + assert _get_field(pet_type, "__schema") is None + assert _get_field(dog_type, "__schema") is None + assert _get_field(mutation_type, "__schema") is None + assert _get_field(subscription_type, "__schema") is None + def describe_validity(): def describe_when_not_assumed_valid(): def configures_the_schema_to_still_needing_validation(): diff --git a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index 28445257..699cce6d 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -1,3 +1,5 @@ +from typing import List, Optional, Tuple + from graphql.language import ( FieldNode, NameNode, @@ -10,7 +12,14 @@ print_ast, visit, ) -from graphql.type import GraphQLSchema, 
get_named_type, is_composite_type +from graphql.type import ( + GraphQLSchema, + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, + get_named_type, + is_composite_type, +) from graphql.utilities import TypeInfo, TypeInfoVisitor, build_schema from ..fixtures import kitchen_sink_query # noqa: F401 @@ -39,9 +48,13 @@ name(surname: Boolean): String } + union HumanOrAlien = Human | Alien + type QueryRoot { human(id: ID): Human alien: Alien + humanOrAlien: HumanOrAlien + pet: Pet } schema { @@ -140,6 +153,77 @@ def leave(self, *args): assert test_visitor.args == wrapped_visitor.args + def supports_introspection_fields(): + type_info = TypeInfo(test_schema) + + ast = parse( + """ + { + __typename + __type(name: "Cat") { __typename } + __schema { + __typename # in object type + } + humanOrAlien { + __typename # in union type + } + pet { + __typename # in interface type + } + someUnknownType { + __typename # unknown + } + pet { + __type # unknown + __schema # unknown + } + } + """ + ) + + visited_fields: List[Tuple[Optional[str], Optional[str]]] = [] + + class TestVisitor(Visitor): + @staticmethod + def enter_field(self, node: OperationDefinitionNode, *_args): + parent_type = type_info.get_parent_type() + type_name = getattr(type_info.get_parent_type(), "name", None) + field_def = type_info.get_field_def() + fields = getattr(parent_type, "fields", {}) + fields = dict( + **fields, + __type=TypeMetaFieldDef, + __typename=TypeNameMetaFieldDef, + __schema=SchemaMetaFieldDef, + ) + for name, field in fields.items(): + if field is field_def: + field_name = name + break + else: + field_name = None + visited_fields.append((type_name, field_name)) + + test_visitor = TestVisitor() + assert visit(ast, TypeInfoVisitor(type_info, test_visitor)) + + assert visited_fields == [ + ("QueryRoot", "__typename"), + ("QueryRoot", "__type"), + ("__Type", "__typename"), + ("QueryRoot", "__schema"), + ("__Schema", "__typename"), + ("QueryRoot", "humanOrAlien"), + ("HumanOrAlien", 
"__typename"), + ("QueryRoot", "pet"), + ("Pet", "__typename"), + ("QueryRoot", None), + (None, None), + ("QueryRoot", "pet"), + ("Pet", None), + ("Pet", None), + ] + def maintains_type_info_during_visit(): visited = [] From 1d6e008bc7d79b2c85ea0123d5065325b61acb86 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 3 Nov 2022 20:30:46 +0100 Subject: [PATCH 058/230] extend_schema: Do not modify standard directives Replicates graphql/graphql-js@fdc3555cc911c42571841cc0465c38b57f39d157 --- src/graphql/utilities/extend_schema.py | 5 ++++ tests/utilities/test_extend_schema.py | 33 ++++++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 8cf9b614..445f0c03 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -80,6 +80,7 @@ is_non_null_type, is_object_type, is_scalar_type, + is_specified_directive, is_specified_scalar_type, is_union_type, specified_scalar_types, @@ -257,6 +258,10 @@ def replace_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: # noinspection PyShadowingNames def replace_directive(self, directive: GraphQLDirective) -> GraphQLDirective: + if is_specified_directive(directive): + # Builtin directives are not extended. 
+ return directive + kwargs = directive.to_kwargs() return GraphQLDirective( **merge_kwargs( diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index db6a1c69..49db1dc6 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -23,6 +23,7 @@ assert_object_type, assert_scalar_type, assert_union_type, + specified_directives, validate_schema, ) from graphql.utilities import build_schema, concat_ast, extend_schema, print_schema @@ -102,6 +103,38 @@ def can_be_used_for_limited_execution(): ) assert result == ({"newField": "123"}, None) + def does_not_modify_built_in_types_and_directives(): + schema = build_schema( + """ + type Query { + str: String + int: Int + float: Float + id: ID + bool: Boolean + } + """ + ) + + extension_sdl = dedent( + """ + extend type Query { + foo: String + } + """ + ) + + extended_schema = extend_schema(schema, parse(extension_sdl)) + + # built-ins are used + assert extended_schema.get_type("Int") is GraphQLInt + assert extended_schema.get_type("Float") is GraphQLFloat + assert extended_schema.get_type("String") is GraphQLString + assert extended_schema.get_type("Boolean") is GraphQLBoolean + assert extended_schema.get_type("ID") is GraphQLID + + assert extended_schema.directives == specified_directives + def extends_objects_by_adding_new_fields(): schema = build_schema( ''' From 47ecdb37cefaa0c31f582d0a65d96e51bb1961b5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 3 Nov 2022 20:59:58 +0100 Subject: [PATCH 059/230] subscribe-test: extract subscribe_with_bad_fn function Replicates graphql/graphql-js@2deb27214ee4b74c09b19c55d934d72b0a7fd355 --- tests/execution/test_subscribe.py | 61 ++++++++++++++----------------- 1 file changed, 27 insertions(+), 34 deletions(-) diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index faf10f64..24655915 100644 --- a/tests/execution/test_subscribe.py +++ 
b/tests/execution/test_subscribe.py @@ -3,7 +3,12 @@ from pytest import mark, raises -from graphql.execution import MapAsyncIterator, create_source_event_stream, subscribe +from graphql.execution import ( + ExecutionResult, + MapAsyncIterator, + create_source_event_stream, + subscribe, +) from graphql.language import parse from graphql.pyutils import SimplePubSub from graphql.type import ( @@ -132,6 +137,22 @@ def transform(new_email): DummyQueryType = GraphQLObjectType("Query", {"dummy": GraphQLField(GraphQLString)}) +async def subscribe_with_bad_fn(subscribe_fn: Callable) -> ExecutionResult: + schema = GraphQLSchema( + query=DummyQueryType, + subscription=GraphQLObjectType( + "Subscription", + {"foo": GraphQLField(GraphQLString, subscribe=subscribe_fn)}, + ), + ) + document = parse("subscription { foo }") + result = await subscribe(schema, document) + + assert isinstance(result, ExecutionResult) + assert await create_source_event_stream(schema, document) == result + return result + + # Check all error cases when initializing the subscription. def describe_subscription_initialization_phase(): @mark.asyncio @@ -333,22 +354,8 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", - { - "foo": GraphQLField( - GraphQLString, subscribe=lambda _obj, _info: "test" - ) - }, - ), - ) - - document = parse("subscription { foo }") - with raises(TypeError) as exc_info: - await subscribe(schema, document) + await subscribe_with_bad_fn(lambda _obj, _info: "test") assert str(exc_info.value) == ( "Subscription field must return AsyncIterable. Received: 'test'." 
@@ -356,20 +363,6 @@ async def throws_an_error_if_subscribe_does_not_return_an_iterator(): @mark.asyncio async def resolves_to_an_error_for_subscription_resolver_errors(): - async def subscribe_with_fn(subscribe_fn: Callable): - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", - {"foo": GraphQLField(GraphQLString, subscribe=subscribe_fn)}, - ), - ) - document = parse("subscription { foo }") - result = await subscribe(schema, document) - - assert await create_source_event_stream(schema, document) == result - return result - expected_result = ( None, [ @@ -385,25 +378,25 @@ async def subscribe_with_fn(subscribe_fn: Callable): def return_error(_obj, _info): return TypeError("test error") - assert await subscribe_with_fn(return_error) == expected_result + assert await subscribe_with_bad_fn(return_error) == expected_result # Throwing an error def throw_error(*_args): raise TypeError("test error") - assert await subscribe_with_fn(throw_error) == expected_result + assert await subscribe_with_bad_fn(throw_error) == expected_result # Resolving to an error async def resolve_error(*_args): return TypeError("test error") - assert await subscribe_with_fn(resolve_error) == expected_result + assert await subscribe_with_bad_fn(resolve_error) == expected_result # Rejecting with an error async def reject_error(*_args): return TypeError("test error") - assert await subscribe_with_fn(reject_error) == expected_result + assert await subscribe_with_bad_fn(reject_error) == expected_result @mark.asyncio async def resolves_to_an_error_if_variables_were_wrong_type(): From 8d7d0f681f0f7348562d96a3913b4f8052acb294 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 3 Nov 2022 21:58:44 +0100 Subject: [PATCH 060/230] Change incorrect subscribe return type to a GraphQLError rather than systems error Replicates graphql/graphql-js@ea1894ae22bd732f1dc7fa4f7bf21f1525688e9a --- src/graphql/execution/subscribe.py | 19 ++++++++----------- 
tests/execution/test_subscribe.py | 15 ++++++++++----- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index 992fb051..358487ec 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -145,18 +145,8 @@ async def create_source_event_stream( return ExecutionResult(data=None, errors=context) try: - event_stream = await execute_subscription(context) - - # Assert field returned an event stream, otherwise yield an error. - if not isinstance(event_stream, AsyncIterable): - raise TypeError( - "Subscription field must return AsyncIterable." - f" Received: {inspect(event_stream)}." - ) - return event_stream - + return await execute_subscription(context) except GraphQLError as error: - # Report it as an ExecutionResult, containing only errors and no data. return ExecutionResult(data=None, errors=[error]) @@ -207,6 +197,13 @@ async def execute_subscription(context: ExecutionContext) -> AsyncIterable[Any]: if isinstance(event_stream, Exception): raise event_stream + # Assert field returned an event stream, otherwise yield an error. + if not isinstance(event_stream, AsyncIterable): + raise GraphQLError( + "Subscription field must return AsyncIterable." + f" Received: {inspect(event_stream)}." 
+ ) + return event_stream except Exception as error: raise located_error(error, field_nodes, path.as_list()) diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 24655915..94a4c4f9 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -354,11 +354,16 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): - with raises(TypeError) as exc_info: - await subscribe_with_bad_fn(lambda _obj, _info: "test") - - assert str(exc_info.value) == ( - "Subscription field must return AsyncIterable. Received: 'test'." + assert await subscribe_with_bad_fn(lambda _obj, _info: "test") == ( + None, + [ + { + "message": "Subscription field must return AsyncIterable." + " Received: 'test'.", + "locations": [(1, 16)], + "path": ["foo"], + } + ], ) @mark.asyncio From 619a77a7d79415aca63da5d7070249ed0a7ac3b5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 3 Nov 2022 22:12:48 +0100 Subject: [PATCH 061/230] Alpha release v3.3.0a2 with new features --- .bumpversion.cfg | 2 +- README.md | 2 +- docs/conf.py | 2 +- poetry.lock | 64 +++++++++++++++++++++--------------------- pyproject.toml | 2 +- src/graphql/version.py | 2 +- 6 files changed, 37 insertions(+), 37 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 6fe9800a..9cb8d30b 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a1 +current_version = 3.3.0a2 commit = False tag = False diff --git a/README.md b/README.md index e171f4a5..bbe91db9 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ reliable and compatible with GraphQL.js. The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0. 
-You can also try out the latest alpha version 3.3.0a1 of GraphQL-core that is up-to-date with GraphQL.js version 17.0.0a1. +You can also try out the latest alpha version 3.3.0a2 of GraphQL-core that is up-to-date with GraphQL.js version 17.0.0a1. Please note that this new minor version of GraphQL-core does not support Python 3.7 anymore. Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Changes in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. Therefore, we recommend something like `=~ 3.2.0` as version specifier when including GraphQL-core as a dependency. diff --git a/docs/conf.py b/docs/conf.py index 96ba12f4..2763fd54 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = '3.3.0a1' +version = release = '3.3.0a2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/poetry.lock b/poetry.lock index 959c78fb..d04c8859 100644 --- a/poetry.lock +++ b/poetry.lock @@ -18,11 +18,11 @@ python-versions = ">=3.5" dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] -name = "Babel" -version = "2.10.3" +name = "babel" +version = "2.11.0" description = "Internationalization utilities" category = "dev" optional = false @@ -98,7 +98,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -226,7 +226,7 @@ python-versions = ">=3.6" smmap = ">=3.0.1,<6" [[package]] -name = "GitPython" +name = "gitpython" version = "3.1.29" description = "GitPython is a python library used to interact with Git repositories" category = "dev" @@ -287,12 +287,12 @@ python-versions = ">=3.6.1,<4.0" [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +pipfile-deprecated-finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] -requirements_deprecated_finder = ["pip-api", "pipreqs"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
category = "dev" @@ -306,7 +306,7 @@ MarkupSafe = ">=2.0" i18n = ["Babel (>=2.7)"] [[package]] -name = "MarkupSafe" +name = "markupsafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" @@ -435,7 +435,7 @@ optional = false python-versions = ">=3.6" [[package]] -name = "Pygments" +name = "pygments" version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" @@ -548,14 +548,14 @@ pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2022.5" +version = "2022.6" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "dev" @@ -578,7 +578,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" @@ -618,7 +618,7 @@ optional = false python-versions = "*" [[package]] -name = "Sphinx" +name = "sphinx" version = "4.3.2" description = "Python documentation generator" category = "dev" @@ -651,18 +651,18 @@ test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinx-rtd-theme" -version = "1.0.0" +version = "1.1.0" description = "Read the Docs theme for Sphinx" category = "dev" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" [package.dependencies] docutils = "<0.18" -sphinx = ">=1.6" +sphinx = ">=1.6,<6" [package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinxcontrib-applehelp" @@ -840,7 +840,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" 
[metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "9e24c4a6981f196d340d868d48fe7285655f3161fa0eaa14aa6689e3a48ece0c" +content-hash = "723ec4ca1362e0c9a55618da9b3df294685dbe0f0b6f62a5f5f69e3b9ed0a406" [metadata.files] alabaster = [ @@ -851,9 +851,9 @@ attrs = [ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] -Babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +babel = [ + {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, + {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, ] bandit = [ {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, @@ -986,7 +986,7 @@ gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] -GitPython = [ +gitpython = [ {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, ] @@ -1010,11 +1010,11 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] -Jinja2 
= [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] -MarkupSafe = [ +markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -1126,7 +1126,7 @@ pyflakes = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] -Pygments = [ +pygments = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] @@ -1159,10 +1159,10 @@ pytest-timeout = [ {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, ] pytz = [ - {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"}, - {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"}, + {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, + {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, ] -PyYAML = 
[ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -1224,13 +1224,13 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -Sphinx = [ +sphinx = [ {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, ] sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, - {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, + {file = "sphinx_rtd_theme-1.1.0-py2.py3-none-any.whl", hash = "sha256:36da4267c804b98197419df8aa415d245449b8945301fce8c961038e0ba79ec5"}, + {file = "sphinx_rtd_theme-1.1.0.tar.gz", hash = "sha256:6e20f00f62b2c05434a33c5116bc3348a41ca94af03d3d7d1714c63457073bb3"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, diff --git a/pyproject.toml b/pyproject.toml index d7727970..8ab75bf9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a1" +version = "3.3.0a2" description = """ GraphQL-core is a Python 
port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 13de8d48..0a7c851d 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -7,7 +7,7 @@ __all__ = ["version", "version_info", "version_js", "version_info_js"] -version = "3.3.0a1" +version = "3.3.0a2" version_js = "17.0.0a1" From 59504707428407735253452126cfa80ce15b388f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 20 Nov 2022 15:01:11 +0100 Subject: [PATCH 062/230] Update mypy --- poetry.lock | 119 +++++++++++++++++++----------------- pyproject.toml | 4 +- tests/test_user_registry.py | 3 +- tox.ini | 4 +- 4 files changed, 68 insertions(+), 62 deletions(-) diff --git a/poetry.lock b/poetry.lock index d04c8859..8a15dabf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -152,7 +152,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "exceptiongroup" -version = "1.0.0" +version = "1.0.4" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -201,18 +201,18 @@ flake8 = ">=5.0.0" [[package]] name = "flake8-bugbear" -version = "22.9.23" +version = "22.10.27" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "gitdb" @@ -323,7 +323,7 @@ python-versions = ">=3.6" [[package]] name = "mypy" -version = "0.982" +version = "0.991" description = "Optional static typing for Python" category = "dev" optional = false @@ -337,6 +337,7 @@ typing-extensions = ">=3.10" [package.extras] dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] @@ -361,7 +362,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.10.1" +version = "0.10.2" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false @@ -377,15 +378,15 @@ python-versions = ">=2.6" [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] +test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -479,7 +480,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. 
[[package]] name = "pytest-asyncio" -version = "0.20.1" +version = "0.20.2" description = "Pytest support for asyncio" category = "dev" optional = false @@ -582,7 +583,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "65.5.0" +version = "65.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false @@ -590,7 +591,7 @@ python-versions = ">=3.7" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -651,7 +652,7 @@ test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinx-rtd-theme" 
-version = "1.1.0" +version = "1.1.1" description = "Read the Docs theme for Sphinx" category = "dev" optional = false @@ -757,7 +758,7 @@ python-versions = ">=3.7" [[package]] name = "tox" -version = "3.27.0" +version = "3.27.1" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -840,7 +841,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "723ec4ca1362e0c9a55618da9b3df294685dbe0f0b6f62a5f5f69e3b9ed0a406" +content-hash = "c2af909043cc15321f4940901cd26042ac60ce824b71cc651735e543efef8852" [metadata.files] alabaster = [ @@ -963,8 +964,8 @@ docutils = [ {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.0-py3-none-any.whl", hash = "sha256:2ac84b496be68464a2da60da518af3785fff8b7ec0d090a581604bc870bdee41"}, - {file = "exceptiongroup-1.0.0.tar.gz", hash = "sha256:affbabf13fb6e98988c38d9c5650e701569fe3c1de3233cfb61c5f33774690ad"}, + {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, + {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, ] filelock = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, @@ -979,8 +980,8 @@ flake8-bandit = [ {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, ] flake8-bugbear = [ - {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"}, - {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"}, + {file = 
"flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, + {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, ] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, @@ -1061,30 +1062,36 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = 
"sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = 
"sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, + {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, + {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, + {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, + {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, + {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, + {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, + {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, + {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, + {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, + {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, + {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, + {file = 
"mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, + {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, + {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, + {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, + {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, + {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, + {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, + {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, + {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, + {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, + {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, + {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, + {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1095,16 +1102,16 @@ packaging = [ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ - {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, - {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, + {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, + {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, ] pbr = [ {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, ] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, + {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, + {file = "platformdirs-2.5.4.tar.gz", hash = 
"sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -1139,8 +1146,8 @@ pytest = [ {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, ] pytest-asyncio = [ - {file = "pytest-asyncio-0.20.1.tar.gz", hash = "sha256:626699de2a747611f3eeb64168b3575f70439b06c3d0206e6ceaeeb956e65519"}, - {file = "pytest_asyncio-0.20.1-py3-none-any.whl", hash = "sha256:2c85a835df33fda40fe3973b451e0c194ca11bc2c007eabff90bb3d156fc172b"}, + {file = "pytest-asyncio-0.20.2.tar.gz", hash = "sha256:32a87a9836298a881c0ec637ebcc952cfe23a56436bdc0d09d1511941dd8a812"}, + {file = "pytest_asyncio-0.20.2-py3-none-any.whl", hash = "sha256:07e0abf9e6e6b95894a39f688a4a875d63c2128f76c02d03d16ccbc35bcc0f8a"}, ] pytest-benchmark = [ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, @@ -1209,8 +1216,8 @@ requests = [ {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] setuptools = [ - {file = "setuptools-65.5.0-py3-none-any.whl", hash = "sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356"}, - {file = "setuptools-65.5.0.tar.gz", hash = "sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17"}, + {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, + {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1229,8 +1236,8 @@ sphinx = [ {file = "Sphinx-4.3.2.tar.gz", hash = 
"sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, ] sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-1.1.0-py2.py3-none-any.whl", hash = "sha256:36da4267c804b98197419df8aa415d245449b8945301fce8c961038e0ba79ec5"}, - {file = "sphinx_rtd_theme-1.1.0.tar.gz", hash = "sha256:6e20f00f62b2c05434a33c5116bc3348a41ca94af03d3d7d1714c63457073bb3"}, + {file = "sphinx_rtd_theme-1.1.1-py2.py3-none-any.whl", hash = "sha256:31faa07d3e97c8955637fc3f1423a5ab2c44b74b8cc558a51498c202ce5cbda7"}, + {file = "sphinx_rtd_theme-1.1.1.tar.gz", hash = "sha256:6146c845f1e1947b3c3dd4432c28998a1693ccc742b4f9ad7c63129f0757c103"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, @@ -1265,8 +1272,8 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] tox = [ - {file = "tox-3.27.0-py2.py3-none-any.whl", hash = "sha256:89e4bc6df3854e9fc5582462e328dd3660d7d865ba625ae5881bbc63836a6324"}, - {file = "tox-3.27.0.tar.gz", hash = "sha256:d2c945f02a03d4501374a3d5430877380deb69b218b1df9b7f1d2f2a10befaf9"}, + {file = "tox-3.27.1-py2.py3-none-any.whl", hash = "sha256:f52ca66eae115fcfef0e77ef81fd107133d295c97c52df337adedb8dfac6ab84"}, + {file = "tox-3.27.1.tar.gz", hash = "sha256:b2a920e35a668cc06942ffd1cf3a4fb221a4d909ca72191fb6d84b0b18a7be04"}, ] typed-ast = [ {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, diff --git a/pyproject.toml b/pyproject.toml index 8ab75bf9..10cc93ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,9 +64,9 @@ optional = true black = "22.10.0" flake8 = "^5.0" flake8-bandit = "^4.1" -flake8-bugbear = "22.9.23" +flake8-bugbear = "22.10.27" isort = "^5.10" -mypy = "0.982" +mypy = "0.991" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/tests/test_user_registry.py 
b/tests/test_user_registry.py index 670f5d0f..0aa08fd4 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -504,8 +504,7 @@ async def receive_all(): break tasks = [ - create_task(task()) if create_task else task() - for task in (mutate_users, receive_one, receive_all) + create_task(task()) for task in (mutate_users, receive_one, receive_all) ] done, pending = await wait(tasks, timeout=1) assert not pending diff --git a/tox.ini b/tox.ini index 0002f4be..5745a729 100644 --- a/tox.ini +++ b/tox.ini @@ -24,7 +24,7 @@ basepython = python3.10 deps = flake8>=5,<6 flake8-bandit>=4.1,<6 - flake8-bugbear==22.9.23 + flake8-bugbear==22.10.27 commands = flake8 src tests @@ -37,7 +37,7 @@ commands = [testenv:mypy] basepython = python3.10 deps = - mypy==0.982 + mypy==0.991 pytest>=7.2,<8 commands = mypy src tests From e30f16692dda237207d25f9ea3545c31070ad7df Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 20 Nov 2022 18:28:38 +0100 Subject: [PATCH 063/230] subscribe: stay synchronous when possible This (breaking!) change aligns the return types of `execute` and `subscribe` (as well as `create_source_event_stream`) with respect to returning values or awaitables. 
Replicates graphql/graphql-js@6d42ced81226c230790335ccee8bdfdd59ecfca6 --- src/graphql/execution/subscribe.py | 103 +++++++++++++---- tests/execution/test_customize.py | 4 +- tests/execution/test_subscribe.py | 174 +++++++++++++++++++++-------- tests/test_user_registry.py | 4 +- 4 files changed, 213 insertions(+), 72 deletions(-) diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py index 358487ec..a8db0bbe 100644 --- a/src/graphql/execution/subscribe.py +++ b/src/graphql/execution/subscribe.py @@ -1,5 +1,15 @@ from inspect import isawaitable -from typing import Any, AsyncIterable, AsyncIterator, Dict, Optional, Type, Union +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Awaitable, + Dict, + Optional, + Type, + Union, + cast, +) from ..error import GraphQLError, located_error from ..execution.collect_fields import collect_fields @@ -11,7 +21,7 @@ ) from ..execution.values import get_argument_values from ..language import DocumentNode -from ..pyutils import Path, inspect +from ..pyutils import AwaitableOrValue, Path, inspect from ..type import GraphQLFieldResolver, GraphQLSchema from .map_async_iterator import MapAsyncIterator @@ -19,7 +29,7 @@ __all__ = ["subscribe", "create_source_event_stream"] -async def subscribe( +def subscribe( schema: GraphQLSchema, document: DocumentNode, root_value: Any = None, @@ -29,7 +39,7 @@ async def subscribe( field_resolver: Optional[GraphQLFieldResolver] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: +) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: """Create a GraphQL subscription. Implements the "Subscribe" algorithm described in the GraphQL spec. 
@@ -49,7 +59,7 @@ async def subscribe( If the operation succeeded, the coroutine will yield an AsyncIterator, which yields a stream of ExecutionResults representing the response stream. """ - result_or_stream = await create_source_event_stream( + result_or_stream = create_source_event_stream( schema, document, root_value, @@ -59,8 +69,6 @@ async def subscribe( subscribe_field_resolver, execution_context_class, ) - if isinstance(result_or_stream, ExecutionResult): - return result_or_stream async def map_source_to_response(payload: Any) -> ExecutionResult: """Map source to response. @@ -84,11 +92,28 @@ async def map_source_to_response(payload: Any) -> ExecutionResult: ) return await result if isawaitable(result) else result + if (execution_context_class or ExecutionContext).is_awaitable(result_or_stream): + awaitable_result_or_stream = cast(Awaitable, result_or_stream) + + # noinspection PyShadowingNames + async def await_result() -> Any: + result_or_stream = await awaitable_result_or_stream + if isinstance(result_or_stream, ExecutionResult): + return result_or_stream + return MapAsyncIterator(result_or_stream, map_source_to_response) + + return await_result() + + if isinstance(result_or_stream, ExecutionResult): + return result_or_stream + # Map every source value to a ExecutionResult value as described above. 
- return MapAsyncIterator(result_or_stream, map_source_to_response) + return MapAsyncIterator( + cast(AsyncIterable[Any], result_or_stream), map_source_to_response + ) -async def create_source_event_stream( +def create_source_event_stream( schema: GraphQLSchema, document: DocumentNode, root_value: Any = None, @@ -97,7 +122,7 @@ async def create_source_event_stream( operation_name: Optional[str] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> Union[AsyncIterable[Any], ExecutionResult]: +) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: """Create source event stream Implements the "CreateSourceEventStream" algorithm described in the GraphQL @@ -145,12 +170,28 @@ async def create_source_event_stream( return ExecutionResult(data=None, errors=context) try: - return await execute_subscription(context) + event_stream = execute_subscription(context) except GraphQLError as error: return ExecutionResult(data=None, errors=[error]) + if context.is_awaitable(event_stream): + awaitable_event_stream = cast(Awaitable, event_stream) + + # noinspection PyShadowingNames + async def await_event_stream() -> Union[AsyncIterable[Any], ExecutionResult]: + try: + return await awaitable_event_stream + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) -async def execute_subscription(context: ExecutionContext) -> AsyncIterable[Any]: + return await_event_stream() + + return event_stream + + +def execute_subscription( + context: ExecutionContext, +) -> AwaitableOrValue[AsyncIterable[Any]]: schema = context.schema root_type = schema.subscription_type @@ -191,19 +232,33 @@ async def execute_subscription(context: ExecutionContext) -> AsyncIterable[Any]: # AsyncIterable yielding raw payloads. 
resolve_fn = field_def.subscribe or context.subscribe_field_resolver - event_stream = resolve_fn(context.root_value, info, **args) - if context.is_awaitable(event_stream): - event_stream = await event_stream - if isinstance(event_stream, Exception): - raise event_stream + result = resolve_fn(context.root_value, info, **args) + if context.is_awaitable(result): - # Assert field returned an event stream, otherwise yield an error. - if not isinstance(event_stream, AsyncIterable): - raise GraphQLError( - "Subscription field must return AsyncIterable." - f" Received: {inspect(event_stream)}." - ) + # noinspection PyShadowingNames + async def await_result() -> AsyncIterable[Any]: + try: + return assert_event_stream(await result) + except Exception as error: + raise located_error(error, field_nodes, path.as_list()) + + return await_result() + + return assert_event_stream(result) - return event_stream except Exception as error: raise located_error(error, field_nodes, path.as_list()) + + +def assert_event_stream(result: Any) -> AsyncIterable: + if isinstance(result, Exception): + raise result + + # Assert field returned an event stream, otherwise yield an error. + if not isinstance(result, AsyncIterable): + raise GraphQLError( + "Subscription field must return AsyncIterable." + f" Received: {inspect(result)}." 
+ ) + + return result diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index baca20ba..a82d7e0d 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -67,7 +67,7 @@ class Root: async def custom_foo(): yield {"foo": "FooValue"} - subscription = await subscribe( + subscription = subscribe( schema, document=parse("subscription { foo }"), root_value=Root(), @@ -111,7 +111,7 @@ def resolve_foo(message, _info): ) document = parse("subscription { foo }") - subscription = await subscribe( + subscription = subscribe( schema, document, context_value={}, diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 94a4c4f9..f01dc354 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,5 +1,15 @@ import asyncio -from typing import Any, Callable, Dict, List +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Dict, + List, + TypeVar, + Union, + cast, +) from pytest import mark, raises @@ -10,7 +20,7 @@ subscribe, ) from graphql.language import parse -from graphql.pyutils import SimplePubSub +from graphql.pyutils import AwaitableOrValue, SimplePubSub, is_awaitable from graphql.type import ( GraphQLArgument, GraphQLBoolean, @@ -37,6 +47,8 @@ async def anext(iterator): return await iterator.__anext__() +T = TypeVar("T") + Email = TypedDict( "Email", { @@ -137,7 +149,30 @@ def transform(new_email): DummyQueryType = GraphQLObjectType("Query", {"dummy": GraphQLField(GraphQLString)}) -async def subscribe_with_bad_fn(subscribe_fn: Callable) -> ExecutionResult: +def assert_equal_awaitables_or_values( + value1: AwaitableOrValue[T], value2: AwaitableOrValue[T] +) -> AwaitableOrValue[T]: + if is_awaitable(value1): + awaitable1 = cast(Awaitable[T], value1) + assert is_awaitable(value2) + awaitable2 = cast(Awaitable[T], value2) + + # noinspection PyShadowingNames + async def awaited_equal_value(): + value1 = await awaitable1 + value2 = 
await awaitable2 + assert value1 == value2 + return value1 + + return awaited_equal_value() + assert not is_awaitable(value2) + assert value1 == value2 + return value1 + + +def subscribe_with_bad_fn( + subscribe_fn: Callable, +) -> AwaitableOrValue[Union[ExecutionResult, AsyncIterable[Any]]]: schema = GraphQLSchema( query=DummyQueryType, subscription=GraphQLObjectType( @@ -146,11 +181,10 @@ async def subscribe_with_bad_fn(subscribe_fn: Callable) -> ExecutionResult: ), ) document = parse("subscription { foo }") - result = await subscribe(schema, document) - assert isinstance(result, ExecutionResult) - assert await create_source_event_stream(schema, document) == result - return result + return assert_equal_awaitables_or_values( + subscribe(schema, document), create_source_event_stream(schema, document) + ) # Check all error cases when initializing the subscription. @@ -169,9 +203,7 @@ async def empty_async_iterator(_info): for value in (): # type: ignore yield value # pragma: no cover - ai = await subscribe( - email_schema, document, {"importantEmail": empty_async_iterator} - ) + ai = subscribe(email_schema, document, {"importantEmail": empty_async_iterator}) with raises(StopAsyncIteration): await anext(ai) @@ -193,7 +225,7 @@ async def accepts_multiple_subscription_fields_defined_in_schema(): async def foo_generator(_info): yield {"foo": "FooValue"} - subscription = await subscribe( + subscription = subscribe( schema, parse("subscription { foo }"), {"foo": foo_generator} ) assert isinstance(subscription, MapAsyncIterator) @@ -215,7 +247,7 @@ async def foo_generator(_obj, _info): ), ) - subscription = await subscribe(schema, parse("subscription { foo }")) + subscription = subscribe(schema, parse("subscription { foo }")) assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) @@ -228,15 +260,22 @@ async def foo_generator(_obj, _info): await asyncio.sleep(0) yield {"foo": "FooValue"} + async def 
subscribe_fn(obj, info): + await asyncio.sleep(0) + return foo_generator(obj, info) + schema = GraphQLSchema( query=DummyQueryType, subscription=GraphQLObjectType( "Subscription", - {"foo": GraphQLField(GraphQLString, subscribe=foo_generator)}, + {"foo": GraphQLField(GraphQLString, subscribe=subscribe_fn)}, ), ) - subscription = await subscribe(schema, parse("subscription { foo }")) + awaitable = subscribe(schema, parse("subscription { foo }")) + assert is_awaitable(awaitable) + + subscription = await awaitable assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) @@ -266,7 +305,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover ), ) - subscription = await subscribe(schema, parse("subscription { foo bar }")) + subscription = subscribe(schema, parse("subscription { foo bar }")) assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ( @@ -290,23 +329,23 @@ async def throws_an_error_if_some_of_required_arguments_are_missing(): ) with raises(TypeError, match="^Expected None to be a GraphQL schema\\.$"): - await subscribe(None, document) # type: ignore + subscribe(None, document) # type: ignore with raises(TypeError, match="missing .* positional argument: 'schema'"): - await subscribe(document=document) # type: ignore + subscribe(document=document) # type: ignore with raises(TypeError, match="^Must provide document\\.$"): - await subscribe(schema, None) # type: ignore + subscribe(schema, None) # type: ignore with raises(TypeError, match="missing .* positional argument: 'document'"): - await subscribe(schema=schema) # type: ignore + subscribe(schema=schema) # type: ignore @mark.asyncio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") - result = await subscribe(schema, document) + result = subscribe(schema, document) assert result == ( None, @@ -329,7 
+368,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ) document = parse("subscription { unknownField }") - result = await subscribe(schema, document) + result = subscribe(schema, document) assert result == ( None, [ @@ -349,12 +388,12 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): ), ) with raises(TypeError, match="^Must provide document\\.$"): - await subscribe(schema=schema, document={}) # type: ignore + subscribe(schema=schema, document={}) # type: ignore @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): - assert await subscribe_with_bad_fn(lambda _obj, _info: "test") == ( + expected_result = ( None, [ { @@ -366,6 +405,18 @@ async def throws_an_error_if_subscribe_does_not_return_an_iterator(): ], ) + def sync_fn(_obj, _info): + return "test" + + assert subscribe_with_bad_fn(sync_fn) == expected_result + + async def async_fn(obj, info): + return sync_fn(obj, info) + + result = subscribe_with_bad_fn(async_fn) + assert is_awaitable(result) + assert await result == expected_result + @mark.asyncio async def resolves_to_an_error_for_subscription_resolver_errors(): expected_result = ( @@ -380,28 +431,33 @@ async def resolves_to_an_error_for_subscription_resolver_errors(): ) # Returning an error - def return_error(_obj, _info): + def return_error(*_args): return TypeError("test error") - assert await subscribe_with_bad_fn(return_error) == expected_result + assert subscribe_with_bad_fn(return_error) == expected_result # Throwing an error def throw_error(*_args): raise TypeError("test error") - assert await subscribe_with_bad_fn(throw_error) == expected_result + assert subscribe_with_bad_fn(throw_error) == expected_result # Resolving to an error - async def resolve_error(*_args): - return TypeError("test error") + async def resolve_to_error(*args): + return return_error(*args) - assert await 
subscribe_with_bad_fn(resolve_error) == expected_result + result = subscribe_with_bad_fn(resolve_to_error) + assert is_awaitable(result) + assert await result == expected_result # Rejecting with an error - async def reject_error(*_args): - return TypeError("test error") - assert await subscribe_with_bad_fn(reject_error) == expected_result + async def reject_with_error(*args): + return throw_error(*args) + + result = subscribe_with_bad_fn(reject_with_error) + assert is_awaitable(result) + assert await result == expected_result @mark.asyncio async def resolves_to_an_error_if_variables_were_wrong_type(): @@ -428,7 +484,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # If we receive variables that cannot be coerced correctly, subscribe() will # resolve to an ExecutionResult that contains an informative error description. - result = await subscribe(schema, document, variable_values=variable_values) + result = subscribe(schema, document, variable_values=variable_values) assert result == ( None, @@ -450,10 +506,10 @@ def describe_subscription_publish_phase(): async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) + subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) - second_subscription = await create_subscription(pubsub) + second_subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) payload1 = anext(subscription) @@ -484,7 +540,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): @mark.asyncio async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) + subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) # Wait for the next subscription payload. 
@@ -562,7 +618,7 @@ async def produces_a_payload_per_subscription_event(): @mark.asyncio async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) + subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) payload = anext(subscription) @@ -618,7 +674,7 @@ async def produces_a_payload_when_there_are_multiple_events(): @mark.asyncio async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) + subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) payload = anext(subscription) @@ -668,7 +724,7 @@ async def should_not_trigger_when_subscription_is_already_done(): @mark.asyncio async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) + subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) payload = anext(subscription) @@ -709,7 +765,7 @@ async def should_not_trigger_when_subscription_is_thrown(): @mark.asyncio async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) + subscription = create_subscription(pubsub) assert isinstance(subscription, MapAsyncIterator) payload = anext(subscription) @@ -789,7 +845,7 @@ def resolve_message(message, _info): ) document = parse("subscription { newMessage }") - subscription = await subscribe(schema, document) + subscription = subscribe(schema, document) assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -834,7 +890,7 @@ def resolve_message(message, _info): ) document = parse("subscription { newMessage }") - subscription = await subscribe(schema, document) + subscription = subscribe(schema, document) assert isinstance(subscription, 
MapAsyncIterator) assert await (anext(subscription)) == ({"newMessage": "Hello"}, None) @@ -848,7 +904,7 @@ def resolve_message(message, _info): await anext(subscription) @mark.asyncio - async def should_work_with_async_resolve_function(): + async def should_work_with_sync_resolve_function(): async def generate_messages(_obj, _info): yield "Hello" @@ -870,7 +926,37 @@ def resolve_message(message, _info): ) document = parse("subscription { newMessage }") - subscription = await subscribe(schema, document) + subscription = subscribe(schema, document) + assert isinstance(subscription, MapAsyncIterator) + + assert await anext(subscription) == ({"newMessage": "Hello"}, None) + + @mark.asyncio + async def should_work_with_async_resolve_function(): + async def generate_messages(_obj, _info): + await asyncio.sleep(0) + yield "Hello" + + async def resolve_message(message, _info): + await asyncio.sleep(0) + return message + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=resolve_message, + subscribe=generate_messages, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = subscribe(schema, document) assert isinstance(subscription, MapAsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 0aa08fd4..74c0e56e 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -410,7 +410,7 @@ async def subscribe_to_user_mutations(context): """ variables = {"userId": "0"} - subscription_one = await subscribe( + subscription_one = subscribe( schema, parse(query), context_value=context, variable_values=variables ) assert isinstance(subscription_one, MapAsyncIterator) @@ -424,7 +424,7 @@ async def subscribe_to_user_mutations(context): } """ - subscription_all = await subscribe(schema, parse(query), context_value=context) + 
subscription_all = subscribe(schema, parse(query), context_value=context) assert isinstance(subscription_all, MapAsyncIterator) received_one = [] From 689a70d052a58277d20cc6b6348b0eb31ad3a113 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 20 Nov 2022 18:57:13 +0100 Subject: [PATCH 064/230] refactor: move `subscribe` code to `execute` file Replicates graphql/graphql-js@e24f426ad2afeddecb0b081e1f9f7434ab04c1ec --- src/graphql/execution/__init__.py | 3 +- src/graphql/execution/execute.py | 239 ++++++++++++++++++++++++++ src/graphql/execution/subscribe.py | 264 ----------------------------- 3 files changed, 241 insertions(+), 265 deletions(-) delete mode 100644 src/graphql/execution/subscribe.py diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 7317fef2..a9cd45b6 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -5,17 +5,18 @@ """ from .execute import ( + create_source_event_stream, execute, execute_sync, default_field_resolver, default_type_resolver, + subscribe, ExecutionContext, ExecutionResult, FormattedExecutionResult, Middleware, ) from .map_async_iterator import MapAsyncIterator -from .subscribe import subscribe, create_source_event_stream from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 60ad8211..2cf3dda6 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -6,6 +6,7 @@ from typing import ( Any, AsyncIterable, + AsyncIterator, Awaitable, Callable, Dict, @@ -58,16 +59,19 @@ is_object_type, ) from .collect_fields import collect_fields, collect_subfields +from .map_async_iterator import MapAsyncIterator from .middleware import MiddlewareManager from .values import get_argument_values, get_variable_values __all__ = [ "assert_valid_execution_arguments", + 
"create_source_event_stream", "default_field_resolver", "default_type_resolver", "execute", "execute_sync", + "subscribe", "ExecutionResult", "ExecutionContext", "FormattedExecutionResult", @@ -1222,3 +1226,238 @@ def default_field_resolver(source: Any, info: GraphQLResolveInfo, **args: Any) - if callable(value): return value(info, **args) return value + + +def subscribe( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any = None, + context_value: Any = None, + variable_values: Optional[Dict[str, Any]] = None, + operation_name: Optional[str] = None, + field_resolver: Optional[GraphQLFieldResolver] = None, + subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, +) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: + """Create a GraphQL subscription. + + Implements the "Subscribe" algorithm described in the GraphQL spec. + + Returns a coroutine object which yields either an AsyncIterator (if successful) or + an ExecutionResult (client error). The coroutine will raise an exception if a server + error occurs. + + If the client-provided arguments to this function do not result in a compliant + subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no + data will be returned. + + If the source stream could not be created due to faulty subscription resolver logic + or underlying systems, the coroutine object will yield a single ExecutionResult + containing ``errors`` and no ``data``. + + If the operation succeeded, the coroutine will yield an AsyncIterator, which yields + a stream of ExecutionResults representing the response stream. 
+ """ + result_or_stream = create_source_event_stream( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + subscribe_field_resolver, + execution_context_class, + ) + + async def map_source_to_response(payload: Any) -> ExecutionResult: + """Map source to response. + + For each payload yielded from a subscription, map it over the normal GraphQL + :func:`~graphql.execute` function, with ``payload`` as the ``root_value``. + This implements the "MapSourceToResponseEvent" algorithm described in the + GraphQL specification. The :func:`~graphql.execute` function provides the + "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the + "ExecuteQuery" algorithm, for which :func:`~graphql.execute` is also used. + """ + result = execute( + schema, + document, + payload, + context_value, + variable_values, + operation_name, + field_resolver, + execution_context_class=execution_context_class, + ) + return await result if isawaitable(result) else result + + if (execution_context_class or ExecutionContext).is_awaitable(result_or_stream): + awaitable_result_or_stream = cast(Awaitable, result_or_stream) + + # noinspection PyShadowingNames + async def await_result() -> Any: + result_or_stream = await awaitable_result_or_stream + if isinstance(result_or_stream, ExecutionResult): + return result_or_stream + return MapAsyncIterator(result_or_stream, map_source_to_response) + + return await_result() + + if isinstance(result_or_stream, ExecutionResult): + return result_or_stream + + # Map every source value to a ExecutionResult value as described above. 
+ return MapAsyncIterator( + cast(AsyncIterable[Any], result_or_stream), map_source_to_response + ) + + +def create_source_event_stream( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any = None, + context_value: Any = None, + variable_values: Optional[Dict[str, Any]] = None, + operation_name: Optional[str] = None, + subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, +) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: + """Create source event stream + + Implements the "CreateSourceEventStream" algorithm described in the GraphQL + specification, resolving the subscription source event stream. + + Returns a coroutine that yields an AsyncIterable. + + If the client-provided arguments to this function do not result in a compliant + subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no + data will be returned. + + If the source stream could not be created due to faulty subscription resolver logic + or underlying systems, the coroutine object will yield a single ExecutionResult + containing ``errors`` and no ``data``. + + A source event stream represents a sequence of events, each of which triggers a + GraphQL execution for that event. + + This may be useful when hosting the stateful subscription service in a different + process or machine than the stateless GraphQL execution engine, or otherwise + separating these two steps. For more on this, see the "Supporting Subscriptions + at Scale" information in the GraphQL spec. + """ + # If arguments are missing or incorrectly typed, this is an internal developer + # mistake which should throw an early error. + assert_valid_execution_arguments(schema, document, variable_values) + + if not execution_context_class: + execution_context_class = ExecutionContext + + # If a valid context cannot be created due to incorrect arguments, + # a "Response" with only errors is returned. 
+ context = execution_context_class.build( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + subscribe_field_resolver=subscribe_field_resolver, + ) + + # Return early errors if execution context failed. + if isinstance(context, list): + return ExecutionResult(data=None, errors=context) + + try: + event_stream = execute_subscription(context) + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) + + if context.is_awaitable(event_stream): + awaitable_event_stream = cast(Awaitable, event_stream) + + # noinspection PyShadowingNames + async def await_event_stream() -> Union[AsyncIterable[Any], ExecutionResult]: + try: + return await awaitable_event_stream + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) + + return await_event_stream() + + return event_stream + + +def execute_subscription( + context: ExecutionContext, +) -> AwaitableOrValue[AsyncIterable[Any]]: + schema = context.schema + + root_type = schema.subscription_type + if root_type is None: + raise GraphQLError( + "Schema is not configured to execute subscription operation.", + context.operation, + ) + + root_fields = collect_fields( + schema, + context.fragments, + context.variable_values, + root_type, + context.operation.selection_set, + ) + response_name, field_nodes = next(iter(root_fields.items())) + field_name = field_nodes[0].name.value + field_def = schema.get_field(root_type, field_name) + + if not field_def: + raise GraphQLError( + f"The subscription field '{field_name}' is not defined.", field_nodes + ) + + path = Path(None, response_name, root_type.name) + info = context.build_resolve_info(field_def, field_nodes, root_type, path) + + # Implements the "ResolveFieldEventStream" algorithm from GraphQL specification. + # It differs from "ResolveFieldValue" due to providing a different `resolveFn`. 
+ + try: + # Build a dictionary of arguments from the field.arguments AST, using the + # variables scope to fulfill any variable references. + args = get_argument_values(field_def, field_nodes[0], context.variable_values) + + # Call the `subscribe()` resolver or the default resolver to produce an + # AsyncIterable yielding raw payloads. + resolve_fn = field_def.subscribe or context.subscribe_field_resolver + + result = resolve_fn(context.root_value, info, **args) + if context.is_awaitable(result): + + # noinspection PyShadowingNames + async def await_result() -> AsyncIterable[Any]: + try: + return assert_event_stream(await result) + except Exception as error: + raise located_error(error, field_nodes, path.as_list()) + + return await_result() + + return assert_event_stream(result) + + except Exception as error: + raise located_error(error, field_nodes, path.as_list()) + + +def assert_event_stream(result: Any) -> AsyncIterable: + if isinstance(result, Exception): + raise result + + # Assert field returned an event stream, otherwise yield an error. + if not isinstance(result, AsyncIterable): + raise GraphQLError( + "Subscription field must return AsyncIterable." + f" Received: {inspect(result)}." 
+ ) + + return result diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py deleted file mode 100644 index a8db0bbe..00000000 --- a/src/graphql/execution/subscribe.py +++ /dev/null @@ -1,264 +0,0 @@ -from inspect import isawaitable -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Awaitable, - Dict, - Optional, - Type, - Union, - cast, -) - -from ..error import GraphQLError, located_error -from ..execution.collect_fields import collect_fields -from ..execution.execute import ( - ExecutionContext, - ExecutionResult, - assert_valid_execution_arguments, - execute, -) -from ..execution.values import get_argument_values -from ..language import DocumentNode -from ..pyutils import AwaitableOrValue, Path, inspect -from ..type import GraphQLFieldResolver, GraphQLSchema -from .map_async_iterator import MapAsyncIterator - - -__all__ = ["subscribe", "create_source_event_stream"] - - -def subscribe( - schema: GraphQLSchema, - document: DocumentNode, - root_value: Any = None, - context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: - """Create a GraphQL subscription. - - Implements the "Subscribe" algorithm described in the GraphQL spec. - - Returns a coroutine object which yields either an AsyncIterator (if successful) or - an ExecutionResult (client error). The coroutine will raise an exception if a server - error occurs. - - If the client-provided arguments to this function do not result in a compliant - subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no - data will be returned. 
- - If the source stream could not be created due to faulty subscription resolver logic - or underlying systems, the coroutine object will yield a single ExecutionResult - containing ``errors`` and no ``data``. - - If the operation succeeded, the coroutine will yield an AsyncIterator, which yields - a stream of ExecutionResults representing the response stream. - """ - result_or_stream = create_source_event_stream( - schema, - document, - root_value, - context_value, - variable_values, - operation_name, - subscribe_field_resolver, - execution_context_class, - ) - - async def map_source_to_response(payload: Any) -> ExecutionResult: - """Map source to response. - - For each payload yielded from a subscription, map it over the normal GraphQL - :func:`~graphql.execute` function, with ``payload`` as the ``root_value``. - This implements the "MapSourceToResponseEvent" algorithm described in the - GraphQL specification. The :func:`~graphql.execute` function provides the - "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the - "ExecuteQuery" algorithm, for which :func:`~graphql.execute` is also used. - """ - result = execute( - schema, - document, - payload, - context_value, - variable_values, - operation_name, - field_resolver, - execution_context_class=execution_context_class, - ) - return await result if isawaitable(result) else result - - if (execution_context_class or ExecutionContext).is_awaitable(result_or_stream): - awaitable_result_or_stream = cast(Awaitable, result_or_stream) - - # noinspection PyShadowingNames - async def await_result() -> Any: - result_or_stream = await awaitable_result_or_stream - if isinstance(result_or_stream, ExecutionResult): - return result_or_stream - return MapAsyncIterator(result_or_stream, map_source_to_response) - - return await_result() - - if isinstance(result_or_stream, ExecutionResult): - return result_or_stream - - # Map every source value to a ExecutionResult value as described above. 
- return MapAsyncIterator( - cast(AsyncIterable[Any], result_or_stream), map_source_to_response - ) - - -def create_source_event_stream( - schema: GraphQLSchema, - document: DocumentNode, - root_value: Any = None, - context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: - """Create source event stream - - Implements the "CreateSourceEventStream" algorithm described in the GraphQL - specification, resolving the subscription source event stream. - - Returns a coroutine that yields an AsyncIterable. - - If the client-provided arguments to this function do not result in a compliant - subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no - data will be returned. - - If the source stream could not be created due to faulty subscription resolver logic - or underlying systems, the coroutine object will yield a single ExecutionResult - containing ``errors`` and no ``data``. - - A source event stream represents a sequence of events, each of which triggers a - GraphQL execution for that event. - - This may be useful when hosting the stateful subscription service in a different - process or machine than the stateless GraphQL execution engine, or otherwise - separating these two steps. For more on this, see the "Supporting Subscriptions - at Scale" information in the GraphQL spec. - """ - # If arguments are missing or incorrectly typed, this is an internal developer - # mistake which should throw an early error. - assert_valid_execution_arguments(schema, document, variable_values) - - if not execution_context_class: - execution_context_class = ExecutionContext - - # If a valid context cannot be created due to incorrect arguments, - # a "Response" with only errors is returned. 
- context = execution_context_class.build( - schema, - document, - root_value, - context_value, - variable_values, - operation_name, - subscribe_field_resolver=subscribe_field_resolver, - ) - - # Return early errors if execution context failed. - if isinstance(context, list): - return ExecutionResult(data=None, errors=context) - - try: - event_stream = execute_subscription(context) - except GraphQLError as error: - return ExecutionResult(data=None, errors=[error]) - - if context.is_awaitable(event_stream): - awaitable_event_stream = cast(Awaitable, event_stream) - - # noinspection PyShadowingNames - async def await_event_stream() -> Union[AsyncIterable[Any], ExecutionResult]: - try: - return await awaitable_event_stream - except GraphQLError as error: - return ExecutionResult(data=None, errors=[error]) - - return await_event_stream() - - return event_stream - - -def execute_subscription( - context: ExecutionContext, -) -> AwaitableOrValue[AsyncIterable[Any]]: - schema = context.schema - - root_type = schema.subscription_type - if root_type is None: - raise GraphQLError( - "Schema is not configured to execute subscription operation.", - context.operation, - ) - - root_fields = collect_fields( - schema, - context.fragments, - context.variable_values, - root_type, - context.operation.selection_set, - ) - response_name, field_nodes = next(iter(root_fields.items())) - field_name = field_nodes[0].name.value - field_def = schema.get_field(root_type, field_name) - - if not field_def: - raise GraphQLError( - f"The subscription field '{field_name}' is not defined.", field_nodes - ) - - path = Path(None, response_name, root_type.name) - info = context.build_resolve_info(field_def, field_nodes, root_type, path) - - # Implements the "ResolveFieldEventStream" algorithm from GraphQL specification. - # It differs from "ResolveFieldValue" due to providing a different `resolveFn`. 
- - try: - # Build a dictionary of arguments from the field.arguments AST, using the - # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_nodes[0], context.variable_values) - - # Call the `subscribe()` resolver or the default resolver to produce an - # AsyncIterable yielding raw payloads. - resolve_fn = field_def.subscribe or context.subscribe_field_resolver - - result = resolve_fn(context.root_value, info, **args) - if context.is_awaitable(result): - - # noinspection PyShadowingNames - async def await_result() -> AsyncIterable[Any]: - try: - return assert_event_stream(await result) - except Exception as error: - raise located_error(error, field_nodes, path.as_list()) - - return await_result() - - return assert_event_stream(result) - - except Exception as error: - raise located_error(error, field_nodes, path.as_list()) - - -def assert_event_stream(result: Any) -> AsyncIterable: - if isinstance(result, Exception): - raise result - - # Assert field returned an event stream, otherwise yield an error. - if not isinstance(result, AsyncIterable): - raise GraphQLError( - "Subscription field must return AsyncIterable." - f" Received: {inspect(result)}." - ) - - return result From 1e5be9cacdec8ed33b30c89b1c8217b69460fbca Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 8 Jan 2023 14:10:38 +0100 Subject: [PATCH 065/230] Update dependencies --- poetry.lock | 1268 +++++++++++++++++++++++++++--------------------- pyproject.toml | 21 +- tox.ini | 16 +- 3 files changed, 729 insertions(+), 576 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8a15dabf..e23c825d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Poetry and should not be changed by hand. 
+ [[package]] name = "alabaster" version = "0.7.12" @@ -5,20 +7,29 @@ description = "A configurable sidebar-enabled Sphinx theme" category = "dev" optional = false python-versions = "*" +files = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest 
(>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "babel" @@ -27,6 +38,10 @@ description = "Internationalization utilities" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, + {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, +] [package.dependencies] pytz = ">=2015.7" @@ -38,6 +53,10 @@ description = "Security oriented static analyser for python code." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, + {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, +] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} @@ -52,11 +71,25 @@ yaml = ["PyYAML"] [[package]] name = "black" -version = "22.10.0" +version = "22.12.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] [package.dependencies] click = ">=8.0.0" @@ -80,14 +113,46 @@ description = "Version-bump your software with a single command!" 
category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] + +[[package]] +name = "cachetools" +version = "5.2.0" +description = "Extensible memoizing collections and decorators" +category = "dev" +optional = false +python-versions = "~=3.7" +files = [ + {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, + {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, +] [[package]] name = "certifi" -version = "2022.9.24" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] + +[[package]] +name = "chardet" +version = "5.1.0" +description = "Universal encoding detector for Python 3" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, + {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, +] [[package]] name = "charset-normalizer" @@ -96,6 +161,10 @@ description = "The Real First Universal Charset Detector. 
Open, modern and activ category = "dev" optional = false python-versions = ">=3.6.0" +files = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] [package.extras] unicode-backport = ["unicodedata2"] @@ -107,6 +176,10 @@ description = "Composable command line interface toolkit" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -119,14 +192,71 @@ description = "Cross-platform colored terminal text." category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "coverage" -version = "6.5.0" +version = "7.0.4" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "coverage-7.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:daf91db39324e9939a9db919ee4fb42a1a23634a056616dae891a030e89f87ba"}, + {file = "coverage-7.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55121fe140d7e42cb970999b93cf1c2b24484ce028b32bbd00238bb25c13e34a"}, + {file = "coverage-7.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c027fbb83a8c78a6e06a0302ea1799fdb70e5cda9845a5e000545b8e2b47ea39"}, + {file = "coverage-7.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf82db5b7f16b51ec32fe0bd2da0805b177c807aa8bfb478c7e6f893418c284"}, + {file = "coverage-7.0.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ba5cc54baf3c322c4388de2a43cc95f7809366f0600e743e5aae8ea9d1038b2"}, + {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:260854160083f8275a9d9d49a05ab0ffc7a1f08f2ccccbfaec94a18aae9f407c"}, + {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ea45f0dba5a993e93b158f1a9dcfff2770e3bcabf2b80dbe7aa15dce0bcb3bf3"}, + {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6abc91f6f8b3cc0ae1034e2c03f38769fba1952ab70d0b26953aa01691265c39"}, + {file = "coverage-7.0.4-cp310-cp310-win32.whl", hash = "sha256:053cdc47cae08257051d7e934a0de4d095b60eb8a3024fa9f1b2322fa1547137"}, + {file = "coverage-7.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:1e9e94f2612ee549a4b3ee79cbc61bceed77e69cf38cfa05858bae939a886d16"}, + {file = "coverage-7.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5caa9dd91dcc5f054350dc57a02e053d79633907b9ccffff999568d13dcd19f8"}, + {file = "coverage-7.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:efc200fa75d9634525b40babc7a16342bd21c101db1a58ef84dc14f4bf6ac0fd"}, + {file = "coverage-7.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1791e5f74c5b52f76e83fe9f4bb9571cf76d40ee0c51952ee1e4ee935b7e98b9"}, + {file = "coverage-7.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d9201cfa5a98652b9cef36ab202f17fe3ea83f497b4ba2a8ed39399dfb8fcd4"}, + {file = "coverage-7.0.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:22d8ef6865cb6834cab2b72fff20747a55c714b57b675f7e11c9624fe4f7cb45"}, + {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b84076e3de192fba0f95e279ac017b64c7c6ecd4f09f36f13420f5bed898a9c7"}, + {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dcfbf8ffc046f20d75fd775a92c378f6fc7b9bded6c6f2ab88b6b9cb5805a184"}, + {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4665a714af31f160403c2e448fb2fef330719d2e04e836b08d60d612707c1041"}, + {file = "coverage-7.0.4-cp311-cp311-win32.whl", hash = "sha256:2e59aef3fba5758059208c9eff10ae7ded3629e797972746ec33b56844f69411"}, + {file = "coverage-7.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:2b854f7985b48122b6fe346631e86d67b63293f8255cb59a93d79e3d9f1574e3"}, + {file = "coverage-7.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e44b60b0b49aa85d548d392a2dca2c6a581cd4084e72e9e16bd58bd86ec20816"}, + {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2904d7a0388911c61e7e3beefe48c29dfccaba938fc1158f63190101a21e04c2"}, + {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc74b64bfa89e2f862ea45dd6ac1def371d7cc883b76680d20bdd61a6f3daa20"}, + {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06046f54e719da21c79f98ecc0962581d1aee0b3798dc6b12b1217da8bf93f4"}, + {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bc9c77004970a364a1e5454cf7cb884e4277592b959c287689b2a0fd027ef552"}, + {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0815a09b32384e8ff00a5939ec9cd10efce8742347e019c2daca1a32f5ac2aae"}, + {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a78a80d131c067d67d8a6f9bd3d3f7ea7eac82c1c7259f97d7ab73f723da9d55"}, + {file = 
"coverage-7.0.4-cp37-cp37m-win32.whl", hash = "sha256:2b5936b624fbe711ed02dfd86edd678822e5ee68da02b6d231e5c01090b64590"}, + {file = "coverage-7.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a63922765ee49d5b4c32afb2cd5516812c8665f3b78e64a0dd005bdfabf991b1"}, + {file = "coverage-7.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d68f2f7bddb3acdd3b36ef7f334b9d14f30b93e094f808fbbd8d288b8f9e2f9b"}, + {file = "coverage-7.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dafdba3b2b9010abab08cb8c0dc6549bfca6e1630fe14d47b01dca00d39e694"}, + {file = "coverage-7.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0322354757b47640535daabd2d56384ff3cad2896248fc84d328c5fad4922d5c"}, + {file = "coverage-7.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e8267466662aff93d66fa72b9591d02122dfc8a729b0a43dd70e0fb07ed9b37"}, + {file = "coverage-7.0.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f684d88eb4924ed0630cf488fd5606e334c6835594bb5fe36b50a509b10383ed"}, + {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70c294bb15ba576fb96b580db35895bf03749d683df044212b74e938a7f6821f"}, + {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:34c0457e1ba450ae8b22dc8ea2fd36ada1010af61291e4c96963cd9d9633366f"}, + {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b75aff2c35ceaa299691e772f7bf7c8aeab25f46acea2be3dd04cccb914a9860"}, + {file = "coverage-7.0.4-cp38-cp38-win32.whl", hash = "sha256:6c5554d55668381e131577f20e8f620d4882b04ad558f7e7f3f1f55b3124c379"}, + {file = "coverage-7.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c82f34fafaf5bc05d222fcf84423d6e156432ca35ca78672d4affd0c09c6ef6c"}, + {file = "coverage-7.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8dfb5fed540f77e814bf4ec79619c241af6b4578fa1093c5e3389bbb7beab3f"}, + {file = 
"coverage-7.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee32a080bab779b71c4d09a3eb5254bfca43ee88828a683dab27dfe8f582516e"}, + {file = "coverage-7.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dfbee0bf0d633be3a2ab068f5a5731a70adf147d0ba17d9f9932b46c7c5782b"}, + {file = "coverage-7.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32dc010713455ac0fe2fddb0e48aa43875cc7eb7b09768df10bad8ce45f9c430"}, + {file = "coverage-7.0.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cb88a3019ad042eaa69fc7639ef077793fedbf313e89207aa82fefe92c97ebd"}, + {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:73bc6114aab7753ca784f87bcd3b7613bc797aa255b5bca45e5654070ae9acfb"}, + {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92f135d370fcd7a6fb9659fa2eb716dd2ca364719cbb1756f74d90a221bca1a7"}, + {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f3d485e6ec6e09857bf2115ece572d666b7c498377d4c70e66bb06c63ed177c2"}, + {file = "coverage-7.0.4-cp39-cp39-win32.whl", hash = "sha256:c58921fcd9914b56444292e7546fe183d079db99528142c809549ddeaeacd8e9"}, + {file = "coverage-7.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:f092d9f2ddaa30235d33335fbdb61eb8f3657af519ef5f9dd6bdae65272def11"}, + {file = "coverage-7.0.4-pp37.pp38.pp39-none-any.whl", hash = "sha256:cb8cfa3bf3a9f18211279458917fef5edeb5e1fdebe2ea8b11969ec2ebe48884"}, + {file = "coverage-7.0.4.tar.gz", hash = "sha256:f6c4ad409a0caf7e2e12e203348b1a9b19c514e7d078520973147bf2d3dcbc6f"}, +] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} @@ -141,6 +271,10 @@ description = "Distribution utilities" category = "dev" optional = false python-versions = "*" +files = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = 
"sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] [[package]] name = "docutils" @@ -149,29 +283,41 @@ description = "Docutils -- Python Documentation Utilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, +] [package.extras] test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.8.0" +version = "3.9.0" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, +] [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -180,6 +326,10 @@ description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] [package.dependencies] importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} @@ -194,6 +344,10 @@ description = "Automated security testing with bandit and flake8." category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, + {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, +] [package.dependencies] bandit = ">=1.7.3" @@ -201,11 +355,15 @@ flake8 = ">=5.0.0" [[package]] name = "flake8-bugbear" -version = "22.10.27" +version = "22.12.6" description = "A plugin for flake8 finding likely bugs and design problems in your program. 
Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, + {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -216,22 +374,30 @@ dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] [package.dependencies] smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.29" +version = "3.1.30" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, +] [package.dependencies] gitdb = ">=4.0.1,<5" @@ -244,6 +410,10 @@ description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "imagesize" @@ -252,6 
+422,10 @@ description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] [[package]] name = "importlib-metadata" @@ -260,6 +434,10 @@ description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, + {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, +] [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} @@ -269,21 +447,50 @@ zipp = ">=0.5" docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +[[package]] +name = "importlib-metadata" +version = "6.0.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift 
(>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] name = "isort" -version = "5.10.1" +version = "5.11.4" description = "A Python utility / library to sort Python imports." category = "dev" optional = false -python-versions = ">=3.6.1,<4.0" +python-versions = ">=3.7.0" +files = [ + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, +] [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] @@ -298,6 +505,10 @@ description = "A very fast and expressive template engine." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] [package.dependencies] MarkupSafe = ">=2.0" @@ -312,6 +523,48 @@ description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = 
"MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = 
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] [[package]] name = "mccabe" @@ -320,6 +573,10 @@ description = "McCabe checker, plugin for flake8" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] [[package]] name = "mypy" @@ -328,6 +585,38 @@ description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, + {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, + {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, + {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, + {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, + {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, + {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, + {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, + {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, + {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, + {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, + {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, + {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, + {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, + {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, + {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, + {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, + {file = 
"mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, + {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, + {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, + {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, + {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, + {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, + {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, +] [package.dependencies] mypy-extensions = ">=0.4.3" @@ -348,25 +637,34 @@ description = "Experimental type system extensions for programs checked with the category = "dev" optional = false python-versions = "*" +files = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] [[package]] name = "packaging" -version = "21.3" +version = "22.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" +files = [ + 
{file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, + {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, +] [[package]] name = "pathspec" -version = "0.10.2" +version = "0.10.3" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, + {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, +] [[package]] name = "pbr" @@ -375,18 +673,29 @@ description = "Python Build Reasonableness" category = "dev" optional = false python-versions = ">=2.6" +files = [ + {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, + {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, +] [[package]] name = "platformdirs" -version = "2.5.4" +version = "2.6.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -395,6 +704,10 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -410,6 +723,10 @@ description = "library with cross-python path, ini-parsing, io, code, log facili category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] [[package]] name = "py-cpuinfo" @@ -418,6 +735,10 @@ description = "Get CPU info with pure Python" category = "dev" optional 
= false python-versions = "*" +files = [ + {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, +] [[package]] name = "pycodestyle" @@ -426,6 +747,10 @@ description = "Python style guide checker" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] [[package]] name = "pyflakes" @@ -434,28 +759,45 @@ description = "passive checker of Python programs" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] [[package]] name = "pygments" -version = "2.13.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, +] [package.extras] plugins = ["importlib-metadata"] [[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" +name = "pyproject-api" +version = "1.4.0" +description = "API to interact with the python pyproject.toml based projects" category = "dev" optional = false -python-versions = ">=3.6.8" +python-versions = ">=3.7" +files = [ + {file = "pyproject_api-1.4.0-py3-none-any.whl", hash = "sha256:c34226297781efdd1ba4dfb74ce21076d9a8360e2125ea31803c1a02c76b2460"}, + {file = "pyproject_api-1.4.0.tar.gz", hash = "sha256:ac85c1f82e0291dbae5a7739dbb9a990e11ee4034c9b5599ea714f07a24ecd71"}, +] + +[package.dependencies] +packaging = ">=21.3" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=5.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "virtualenv (>=20.17)", "wheel (>=0.38.4)"] [[package]] name = "pytest" @@ -464,6 +806,10 @@ description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, + {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -480,17 +826,22 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", 
"pygments (>=2. [[package]] name = "pytest-asyncio" -version = "0.20.2" +version = "0.20.3" description = "Pytest support for asyncio" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.20.3.tar.gz", hash = "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, + {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, +] [package.dependencies] pytest = ">=6.1.0" typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} [package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] [[package]] @@ -500,6 +851,10 @@ description = "A ``pytest`` fixture for benchmarking code. It will group the tes category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, + {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, +] [package.dependencies] py-cpuinfo = "*" @@ -517,6 +872,10 @@ description = "Pytest plugin for measuring coverage." 
category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} @@ -532,6 +891,10 @@ description = "Describe-style plugin for pytest" category = "dev" optional = false python-versions = "*" +files = [ + {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, + {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"}, +] [package.dependencies] pytest = ">=4.0.0" @@ -543,17 +906,25 @@ description = "pytest plugin to abort hanging tests" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, + {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, +] [package.dependencies] pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2022.6" +version = "2022.7" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" +files = [ + {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, + {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, +] [[package]] name = "pyyaml" @@ -562,46 +933,100 @@ description = "YAML parser and emitter for Python" category = "dev" optional = false python-versions = ">=3.6" - -[[package]] -name = "requests" -version = "2.28.1" -description = "Python HTTP for Humans." 
-category = "dev" -optional = false -python-versions = ">=3.7, <4" - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "setuptools" -version = "65.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" - +files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = 
"PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." 
+category = "dev" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "65.6.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", 
"wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "smmap" version = "5.0.0" @@ -609,6 +1034,10 @@ description = "A pure Python implementation of a sliding window memory map manag category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] [[package]] name = "snowballstemmer" @@ -617,6 +1046,10 @@ description = "This package provides 29 stemmers for 28 languages generated from category = "dev" optional = false python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] [[package]] name = "sphinx" @@ -625,6 +1058,10 @@ description = "Python documentation generator" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, + {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, +] [package.dependencies] alabaster = ">=0.7,<0.8" @@ -650,6 +1087,42 @@ docs = ["sphinxcontrib-websupport"] lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", 
"types-pkg-resources", "types-requests", "types-typed-ast"] test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + [[package]] name = "sphinx-rtd-theme" version = "1.1.1" @@ -657,6 +1130,10 @@ description = "Read the Docs theme for Sphinx" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "sphinx_rtd_theme-1.1.1-py2.py3-none-any.whl", hash = "sha256:31faa07d3e97c8955637fc3f1423a5ab2c44b74b8cc558a51498c202ce5cbda7"}, + {file = "sphinx_rtd_theme-1.1.1.tar.gz", hash = "sha256:6146c845f1e1947b3c3dd4432c28998a1693ccc742b4f9ad7c63129f0757c103"}, +] [package.dependencies] docutils = "<0.18" @@ -672,6 +1149,26 @@ 
description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.3" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib.applehelp-1.0.3-py3-none-any.whl", hash = "sha256:ba0f2a22e6eeada8da6428d0d520215ee8864253f32facf958cca81e426f661d"}, + {file = "sphinxcontrib.applehelp-1.0.3.tar.gz", hash = "sha256:83749f09f6ac843b8cb685277dbc818a8bf2d76cc19602699094fe9a74db529e"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -684,6 +1181,10 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -696,6 +1197,10 @@ description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML h category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = 
"sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -708,6 +1213,10 @@ description = "A sphinx extension which renders display math in HTML via JavaScr category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] [package.extras] test = ["flake8", "mypy", "pytest"] @@ -719,6 +1228,10 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -731,6 +1244,10 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -743,6 +1260,10 @@ description = "Manage dynamic plugins for Python applications" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "stevedore-3.5.2-py3-none-any.whl", hash = "sha256:fa2630e3d0ad3e22d4914aff2501445815b9a4467a6edc49387c667a38faf5bf"}, + {file = 
"stevedore-3.5.2.tar.gz", hash = "sha256:cf99f41fc0d5a4f185ca4d3d42b03be9011b0a1ec1a4ea1a282be1b4b306dcc2"}, +] [package.dependencies] importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} @@ -755,14 +1276,22 @@ description = "A lil' TOML parser" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] [[package]] name = "tox" -version = "3.27.1" +version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "tox-3.28.0-py2.py3-none-any.whl", hash = "sha256:57b5ab7e8bb3074edc3c0c0b4b192a4f3799d3723b2c5b76f1fa9f2d40316eea"}, + {file = "tox-3.28.0.tar.gz", hash = "sha256:d0d28f3fe6d6d7195c27f8b054c3e99d5451952b54abdae673b71609a581f640"}, +] [package.dependencies] colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} @@ -779,6 +1308,36 @@ virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2, docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)"] +[[package]] +name = "tox" +version = "4.2.6" +description = "tox is a generic virtualenv management and test command line tool" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tox-4.2.6-py3-none-any.whl", hash = "sha256:fb79b3e4b788491949576a9c80c2d56419eac994567c3591e24bb2788b5901d0"}, + {file = "tox-4.2.6.tar.gz", hash = 
"sha256:ecf224a4f3a318adcdd71aa8fe15ffd31f14afd6a9845a43ffd63950a7325538"}, +] + +[package.dependencies] +cachetools = ">=5.2" +chardet = ">=5.1" +colorama = ">=0.4.6" +filelock = ">=3.9" +importlib-metadata = {version = ">=5.2", markers = "python_version < \"3.8\""} +packaging = ">=22" +platformdirs = ">=2.6.2" +pluggy = ">=1" +pyproject-api = ">=1.2.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} +virtualenv = ">=20.17.1" + +[package.extras] +docs = ["furo (>=2022.12.7)", "sphinx (>=6)", "sphinx-argparse-cli (>=1.10)", "sphinx-autodoc-typehints (>=1.19.5)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +testing = ["build[virtualenv] (>=0.9)", "covdefaults (>=2.2.2)", "devpi-process (>=0.3)", "diff-cover (>=7.3)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.12)", "psutil (>=5.9.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.1)", "re-assert (>=1.1)", "time-machine (>=2.8.2)"] + [[package]] name = "typed-ast" version = "1.5.4" @@ -786,6 +1345,32 @@ description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] [[package]] name = "typing-extensions" @@ -794,14 +1379,22 @@ description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, +] [[package]] name = "urllib3" -version = "1.26.12" +version = "1.26.13" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, +] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -815,6 +1408,10 @@ description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, + {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, +] [package.dependencies] distlib = ">=0.3.1,<1" @@ -826,494 +1423,45 @@ platformdirs = ">=2,<3" docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] +[[package]] +name = "virtualenv" +version = "20.17.1" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"}, + {file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"}, +] + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +importlib-metadata = {version = ">=4.8.3", markers = 
"python_version < \"3.8\""} +platformdirs = ">=2.4,<3" + +[package.extras] +docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] +testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] + [[package]] name = "zipp" -version = "3.10.0" +version = "3.11.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, + {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, +] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] -lock-version = "1.1" +lock-version = "2.0" python-versions = "^3.7" -content-hash = "c2af909043cc15321f4940901cd26042ac60ce824b71cc651735e543efef8852" - -[metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = 
"sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -babel = [ - {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, - {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, -] -bandit = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, -] -black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = 
"black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, -] -bump2version = [ - {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, - {file = "bump2version-1.0.1.tar.gz", hash = 
"sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, -] -certifi = [ - {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, - {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, -] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] -colorama = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = 
"coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = 
"coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = 
"coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, -] -distlib = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, -] -docutils = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, -] -exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, -] -filelock = [ - {file = 
"filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, -] -flake8 = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] -flake8-bandit = [ - {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, - {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, -] -flake8-bugbear = [ - {file = "flake8-bugbear-22.10.27.tar.gz", hash = "sha256:a6708608965c9e0de5fff13904fed82e0ba21ac929fe4896459226a797e11cd5"}, - {file = "flake8_bugbear-22.10.27-py3-none-any.whl", hash = "sha256:6ad0ab754507319060695e2f2be80e6d8977cfcea082293089a9226276bd825d"}, -] -gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, -] -gitpython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = 
"imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -jinja2 = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] -markupsafe = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = 
"sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, -] -mccabe = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] -mypy = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = 
"mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", 
hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, - {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, -] -pbr = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, -] -platformdirs = [ - {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, - {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -py-cpuinfo = [ - {file = "py-cpuinfo-9.0.0.tar.gz", 
hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, - {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, -] -pycodestyle = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] -pyflakes = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] -pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pytest = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.20.2.tar.gz", hash = "sha256:32a87a9836298a881c0ec637ebcc952cfe23a56436bdc0d09d1511941dd8a812"}, - {file = "pytest_asyncio-0.20.2-py3-none-any.whl", hash = "sha256:07e0abf9e6e6b95894a39f688a4a875d63c2128f76c02d03d16ccbc35bcc0f8a"}, -] -pytest-benchmark = [ - {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, - {file = 
"pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, -] -pytest-cov = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, -] -pytest-describe = [ - {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, - {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"}, -] -pytest-timeout = [ - {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, - {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, -] -pytz = [ - {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, - {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, -] -pyyaml = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = 
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, -] -requests = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, -] -setuptools = [ - {file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"}, - {file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -smmap = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -sphinx = [ - {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, - {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, -] -sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-1.1.1-py2.py3-none-any.whl", hash = "sha256:31faa07d3e97c8955637fc3f1423a5ab2c44b74b8cc558a51498c202ce5cbda7"}, - {file = "sphinx_rtd_theme-1.1.1.tar.gz", hash = "sha256:6146c845f1e1947b3c3dd4432c28998a1693ccc742b4f9ad7c63129f0757c103"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = 
"sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -stevedore = [ - {file = "stevedore-3.5.2-py3-none-any.whl", hash = "sha256:fa2630e3d0ad3e22d4914aff2501445815b9a4467a6edc49387c667a38faf5bf"}, - {file = "stevedore-3.5.2.tar.gz", hash = "sha256:cf99f41fc0d5a4f185ca4d3d42b03be9011b0a1ec1a4ea1a282be1b4b306dcc2"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tox = [ - {file = "tox-3.27.1-py2.py3-none-any.whl", hash = "sha256:f52ca66eae115fcfef0e77ef81fd107133d295c97c52df337adedb8dfac6ab84"}, - {file = "tox-3.27.1.tar.gz", hash = "sha256:b2a920e35a668cc06942ffd1cf3a4fb221a4d909ca72191fb6d84b0b18a7be04"}, -] -typed-ast = [ - {file = 
"typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - 
{file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] -typing-extensions = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = 
"sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -virtualenv = [ - {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, - {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, -] -zipp = [ - {file = "zipp-3.10.0-py3-none-any.whl", hash = "sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1"}, - {file = "zipp-3.10.0.tar.gz", hash = "sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8"}, -] +content-hash = "3130d4d2f2d8d6d28c20322a41ae9d267d8f1a930a8b069ed8f3717431253e5b" diff --git a/pyproject.toml b/pyproject.toml index 10cc93ff..a14ef58a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,17 +55,20 @@ pytest-benchmark = "^4.0" pytest-cov = "^4.0" pytest-describe = "^2.0" pytest-timeout = "^2.1" -tox = "^3.26" +tox = ">=3.0" [tool.poetry.group.lint] optional = true [tool.poetry.group.lint.dependencies] -black = "22.10.0" -flake8 = "^5.0" +black = "22.12.0" +flake8 = [ + { version = ">=5,<7", python = ">=3.8" }, + { version = ">=5,<6", python = "<3.8" } +] flake8-bandit = "^4.1" -flake8-bugbear = "22.10.27" -isort = "^5.10" +flake8-bugbear = "22.12.6" +isort = "^5.11" mypy = "0.991" bump2version = ">=1.0,<2" @@ -73,8 +76,10 @@ bump2version = ">=1.0,<2" optional = true [tool.poetry.group.doc.dependencies] -# Sphinx >= 4.4 has conflicting dependencies with Flake8 -sphinx = ">= 4.3,<6" +sphinx = [ + { version = ">=4,<7", python = ">=3.8" }, + { version = ">=4,<6", 
python = "<3.8" } +] sphinx_rtd_theme = ">=1,<2" [tool.bandit] @@ -153,5 +158,5 @@ timeout = "100" filterwarnings = "ignore::pytest.PytestConfigWarning" [build-system] -requires = ["poetry_core>=1.2,<2"] +requires = ["poetry_core>=1.4,<2"] build-backend = "poetry.core.masonry.api" diff --git a/tox.ini b/tox.ini index 5745a729..35ee29d7 100644 --- a/tox.ini +++ b/tox.ini @@ -8,7 +8,7 @@ python = 3.7: py37 3.8: py38 3.9: py39 - 3.10: py310=5,<6 - flake8-bandit>=4.1,<6 - flake8-bugbear==22.10.27 + flake8>=6,<7 + flake8-bandit>=4.1,<5 + flake8-bugbear==22.12.6 commands = flake8 src tests [testenv:isort] basepython = python3.10 -deps = isort>=5.10,<6 +deps = isort>=5.11,<6 commands = isort src tests --check-only @@ -45,8 +45,8 @@ commands = [testenv:docs] basepython = python3.10 deps = - sphinx>=5.2.1,<6 - sphinx_rtd_theme>=1,<2 + sphinx>=5.3,<6 + sphinx_rtd_theme>=1.1,<2 commands = sphinx-build -b html -nEW docs docs/_build/html @@ -57,7 +57,7 @@ deps = pytest-benchmark>=4,<5 pytest-cov>=4,<5 pytest-describe>=2,<3 - pytest-timeout>=2,<3 + pytest-timeout>=2.1,<3 py37,py38,py39,pypy39: typing-extensions>=4.4,<5 commands = # to also run the time-consuming tests: tox -e py310 -- --run-slow From b8190cc25382ebc325703b579b15aa3064d80f8b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 8 Jan 2023 14:51:54 +0100 Subject: [PATCH 066/230] Undefined should not be an exception (#187) --- README.md | 2 +- src/graphql/pyutils/undefined.py | 4 ++-- tests/pyutils/test_undefined.py | 13 ++++++++++--- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index bbe91db9..3059765f 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ a query language for APIs created by Facebook. 
![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) -An extensive test suite with over 2300 unit tests and 100% coverage comprises a +An extensive test suite with over 2400 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index e573227e..1e54ac32 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -7,7 +7,7 @@ __all__ = ["Undefined", "UndefinedType"] -class UndefinedType(ValueError): +class UndefinedType: """Auxiliary class for creating the Undefined singleton.""" _instance: Optional[UndefinedType] = None @@ -34,7 +34,7 @@ def __bool__(self) -> bool: return False def __eq__(self, other: Any) -> bool: - return other is Undefined + return other is Undefined or other is None def __ne__(self, other: Any) -> bool: return not self == other diff --git a/tests/pyutils/test_undefined.py b/tests/pyutils/test_undefined.py index 9cd5303f..16d71b0a 100644 --- a/tests/pyutils/test_undefined.py +++ b/tests/pyutils/test_undefined.py @@ -21,16 +21,23 @@ def is_hashable(): def as_bool_is_false(): assert bool(Undefined) is False - def only_equal_to_itself(): + def only_equal_to_itself_and_none(): + # because we want it to behave similarly to JavaScript assert Undefined == Undefined assert not Undefined != Undefined none_object = None - assert Undefined != none_object - assert not Undefined == none_object + assert Undefined == none_object + assert not Undefined != none_object false_object = False assert Undefined != false_object assert not Undefined == false_object + def should_not_be_an_exception(): + # because we want to create similar code to JavaScript where + # undefined return values are different from 
exceptions + # (for instance, this is used in the completeValue function) + assert not isinstance(Undefined, Exception) + def cannot_be_redefined(): with warns(RuntimeWarning, match="Redefinition of 'Undefined'"): redefined_undefined = UndefinedType() From a3313519fcaf1e4caea65820aef8a7a16d84ba4a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 8 Jan 2023 15:45:37 +0100 Subject: [PATCH 067/230] refactor: move assert_valid_execution_arguments into ExecutionContext.build Replicates graphql/graphql-js@2f91eb862aa517e43e9f610b5146d0cf445ade4e --- src/graphql/execution/execute.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 2cf3dda6..15ae0628 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -250,6 +250,9 @@ def build( For internal use only. """ + # If arguments are missing or incorrect, throw an error. + assert_valid_execution_arguments(schema, document, raw_variable_values) + operation: Optional[OperationDefinitionNode] = None fragments: Dict[str, FragmentDefinitionNode] = {} middleware_manager: Optional[MiddlewareManager] = None @@ -988,9 +991,6 @@ def execute( If the arguments to this function do not result in a legal execution context, a GraphQLError will be thrown immediately explaining the invalid input. """ - # If arguments are missing or incorrect, throw an error. 
- assert_valid_execution_arguments(schema, document, variable_values) - if execution_context_class is None: execution_context_class = ExecutionContext From abaa1275d9dbfeeae781e018e19553a564ac494a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 8 Jan 2023 16:04:12 +0100 Subject: [PATCH 068/230] refactor: remove `root_value` argument from `execute_operation` Replicates graphql/graphql-js@75286fec8d19ee5f64976a73424fee50ac97e309 --- src/graphql/execution/execute.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 15ae0628..1e45b4a9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -333,7 +333,7 @@ def build_response( return ExecutionResult(data, errors) def execute_operation( - self, operation: OperationDefinitionNode, root_value: Any + self, operation: OperationDefinitionNode ) -> Optional[AwaitableOrValue[Any]]: """Execute an operation. @@ -355,13 +355,11 @@ def execute_operation( operation.selection_set, ) - path = None - return ( self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, path, root_fields) + )(root_type, self.root_value, None, root_fields) def execute_fields_serially( self, @@ -1029,7 +1027,7 @@ def execute( build_response = exe_context.build_response try: operation = exe_context.operation - result = exe_context.execute_operation(operation, root_value) + result = exe_context.execute_operation(operation) if exe_context.is_awaitable(result): # noinspection PyShadowingNames From 0dc7b0921d71c33fa9bcacc0a55f1c0e2013bc29 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 8 Jan 2023 16:12:00 +0100 Subject: [PATCH 069/230] refactor: remove Optional from execute_operation return type Replicates graphql/graphql-js@35bc6a525c9390cf6e1d6b6bf10d71fa16455d73 --- src/graphql/execution/execute.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 1e45b4a9..4d1f7632 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -334,7 +334,7 @@ def build_response( def execute_operation( self, operation: OperationDefinitionNode - ) -> Optional[AwaitableOrValue[Any]]: + ) -> AwaitableOrValue[Any]: """Execute an operation. Implements the "Executing operations" section of the spec. @@ -1033,7 +1033,7 @@ def execute( # noinspection PyShadowingNames async def await_result() -> Any: try: - return build_response(await result, errors) # type: ignore + return build_response(await result, errors) except GraphQLError as error: errors.append(error) return build_response(None, errors) From b43c820f9e8674eaf24f16e71beef5f613109e33 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 8 Jan 2023 20:16:09 +0100 Subject: [PATCH 070/230] Remove runtime checks that duplicate type hints The code contained many runtime checks that threw TypeErrors if wrong types were passed, even though it is considered to be more "pythonic" to not check types at runtime at all. In general, these checks add runtime cost and we can't realistically check all arguments to all functions. Instead we should focus on adding more asserts on stuff that can't be checked using type hints. If runtime checks are desired, they can still be added by using runtime type-checkers like Beartype. 
Replicates graphql/graphql-js@a4b085b7dc6f65ad5f4f148211e9101717352bc5 --- README.md | 2 +- src/graphql/execution/execute.py | 17 +- src/graphql/type/definition.py | 312 +-------- src/graphql/type/directives.py | 35 +- src/graphql/type/schema.py | 59 +- src/graphql/utilities/build_ast_schema.py | 3 - src/graphql/utilities/build_client_schema.py | 2 + src/graphql/utilities/extend_schema.py | 3 - src/graphql/validation/validate.py | 13 - tests/execution/test_executor.py | 55 +- tests/execution/test_subscribe.py | 50 +- tests/pyutils/test_description.py | 30 +- tests/type/test_assert_name.py | 7 - tests/type/test_definition.py | 634 +------------------ tests/type/test_directives.py | 52 +- tests/type/test_extensions.py | 70 +- tests/type/test_predicate.py | 31 + tests/type/test_schema.py | 61 -- tests/type/test_validation.py | 344 ++-------- tests/utilities/test_build_ast_schema.py | 29 +- tests/utilities/test_extend_schema.py | 30 +- tests/validation/test_validation.py | 36 -- 22 files changed, 165 insertions(+), 1710 deletions(-) diff --git a/README.md b/README.md index 3059765f..bbe91db9 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ a query language for APIs created by Facebook. ![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) -An extensive test suite with over 2400 unit tests and 100% coverage comprises a +An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 4d1f7632..61a63c7d 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -251,7 +251,7 @@ def build( For internal use only. 
""" # If arguments are missing or incorrect, throw an error. - assert_valid_execution_arguments(schema, document, raw_variable_values) + assert_valid_execution_arguments(schema) operation: Optional[OperationDefinitionNode] = None fragments: Dict[str, FragmentDefinitionNode] = {} @@ -1104,8 +1104,6 @@ def execute_sync( def assert_valid_execution_arguments( schema: GraphQLSchema, - document: DocumentNode, - raw_variable_values: Optional[Dict[str, Any]] = None, ) -> None: """Check that the arguments are acceptable. @@ -1114,20 +1112,9 @@ def assert_valid_execution_arguments( For internal use only. """ - if not document: - raise TypeError("Must provide document.") - # If the schema used for execution is invalid, throw an error. assert_valid_schema(schema) - # Variables, if provided, must be a dictionary. - if not (raw_variable_values is None or isinstance(raw_variable_values, dict)): - raise TypeError( - "Variable values must be provided as a dictionary" - " with variable names as keys. Perhaps look to see" - " if an unparsed JSON string was provided." - ) - def invalid_return_type_error( return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode] @@ -1345,7 +1332,7 @@ def create_source_event_stream( """ # If arguments are missing or incorrectly typed, this is an internal developer # mistake which should throw an early error. 
- assert_valid_execution_arguments(schema, document, variable_values) + assert_valid_execution_arguments(schema) if not execution_context_class: execution_context_class = ExecutionContext diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index feaab760..327ac7f3 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -53,8 +53,6 @@ cached_property, did_you_mean, inspect, - is_collection, - is_description, suggestion_list, ) from ..utilities.value_from_ast_untyped import value_from_ast_untyped @@ -199,11 +197,7 @@ class GraphQLWrappingType(GraphQLType, Generic[GT]): of_type: GT def __init__(self, type_: GT) -> None: - if not is_type(type_): - raise TypeError( - f"Can only create a wrapper for a GraphQLType, but got: {type_}." - ) - self.of_type = cast(GT, type_) + self.of_type = type_ def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.of_type!r}>" @@ -264,33 +258,13 @@ def __init__( extension_ast_nodes: Optional[Collection[TypeExtensionNode]] = None, ) -> None: assert_name(name) - if description is not None and not is_description(description): - raise TypeError("The description must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError(f"{name} extensions must be a dictionary with string keys.") - if ast_node and not isinstance(ast_node, TypeDefinitionNode): - raise TypeError(f"{name} AST node must be a TypeDefinitionNode.") - if extension_ast_nodes: - if not is_collection(extension_ast_nodes) or not all( - isinstance(node, TypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." 
- ) - if not isinstance(extension_ast_nodes, tuple): - extension_ast_nodes = tuple(extension_ast_nodes) - else: - extension_ast_nodes = () self.name = name self.description = description - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node - self.extension_ast_nodes = extension_ast_nodes + self.extension_ast_nodes = ( + tuple(extension_ast_nodes) if extension_ast_nodes else () + ) def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.name!r}>" @@ -392,41 +366,19 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if specified_by_url is not None and not isinstance(specified_by_url, str): - raise TypeError( - f"{name} must provide 'specified_by_url' as a string," - f" but got: {inspect(specified_by_url)}." - ) - if serialize is not None and not callable(serialize): - raise TypeError( - f"{name} must provide 'serialize' as a function." - " If this custom Scalar is also used as an input type," - " ensure 'parse_value' and 'parse_literal' functions" - " are also provided." - ) - if parse_literal is not None and ( - not callable(parse_literal) - or (parse_value is None or not callable(parse_value)) - ): - raise TypeError( - f"{name} must provide" - " both 'parse_value' and 'parse_literal' as functions." - ) - if ast_node and not isinstance(ast_node, ScalarTypeDefinitionNode): - raise TypeError(f"{name} AST node must be a ScalarTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, ScalarTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of ScalarTypeExtensionNode instances." 
- ) + if serialize is not None: self.serialize = serialize # type: ignore if parse_value is not None: self.parse_value = parse_value # type: ignore if parse_literal is not None: self.parse_literal = parse_literal # type: ignore + if parse_literal is not None: + if parse_value is None: + raise TypeError( + f"{name} must provide" + " both 'parse_value' and 'parse_literal' functions." + ) self.specified_by_url = specified_by_url def __repr__(self) -> str: @@ -531,50 +483,22 @@ def __init__( extensions: Optional[Dict[str, Any]] = None, ast_node: Optional[FieldDefinitionNode] = None, ) -> None: - if not is_output_type(type_): - raise TypeError("Field type must be an output type.") - if args is None: - args = {} - elif not isinstance(args, dict): - raise TypeError("Field args must be a dict with argument names as keys.") - elif not all( - isinstance(value, GraphQLArgument) or is_input_type(value) - for value in args.values() - ): - raise TypeError( - "Field args must be GraphQLArguments or input type objects." - ) - else: + if args: args = { assert_name(name): value if isinstance(value, GraphQLArgument) else GraphQLArgument(cast(GraphQLInputType, value)) for name, value in args.items() } - if resolve is not None and not callable(resolve): - raise TypeError( - "Field resolver must be a function if provided, " - f" but got: {inspect(resolve)}." 
- ) - if description is not None and not is_description(description): - raise TypeError("The description must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError("The deprecation reason must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError("Field extensions must be a dictionary with string keys.") - if ast_node and not isinstance(ast_node, FieldDefinitionNode): - raise TypeError("Field AST node must be a FieldDefinitionNode.") + else: + args = {} self.type = type_ self.args = args or {} self.resolve = resolve self.subscribe = subscribe self.description = description self.deprecation_reason = deprecation_reason - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __repr__(self) -> str: @@ -689,30 +613,12 @@ def __init__( extensions: Optional[Dict[str, Any]] = None, ast_node: Optional[InputValueDefinitionNode] = None, ) -> None: - if not is_input_type(type_): - raise TypeError("Argument type must be a GraphQL input type.") - if description is not None and not is_description(description): - raise TypeError("Argument description must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError("Argument deprecation reason must be a string.") - if out_name is not None and not isinstance(out_name, str): - raise TypeError("Argument out name must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError( - "Argument extensions must be a dictionary with string keys." 
- ) - if ast_node and not isinstance(ast_node, InputValueDefinitionNode): - raise TypeError("Argument AST node must be an InputValueDefinitionNode.") self.type = type_ self.default_value = default_value self.description = description self.deprecation_reason = deprecation_reason self.out_name = out_name - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __eq__(self, other: Any) -> bool: @@ -802,20 +708,6 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if is_type_of is not None and not callable(is_type_of): - raise TypeError( - f"{name} must provide 'is_type_of' as a function," - f" but got: {inspect(is_type_of)}." - ) - if ast_node and not isinstance(ast_node, ObjectTypeDefinitionNode): - raise TypeError(f"{name} AST node must be an ObjectTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, ObjectTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of ObjectTypeExtensionNode instances." - ) self._fields = fields self._interfaces = interfaces self.is_type_of = is_type_of @@ -840,20 +732,6 @@ def fields(self) -> GraphQLFieldMap: except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError raise cls(f"{self.name} fields cannot be resolved. {error}") from error - if not isinstance(fields, Mapping) or not all( - isinstance(key, str) for key in fields - ): - raise TypeError( - f"{self.name} fields must be specified" - " as a mapping with field names as keys." - ) - if not all( - isinstance(value, GraphQLField) or is_output_type(value) - for value in fields.values() - ): - raise TypeError( - f"{self.name} fields must be GraphQLField or output type objects." 
- ) return { assert_name(name): value if isinstance(value, GraphQLField) @@ -871,16 +749,7 @@ def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError raise cls(f"{self.name} interfaces cannot be resolved. {error}") from error - if interfaces is None: - interfaces = () - elif not is_collection(interfaces) or not all( - isinstance(value, GraphQLInterfaceType) for value in interfaces - ): - raise TypeError( - f"{self.name} interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - return tuple(interfaces) + return tuple(interfaces) if interfaces else () def is_object_type(type_: Any) -> TypeGuard[GraphQLObjectType]: @@ -936,20 +805,6 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if resolve_type is not None and not callable(resolve_type): - raise TypeError( - f"{name} must provide 'resolve_type' as a function," - f" but got: {inspect(resolve_type)}." - ) - if ast_node and not isinstance(ast_node, InterfaceTypeDefinitionNode): - raise TypeError(f"{name} AST node must be an InterfaceTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, InterfaceTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of InterfaceTypeExtensionNode instances." - ) self._fields = fields self._interfaces = interfaces self.resolve_type = resolve_type @@ -974,20 +829,6 @@ def fields(self) -> GraphQLFieldMap: except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError raise cls(f"{self.name} fields cannot be resolved. {error}") from error - if not isinstance(fields, Mapping) or not all( - isinstance(key, str) for key in fields - ): - raise TypeError( - f"{self.name} fields must be specified" - " as a mapping with field names as keys." 
- ) - if not all( - isinstance(value, GraphQLField) or is_output_type(value) - for value in fields.values() - ): - raise TypeError( - f"{self.name} fields must be GraphQLField or output type objects." - ) return { assert_name(name): value if isinstance(value, GraphQLField) @@ -1005,16 +846,7 @@ def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError raise cls(f"{self.name} interfaces cannot be resolved. {error}") from error - if interfaces is None: - interfaces = () - elif not is_collection(interfaces) or not all( - isinstance(value, GraphQLInterfaceType) for value in interfaces - ): - raise TypeError( - f"{self.name} interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - return tuple(interfaces) + return tuple(interfaces) if interfaces else () def is_interface_type(type_: Any) -> TypeGuard[GraphQLInterfaceType]: @@ -1071,20 +903,6 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if resolve_type is not None and not callable(resolve_type): - raise TypeError( - f"{name} must provide 'resolve_type' as a function," - f" but got: {inspect(resolve_type)}." - ) - if ast_node and not isinstance(ast_node, UnionTypeDefinitionNode): - raise TypeError(f"{name} AST node must be a UnionTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, UnionTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of UnionTypeExtensionNode instances." - ) self._types = types self.resolve_type = resolve_type @@ -1105,16 +923,7 @@ def types(self) -> Tuple[GraphQLObjectType, ...]: except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError raise cls(f"{self.name} types cannot be resolved. 
{error}") from error - if types is None: - types = () - elif not is_collection(types) or not all( - isinstance(value, GraphQLObjectType) for value in types - ): - raise TypeError( - f"{self.name} types must be specified" - " as a collection of GraphQLObjectType instances." - ) - return tuple(types) + return tuple(types) if types else () def is_union_type(type_: Any) -> TypeGuard[GraphQLUnionType]: @@ -1217,15 +1026,6 @@ def __init__( else GraphQLEnumValue(value) for key, value in values.items() } - if ast_node and not isinstance(ast_node, EnumTypeDefinitionNode): - raise TypeError(f"{name} AST node must be an EnumTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, EnumTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of EnumTypeExtensionNode instances." - ) self.values = values def to_kwargs(self) -> GraphQLEnumTypeKwargs: @@ -1343,26 +1143,10 @@ def __init__( extensions: Optional[Dict[str, Any]] = None, ast_node: Optional[EnumValueDefinitionNode] = None, ) -> None: - if description is not None and not is_description(description): - raise TypeError("The description of the enum value must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError( - "The deprecation reason for the enum value must be a string." - ) - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError( - "Enum value extensions must be a dictionary with string keys." 
- ) - if ast_node and not isinstance(ast_node, EnumValueDefinitionNode): - raise TypeError("AST node must be an EnumValueDefinitionNode.") self.value = value self.description = description self.deprecation_reason = deprecation_reason - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __eq__(self, other: Any) -> bool: @@ -1441,20 +1225,6 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if out_type is not None and not callable(out_type): - raise TypeError(f"The out type for {name} must be a function or a class.") - if ast_node and not isinstance(ast_node, InputObjectTypeDefinitionNode): - raise TypeError( - f"{name} AST node must be an InputObjectTypeDefinitionNode." - ) - if extension_ast_nodes and not all( - isinstance(node, InputObjectTypeExtensionNode) - for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of InputObjectTypeExtensionNode instances." - ) self._fields = fields if out_type is not None: self.out_type = out_type # type: ignore @@ -1488,21 +1258,6 @@ def fields(self) -> GraphQLInputFieldMap: except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError raise cls(f"{self.name} fields cannot be resolved. {error}") from error - if not isinstance(fields, Mapping) or not all( - isinstance(key, str) for key in fields - ): - raise TypeError( - f"{self.name} fields must be specified" - " as a mapping with field names as keys." - ) - if not all( - isinstance(value, GraphQLInputField) or is_input_type(value) - for value in fields.values() - ): - raise TypeError( - f"{self.name} fields must be" - " GraphQLInputField or input type objects." 
- ) return { assert_name(name): value if isinstance(value, GraphQLInputField) @@ -1552,30 +1307,12 @@ def __init__( extensions: Optional[Dict[str, Any]] = None, ast_node: Optional[InputValueDefinitionNode] = None, ) -> None: - if not is_input_type(type_): - raise TypeError("Input field type must be a GraphQL input type.") - if description is not None and not is_description(description): - raise TypeError("Input field description must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError("Input field deprecation reason must be a string.") - if out_name is not None and not isinstance(out_name, str): - raise TypeError("Input field out name must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError( - "Input field extensions must be a dictionary with string keys." - ) - if ast_node and not isinstance(ast_node, InputValueDefinitionNode): - raise TypeError("Input field AST node must be an InputValueDefinitionNode.") self.type = type_ self.default_value = default_value self.description = description self.deprecation_reason = deprecation_reason self.out_name = out_name - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __eq__(self, other: Any) -> bool: @@ -1672,11 +1409,6 @@ class RowType(GraphQLObjectType): def __init__(self, type_: GNT): super().__init__(type_=type_) - if isinstance(type_, GraphQLNonNull): - raise TypeError( - "Can only create NonNull of a Nullable GraphQLType but got:" - f" {type_}." - ) def __str__(self) -> str: return f"{self.of_type}!" 
diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index a460bb16..c3555615 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -3,9 +3,9 @@ from typing import Any, Collection, Dict, Optional, Tuple, cast from ..language import DirectiveLocation, ast -from ..pyutils import inspect, is_description +from ..pyutils import inspect from .assert_name import assert_name -from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull, is_input_type +from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull from .scalars import GraphQLBoolean, GraphQLString @@ -82,44 +82,21 @@ def __init__( f"{name} locations must be specified" " as a collection of DirectiveLocation enum values." ) - if args is None: - args = {} - elif not isinstance(args, dict) or not all( - isinstance(key, str) for key in args - ): - raise TypeError(f"{name} args must be a dict with argument names as keys.") - elif not all( - isinstance(value, GraphQLArgument) or is_input_type(value) - for value in args.values() - ): - raise TypeError( - f"{name} args must be GraphQLArgument or input type objects." 
- ) - else: + if args: args = { assert_name(name): value if isinstance(value, GraphQLArgument) else GraphQLArgument(cast(GraphQLInputType, value)) for name, value in args.items() } - if not isinstance(is_repeatable, bool): - raise TypeError(f"{name} is_repeatable flag must be True or False.") - if ast_node and not isinstance(ast_node, ast.DirectiveDefinitionNode): - raise TypeError(f"{name} AST node must be a DirectiveDefinitionNode.") - if description is not None and not is_description(description): - raise TypeError(f"{name} description must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError(f"{name} extensions must be a dictionary with string keys.") + else: + args = {} self.name = name self.locations = locations self.args = args self.is_repeatable = is_repeatable self.description = description - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __str__(self) -> str: diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index a3673d8b..31cfabbf 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -5,7 +5,7 @@ from ..error import GraphQLError from ..language import OperationType, ast -from ..pyutils import inspect, is_collection, is_description +from ..pyutils import inspect from .definition import ( GraphQLAbstractType, GraphQLCompositeType, @@ -149,62 +149,19 @@ def __init__( """ self._validation_errors = [] if assume_valid else None - # Check for common mistakes during construction to produce clear and early - # error messages, but we leave the specific tests for the validation. 
- if query and not isinstance(query, GraphQLType): - raise TypeError("Expected query to be a GraphQL type.") - if mutation and not isinstance(mutation, GraphQLType): - raise TypeError("Expected mutation to be a GraphQL type.") - if subscription and not isinstance(subscription, GraphQLType): - raise TypeError("Expected subscription to be a GraphQL type.") - if types is None: - types = [] - else: - if not is_collection(types) or not all( - isinstance(type_, GraphQLType) for type_ in types - ): - raise TypeError( - "Schema types must be specified as a collection of GraphQL types." - ) - if directives is not None: - # noinspection PyUnresolvedReferences - if not is_collection(directives): - raise TypeError("Schema directives must be a collection.") - if not isinstance(directives, tuple): - directives = tuple(directives) - if description is not None and not is_description(description): - raise TypeError("Schema description must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError("Schema extensions must be a dictionary with string keys.") - if ast_node and not isinstance(ast_node, ast.SchemaDefinitionNode): - raise TypeError("Schema AST node must be a SchemaDefinitionNode.") - if extension_ast_nodes: - if not is_collection(extension_ast_nodes) or not all( - isinstance(node, ast.SchemaExtensionNode) - for node in extension_ast_nodes - ): - raise TypeError( - "Schema extension AST nodes must be specified" - " as a collection of SchemaExtensionNode instances." 
- ) - if not isinstance(extension_ast_nodes, tuple): - extension_ast_nodes = tuple(extension_ast_nodes) - else: - extension_ast_nodes = () - self.description = description - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node - self.extension_ast_nodes = extension_ast_nodes + self.extension_ast_nodes = ( + tuple(extension_ast_nodes) if extension_ast_nodes else () + ) self.query_type = query self.mutation_type = mutation self.subscription_type = subscription # Provide specified directives (e.g. @include and @skip) by default - self.directives = specified_directives if directives is None else directives + self.directives = ( + specified_directives if directives is None else tuple(directives) + ) # To preserve order of user-provided types, we first add them to the set # of "collected" types, so `collect_referenced_types` ignores them. diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index 3f4fb804..acbef291 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -37,9 +37,6 @@ def build_ast_schema( the produced schema is valid. Set ``assume_valid_sdl`` to ``True`` to assume it is already a valid SDL document. """ - if not isinstance(document_ast, DocumentNode): - raise TypeError("Must provide valid Document AST.") - if not (assume_valid or assume_valid_sdl): from ..validation.validate import assert_valid_sdl diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index 75efdb78..94c08a45 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -63,6 +63,8 @@ def build_client_schema( This function expects a complete introspection result. Don't forget to check the "errors" field of a server response before calling this function. 
""" + # Even though the `introspection` argument is typed, in most cases it's received + # as an untyped value from the server, so we will do an additional check here. if not isinstance(introspection, dict) or not isinstance( introspection.get("__schema"), dict ): diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 445f0c03..18a261fc 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -119,9 +119,6 @@ def extend_schema( """ assert_schema(schema) - if not isinstance(document_ast, DocumentNode): - raise TypeError("Must provide valid Document AST.") - if not (assume_valid or assume_valid_sdl): from ..validation.validate import assert_valid_sdl_extension diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 8f301396..c58d9e7a 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -2,7 +2,6 @@ from ..error import GraphQLError from ..language import DocumentNode, ParallelVisitor, visit -from ..pyutils import inspect, is_collection from ..type import GraphQLSchema, assert_valid_schema from ..utilities import TypeInfo, TypeInfoVisitor from .rules import ASTValidationRule @@ -42,26 +41,14 @@ def validate( Providing a custom TypeInfo instance is deprecated and will be removed in v3.3. """ - if not document_ast or not isinstance(document_ast, DocumentNode): - raise TypeError("Must provide document.") # If the schema used for validation is invalid, throw an error. 
assert_valid_schema(schema) if max_errors is None: max_errors = 100 - elif not isinstance(max_errors, int): - raise TypeError("The maximum number of errors must be passed as an int.") if type_info is None: type_info = TypeInfo(schema) - elif not isinstance(type_info, TypeInfo): - raise TypeError(f"Not a TypeInfo object: {inspect(type_info)}.") if rules is None: rules = specified_rules - elif not is_collection(rules) or not all( - isinstance(rule, type) and issubclass(rule, ASTValidationRule) for rule in rules - ): - raise TypeError( - "Rules must be specified as a collection of ASTValidationRule subclasses." - ) errors: List[GraphQLError] = [] diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index d93abf21..e197ab27 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -1,7 +1,7 @@ import asyncio from typing import Any, Awaitable, Optional, cast -from pytest import mark, raises +from pytest import mark from graphql.error import GraphQLError from graphql.execution import execute, execute_sync @@ -26,59 +26,6 @@ def describe_execute_handles_basic_execution_tasks(): - # noinspection PyTypeChecker - def throws_if_no_document_is_provided(): - schema = GraphQLSchema( - GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) - ) - - with raises(TypeError) as exc_info: - assert execute_sync(schema=schema, document=None) # type: ignore - - assert str(exc_info.value) == "Must provide document." - - # noinspection PyTypeChecker - def throws_if_no_schema_is_provided(): - document = parse("{ field }") - - with raises(TypeError) as exc_info: - assert execute_sync(schema=None, document=document) # type: ignore - - assert str(exc_info.value) == "Expected None to be a GraphQL schema." 
- - def throws_on_invalid_variables(): - schema = GraphQLSchema( - GraphQLObjectType( - "Type", - { - "fieldA": GraphQLField( - GraphQLString, args={"argA": GraphQLArgument(GraphQLInt)} - ) - }, - ) - ) - document = parse( - """ - query ($a: Int) { - fieldA(argA: $a) - } - """ - ) - variable_values = "{'a': 1}" - - with raises(TypeError) as exc_info: - assert execute_sync( - schema=schema, - document=document, - variable_values=variable_values, # type: ignore - ) - - assert str(exc_info.value) == ( - "Variable values must be provided as a dictionary" - " with variable names as keys. Perhaps look to see" - " if an unparsed JSON string was provided." - ) - def accepts_positional_arguments(): schema = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index f01dc354..e2ed520f 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -6,6 +6,7 @@ Callable, Dict, List, + Optional, TypeVar, Union, cast, @@ -19,7 +20,7 @@ create_source_event_stream, subscribe, ) -from graphql.language import parse +from graphql.language import DocumentNode, parse from graphql.pyutils import AwaitableOrValue, SimplePubSub, is_awaitable from graphql.type import ( GraphQLArgument, @@ -181,9 +182,17 @@ def subscribe_with_bad_fn( ), ) document = parse("subscription { foo }") + return subscribe_with_bad_args(schema, document) + +def subscribe_with_bad_args( + schema: GraphQLSchema, + document: DocumentNode, + variable_values: Optional[Dict[str, Any]] = None, +): return assert_equal_awaitables_or_values( - subscribe(schema, document), create_source_event_stream(schema, document) + subscribe(schema, document, variable_values=variable_values), + create_source_event_stream(schema, document, variable_values=variable_values), ) @@ -317,35 +326,12 @@ async def subscribe_bar(_obj, _info): # pragma: no cover await subscription.aclose() - @mark.asyncio - async def 
throws_an_error_if_some_of_required_arguments_are_missing(): - document = parse("subscription { foo }") - - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", {"foo": GraphQLField(GraphQLString)} - ), - ) - - with raises(TypeError, match="^Expected None to be a GraphQL schema\\.$"): - subscribe(None, document) # type: ignore - - with raises(TypeError, match="missing .* positional argument: 'schema'"): - subscribe(document=document) # type: ignore - - with raises(TypeError, match="^Must provide document\\.$"): - subscribe(schema, None) # type: ignore - - with raises(TypeError, match="missing .* positional argument: 'document'"): - subscribe(schema=schema) # type: ignore - @mark.asyncio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") - result = subscribe(schema, document) + result = subscribe_with_bad_args(schema, document) assert result == ( None, @@ -368,7 +354,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ) document = parse("subscription { unknownField }") - result = subscribe(schema, document) + result = subscribe_with_bad_args(schema, document) assert result == ( None, [ @@ -387,8 +373,8 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): "Subscription", {"foo": GraphQLField(GraphQLString)} ), ) - with raises(TypeError, match="^Must provide document\\.$"): - subscribe(schema=schema, document={}) # type: ignore + with raises(AttributeError): + subscribe_with_bad_args(schema=schema, document={}) # type: ignore @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -484,7 +470,9 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # If we receive variables that cannot be coerced correctly, subscribe() will # resolve to an ExecutionResult that contains an informative error description. 
- result = subscribe(schema, document, variable_values=variable_values) + result = subscribe_with_bad_args( + schema, document, variable_values=variable_values + ) assert result == ( None, @@ -497,7 +485,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): ], ) - assert result.errors[0].original_error is None # type: ignore + assert result.errors[0].original_error is None # Once a subscription returns a valid AsyncIterator, it can still yield errors. diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index d217f72d..db72792d 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -109,26 +109,17 @@ def graphql_named_type(): named_type = GraphQLNamedType(name="Foo", description="not lazy") assert named_type.name == "Foo" assert named_type.description == "not lazy" - with raises(TypeError, match="Expected name to be a string\\."): - GraphQLNamedType(name=lazy_string) - with raises(TypeError, match="The description must be a string\\."): - GraphQLNamedType(name="Foo", description=lazy_string) with registered(LazyString): named_type = GraphQLNamedType(name="Foo", description=lazy_string) assert named_type.description is lazy_string assert str(named_type.description).endswith("lazy?") - with raises(TypeError, match="Expected name to be a string\\."): - GraphQLNamedType(name=lazy_string) def graphql_field(): field = GraphQLField(GraphQLString, description="not lazy") assert field.description == "not lazy" field = GraphQLField(GraphQLString, deprecation_reason="not lazy") assert field.deprecation_reason == "not lazy" - with raises(TypeError, match="The description must be a string\\."): - GraphQLField(GraphQLString, description=lazy_string) - with raises(TypeError, match="The deprecation reason must be a string\\."): - GraphQLField(GraphQLString, deprecation_reason=lazy_string) + GraphQLField(GraphQLString, description=lazy_string) with registered(LazyString): field = GraphQLField( 
GraphQLString, @@ -143,8 +134,6 @@ def graphql_field(): def graphql_argument(): arg = GraphQLArgument(GraphQLString, description="not lazy") assert arg.description == "not lazy" - with raises(TypeError, match="Argument description must be a string\\."): - GraphQLArgument(GraphQLString, description=lazy_string) with registered(LazyString): arg = GraphQLArgument(GraphQLString, description=lazy_string) assert arg.description is lazy_string @@ -155,15 +144,6 @@ def graphql_enum_value(): assert value.description == "not lazy" value = GraphQLEnumValue(deprecation_reason="not lazy") assert value.deprecation_reason == "not lazy" - with raises( - TypeError, match="The description of the enum value must be a string\\." - ): - GraphQLEnumValue(description=lazy_string) - with raises( - TypeError, - match="The deprecation reason for the enum value must be a string\\.", - ): - GraphQLEnumValue(deprecation_reason=lazy_string) with registered(LazyString): value = GraphQLEnumValue( description=lazy_string, deprecation_reason=lazy_string @@ -176,8 +156,6 @@ def graphql_enum_value(): def graphql_input_field(): field = GraphQLInputField(GraphQLString, description="not lazy") assert field.description == "not lazy" - with raises(TypeError, match="Input field description must be a string\\."): - GraphQLInputField(GraphQLString, description=lazy_string) with registered(LazyString): field = GraphQLInputField(GraphQLString, description=lazy_string) assert field.description is lazy_string @@ -187,16 +165,10 @@ def graphql_directive(): directive = GraphQLDirective("Foo", [], description="not lazy") assert directive.name == "Foo" assert directive.description == "not lazy" - with raises(TypeError, match="Expected name to be a string\\."): - GraphQLDirective(lazy_string, []) - with raises(TypeError, match="Foo description must be a string\\."): - GraphQLDirective("Foo", [], description=lazy_string) with registered(LazyString): directive = GraphQLDirective("Foo", [], description=lazy_string) 
assert directive.description is lazy_string assert str(directive.description).endswith("lazy?") - with raises(TypeError, match="Expected name to be a string\\."): - GraphQLDirective(lazy_string, []) def handels_introspection(): class Lazy: diff --git a/tests/type/test_assert_name.py b/tests/type/test_assert_name.py index a26a7ea0..06dd1116 100644 --- a/tests/type/test_assert_name.py +++ b/tests/type/test_assert_name.py @@ -8,13 +8,6 @@ def describe_assert_name(): def pass_through_valid_name(): assert assert_name("_ValidName123") == "_ValidName123" - def throws_for_non_strings(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert_name({}) # type: ignore - msg = str(exc_info.value) - assert msg == "Expected name to be a string." - def throws_on_empty_strings(): with raises(GraphQLError) as exc_info: assert_name("") diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 24973086..c06c1a68 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1,7 +1,7 @@ import pickle from enum import Enum from math import isnan, nan -from typing import Dict, cast +from typing import Dict from pytest import mark, raises @@ -15,14 +15,11 @@ InputValueDefinitionNode, InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, - Node, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, ScalarTypeDefinitionNode, ScalarTypeExtensionNode, StringValueNode, - TypeDefinitionNode, - TypeExtensionNode, UnionTypeDefinitionNode, UnionTypeExtensionNode, ValueNode, @@ -181,32 +178,6 @@ def rejects_a_scalar_type_with_invalid_name(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_a_scalar_type_with_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", description=[]) # type: ignore - assert str(exc_info.value) == "The description must be a string." 
- - def rejects_a_scalar_type_defining_specified_by_url_with_an_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", specified_by_url={}) # type: ignore - assert ( - str(exc_info.value) - == "SomeScalar must provide 'specified_by_url' as a string, but got: {}." - ) - - def rejects_a_scalar_type_defining_serialize_with_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", {}) # type: ignore - assert str(exc_info.value) == ( - "SomeScalar must provide 'serialize' as a function." - " If this custom Scalar is also used as an input type," - " ensure 'parse_value' and 'parse_literal' functions" - " are also provided." - ) - def rejects_a_scalar_type_defining_parse_literal_but_not_parse_value(): def parse_literal(_node: ValueNode, _vars=None): return Undefined # pragma: no cover @@ -215,52 +186,7 @@ def parse_literal(_node: ValueNode, _vars=None): GraphQLScalarType("SomeScalar", parse_literal=parse_literal) assert str(exc_info.value) == ( "SomeScalar must provide both" - " 'parse_value' and 'parse_literal' as functions." - ) - - def rejects_a_scalar_type_incorrectly_defining_parse_literal_and_value(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", parse_value={}, parse_literal={} # type: ignore - ) - assert str(exc_info.value) == ( - "SomeScalar must provide both" - " 'parse_value' and 'parse_literal' as functions." - ) - - def rejects_a_scalar_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeScalar AST node must be a TypeDefinitionNode." 
- with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeScalar AST node must be a ScalarTypeDefinitionNode." - - def rejects_a_scalar_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeScalar extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", extension_ast_nodes=[TypeExtensionNode()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeScalar extension AST nodes must be specified" - " as a collection of ScalarTypeExtensionNode instances." + " 'parse_value' and 'parse_literal' functions." ) def pickles_a_custom_scalar_type(): @@ -319,44 +245,6 @@ def defines_a_scalar_type_with_a_deprecation_reason(): assert field.deprecation_reason is deprecation_reason assert field.to_kwargs()["deprecation_reason"] is deprecation_reason - def rejects_a_field_with_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(InputObjectType) # type: ignore - assert str(exc_info.value) == "Field type must be an output type." - - def rejects_a_field_with_incorrect_args(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, args=[]) # type: ignore - assert str(exc_info.value) == ( - "Field args must be a dict with argument names as keys." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, args={"arg": GraphQLObjectType}) # type: ignore - assert str(exc_info.value) == ( - "Field args must be GraphQLArguments or input type objects." 
- ) - - def rejects_a_field_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, description=[]) # type: ignore - assert str(exc_info.value) == "The description must be a string." - - def rejects_a_field_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, deprecation_reason=[]) # type: ignore - assert str(exc_info.value) == "The deprecation reason must be a string." - - def rejects_a_field_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, ast_node=Node()) # type: ignore - assert str(exc_info.value) == "Field AST node must be a FieldDefinitionNode." - def describe_type_system_objects(): def defines_an_object_type(): @@ -566,22 +454,6 @@ def rejects_an_object_type_with_invalid_name(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_object_type_field_with_undefined_config(): - undefined_field = cast(GraphQLField, None) - obj_type = GraphQLObjectType("SomeObject", {"f": undefined_field}) - with raises(TypeError) as exc_info: - assert not obj_type.fields - msg = str(exc_info.value) - assert msg == "SomeObject fields must be GraphQLField or output type objects." - - def rejects_an_object_type_with_incorrectly_typed_fields(): - invalid_field = cast(GraphQLField, [GraphQLField(ScalarType)]) - obj_type = GraphQLObjectType("SomeObject", {"f": invalid_field}) - with raises(TypeError) as exc_info: - assert not obj_type.fields - msg = str(exc_info.value) - assert msg == "SomeObject fields must be GraphQLField or output type objects." 
- def rejects_an_object_type_with_incorrectly_named_fields(): obj_type = GraphQLObjectType( "SomeObject", {"bad-name": GraphQLField(ScalarType)} @@ -591,16 +463,6 @@ def rejects_an_object_type_with_incorrectly_named_fields(): msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." - def rejects_an_object_type_field_function_that_returns_incorrect_type(): - obj_type = GraphQLObjectType( - "SomeObject", lambda: [GraphQLField(ScalarType)] # type: ignore - ) - with raises(TypeError) as exc_info: - assert not obj_type.fields - assert str(exc_info.value) == ( - "SomeObject fields must be specified as a mapping with field names as keys." - ) - def rejects_an_object_type_field_function_that_raises_an_error(): def fields(): raise RuntimeError("Oops!") @@ -610,21 +472,6 @@ def fields(): assert not obj_type.fields assert str(exc_info.value) == "SomeObject fields cannot be resolved. Oops!" - def rejects_an_object_type_with_incorrectly_typed_field_args(): - invalid_args = [{"bad_args": GraphQLArgument(ScalarType)}] - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - { - "badField": GraphQLField( - ScalarType, args=invalid_args # type: ignore - ) - }, - ) - msg = str(exc_info.value) - assert msg == "Field args must be a dict with argument names as keys." - def rejects_an_object_type_with_incorrectly_named_field_args(): obj_type = GraphQLObjectType( "SomeObject", @@ -642,24 +489,6 @@ def rejects_an_object_type_with_incorrectly_named_field_args(): " Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_object_type_with_incorrectly_typed_interfaces(): - obj_type = GraphQLObjectType("SomeObject", {}, interfaces={}) - with raises(TypeError) as exc_info: - assert not obj_type.interfaces - assert str(exc_info.value) == ( - "SomeObject interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." 
- ) - - def rejects_object_type_with_incorrectly_typed_interfaces_as_a_function(): - obj_type = GraphQLObjectType("SomeObject", {}, interfaces=lambda: {}) - with raises(TypeError) as exc_info: - assert not obj_type.interfaces - assert str(exc_info.value) == ( - "SomeObject interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - def rejects_object_type_with_interfaces_as_function_that_raises_an_error(): def interfaces(): raise RuntimeError("Oops!") @@ -669,70 +498,6 @@ def interfaces(): assert not obj_type.interfaces assert str(exc_info.value) == "SomeObject interfaces cannot be resolved. Oops!" - def rejects_an_empty_object_field_resolver(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - {"field": GraphQLField(ScalarType, resolve={})}, # type: ignore - ) - msg = str(exc_info.value) - assert msg == "Field resolver must be a function if provided, but got: {}." - - def rejects_a_constant_scalar_value_resolver(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - {"field": GraphQLField(ScalarType, resolve=0)}, # type: ignore - ) - msg = str(exc_info.value) - assert msg == "Field resolver must be a function if provided, but got: 0." - - def rejects_an_object_type_with_an_incorrect_type_for_is_type_of(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType("AnotherObject", {}, is_type_of={}) # type: ignore - assert str(exc_info.value) == ( - "AnotherObject must provide 'is_type_of' as a function, but got: {}." - ) - - def rejects_an_object_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType("SomeObject", {}, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeObject AST node must be a TypeDefinitionNode." 
- with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", {}, ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeObject AST node must be an ObjectTypeDefinitionNode." - - def rejects_an_object_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", {}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeObject extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - {}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeObject extension AST nodes must be specified" - " as a collection of ObjectTypeExtensionNode instances." - ) - def describe_type_system_interfaces(): def defines_an_interface_type(): @@ -834,23 +599,6 @@ def accepts_an_interface_type_with_ast_node_and_extension_ast_nodes(): assert interface_type.ast_node is ast_node assert interface_type.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_an_interface_type_with_incorrectly_typed_fields(): - interface = GraphQLInterfaceType("SomeInterface", []) # type: ignore - with raises(TypeError) as exc_info: - assert not interface.fields - assert str(exc_info.value) == ( - "SomeInterface fields must be specified" - " as a mapping with field names as keys." - ) - interface = GraphQLInterfaceType( - "SomeInterface", {"f": InputObjectType} # type: ignore - ) - with raises(TypeError) as exc_info: - assert not interface.fields - assert str(exc_info.value) == ( - "SomeInterface fields must be GraphQLField or output type objects." 
- ) - def rejects_an_interface_type_with_unresolvable_fields(): def fields(): raise RuntimeError("Oops!") @@ -860,19 +608,6 @@ def fields(): assert not interface.fields assert str(exc_info.value) == "SomeInterface fields cannot be resolved. Oops!" - def rejects_an_interface_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): - # noinspection PyArgumentList - GraphQLInterfaceType() # type: ignore - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType(None, {}) # type: ignore - assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType(42, {}) # type: ignore - assert str(exc_info.value) == "Expected name to be a string." - def rejects_an_interface_type_with_invalid_name(): with raises(GraphQLError) as exc_info: GraphQLInterfaceType("", {}) @@ -883,15 +618,6 @@ def rejects_an_interface_type_with_invalid_name(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_interface_type_with_incorrectly_typed_interfaces(): - interface = GraphQLInterfaceType("AnotherInterface", {}, lambda: {}) - with raises(TypeError) as exc_info: - assert not interface.interfaces - assert str(exc_info.value) == ( - "AnotherInterface interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - def rejects_an_interface_type_with_unresolvable_interfaces(): def interfaces(): raise RuntimeError("Oops!") @@ -904,53 +630,6 @@ def interfaces(): == "AnotherInterface interfaces cannot be resolved. Oops!" ) - def rejects_an_interface_type_with_an_incorrect_type_for_resolve_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "AnotherInterface", {}, resolve_type={} # type: ignore - ) - assert str(exc_info.value) == ( - "AnotherInterface must provide 'resolve_type' as a function," - " but got: {}." 
- ) - - def rejects_an_interface_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType("SomeInterface", {}, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeInterface AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "SomeInterface", {}, ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeInterface AST node must be an InterfaceTypeDefinitionNode." - - def rejects_an_interface_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "SomeInterface", {}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInterface extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "SomeInterface", - {}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInterface extension AST nodes must be specified" - " as a collection of InterfaceTypeExtensionNode instances." - ) - def describe_type_system_unions(): def accepts_a_union_type_defining_resolve_type(): @@ -985,19 +664,6 @@ def accepts_a_union_type_with_ast_node_and_extension_ast_nodes(): assert union_type.ast_node is ast_node assert union_type.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_a_union_type_with_incorrectly_typed__name(): - with raises(TypeError, match="missing .* required .* 'name'"): - # noinspection PyArgumentList - GraphQLUnionType() # type: ignore - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType(None, []) # type: ignore - assert str(exc_info.value) == "Must provide name." 
- with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType(42, []) # type: ignore - assert str(exc_info.value) == "Expected name to be a string." - def rejects_a_union_type_with_invalid_name(): with raises(GraphQLError) as exc_info: GraphQLUnionType("", []) @@ -1008,23 +674,6 @@ def rejects_a_union_type_with_invalid_name(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_a_union_type_with_an_incorrect_type_for_resolve_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType("SomeUnion", [], resolve_type={}) # type: ignore - assert str(exc_info.value) == ( - "SomeUnion must provide 'resolve_type' as a function, but got: {}." - ) - - def rejects_a_union_type_with_incorrectly_typed_types(): - union_type = GraphQLUnionType("SomeUnion", {"type": ObjectType}) # type: ignore - with raises(TypeError) as exc_info: - assert not union_type.types - assert str(exc_info.value) == ( - "SomeUnion types must be specified" - " as a collection of GraphQLObjectType instances." - ) - def rejects_a_union_type_with_unresolvable_types(): def types(): raise RuntimeError("Oops!") @@ -1034,42 +683,6 @@ def types(): assert not union_type.types assert str(exc_info.value) == "SomeUnion types cannot be resolved. Oops!" - def rejects_a_union_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType("SomeUnion", [], ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeUnion AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType( - "SomeUnion", [], ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeUnion AST node must be a UnionTypeDefinitionNode." 
- - def rejects_a_union_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType( - "SomeUnion", [], extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeUnion extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType( - "SomeUnion", - [], - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeUnion extension AST nodes must be specified" - " as a collection of UnionTypeExtensionNode instances." - ) - def describe_type_system_enums(): def defines_an_enum_using_a_dict(): @@ -1326,48 +939,6 @@ def rejects_an_enum_type_with_incorrectly_typed_values(): "SomeEnum values must be an Enum or a mapping with value names as keys." ) - def rejects_an_enum_type_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType("SomeEnum", {"foo": None}, description=[]) # type: ignore - assert str(exc_info.value) == "The description must be a string." - - def rejects_an_enum_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType("SomeEnum", {"foo": None}, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeEnum AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", {"foo": None}, ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeEnum AST node must be an EnumTypeDefinitionNode." 
- - def rejects_an_enum_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", {"foo": None}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeEnum extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", - {"foo": None}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeEnum extension AST nodes must be specified" - " as a collection of EnumTypeExtensionNode instances." - ) - def describe_enum_values(): def accepts_an_enum_value_without_value(): enum_value = GraphQLEnumValue() @@ -1407,27 +978,6 @@ def can_compare_enum_values(): deprecation_reason="reason 2" ) - def rejects_an_enum_value_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumValue(description=[]) # type: ignore - msg = str(exc_info.value) - assert msg == "The description of the enum value must be a string." - - def rejects_an_enum_value_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumValue(deprecation_reason=[]) # type: ignore - msg = str(exc_info.value) - assert msg == "The deprecation reason for the enum value must be a string." - - def rejects_an_enum_value_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumValue(ast_node=TypeDefinitionNode()) # type: ignore - msg = str(exc_info.value) - assert msg == "AST node must be an EnumValueDefinitionNode." 
- def describe_type_system_input_objects(): def accepts_an_input_object_type_with_a_description(): @@ -1462,62 +1012,6 @@ def accepts_an_input_object_type_with_ast_node_and_extension_ast_nodes(): assert input_obj_type.ast_node is ast_node assert input_obj_type.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_an_input_object_type_with_incorrect_out_type_function(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType("SomeInputObject", {}, out_type=[]) # type: ignore - assert str(exc_info.value) == ( - "The out type for SomeInputObject must be a function or a class." - ) - - def rejects_an_input_object_type_with_incorrectly_typed_description(): - # noinspection PyTypeChecker - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, description=[] # type: ignore - ) - assert str(exc_info.value) == "The description must be a string." - - def rejects_an_input_object_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, ast_node=Node() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeInputObject AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, ast_node=TypeDefinitionNode() # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInputObject AST node must be an InputObjectTypeDefinitionNode." - ) - - def rejects_an_input_object_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInputObject extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." 
- ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", - {}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInputObject extension AST nodes must be specified" - " as a collection of InputObjectTypeExtensionNode instances." - ) - def describe_input_objects_must_have_fields(): def accepts_an_input_object_type_with_fields(): input_obj_type = GraphQLInputObjectType( @@ -1581,28 +1075,6 @@ def rejects_an_input_object_type_with_invalid_name(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_input_object_type_with_incorrect_fields(): - input_obj_type = GraphQLInputObjectType( - "SomeInputObject", [] # type: ignore - ) - with raises(TypeError) as exc_info: - assert not input_obj_type.fields - assert str(exc_info.value) == ( - "SomeInputObject fields must be specified" - " as a mapping with field names as keys." - ) - - def rejects_an_input_object_type_with_incorrect_fields_function(): - input_obj_type = GraphQLInputObjectType( - "SomeInputObject", lambda: [] # type: ignore - ) - with raises(TypeError) as exc_info: - assert not input_obj_type.fields - assert str(exc_info.value) == ( - "SomeInputObject fields must be specified" - " as a mapping with field names as keys." - ) - def rejects_an_input_object_type_with_incorrectly_named_fields(): input_obj_type = GraphQLInputObjectType( "SomeInputObject", {"bad-name": GraphQLInputField(ScalarType)} @@ -1643,16 +1115,6 @@ def resolve(): ) }, ) - input_obj_type = GraphQLInputObjectType( - "SomeInputObject", - {"f": GraphQLField(ScalarType, resolve=resolve)}, # type: ignore - ) - with raises(TypeError) as exc_info: - assert not input_obj_type.fields - assert str(exc_info.value) == ( - "SomeInputObject fields must be GraphQLInputField" - " or input type objects." 
- ) def rejects_an_input_object_type_with_resolver_constant(): with raises( @@ -1696,39 +1158,6 @@ def rejects_an_argument_without_type(): # noinspection PyArgumentList GraphQLArgument() # type: ignore - def rejects_an_argument_with_an_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLObjectType) # type: ignore - msg = str(exc_info.value) - assert msg == "Argument type must be a GraphQL input type." - - def rejects_an_argument_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, description=[]) # type: ignore - assert str(exc_info.value) == "Argument description must be a string." - - def rejects_an_argument_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, deprecation_reason=[]) # type: ignore - assert str(exc_info.value) == "Argument deprecation reason must be a string." - - def rejects_an_argument_with_an_incorrect_out_name(): - # This is an extension of GraphQL.js. - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, out_name=[]) # type: ignore - assert str(exc_info.value) == "Argument out name must be a string." - - def rejects_an_argument_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "Argument AST node must be an InputValueDefinitionNode." 
- def describe_type_system_input_fields(): def accepts_an_input_field_with_a_description(): @@ -1761,39 +1190,6 @@ def rejects_an_input_field_without_type(): # noinspection PyArgumentList GraphQLInputField() # type: ignore - def rejects_an_input_field_with_an_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLObjectType) # type: ignore - msg = str(exc_info.value) - assert msg == "Input field type must be a GraphQL input type." - - def rejects_an_input_field_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, description=[]) # type: ignore - assert str(exc_info.value) == "Input field description must be a string." - - def rejects_an_input_field_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, deprecation_reason=[]) # type: ignore - assert str(exc_info.value) == "Input field deprecation reason must be a string." - - def rejects_an_input_field_with_an_incorrect_out_name(): - # This is an extension of GraphQL.js. - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, out_name=[]) # type: ignore - assert str(exc_info.value) == "Input field out name must be a string." - - def rejects_an_input_field_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "Input field AST node must be an InputValueDefinitionNode." 
- def deprecation_reason_is_preserved_on_fields(): input_obj_type = GraphQLInputObjectType( "someInputObject", @@ -1828,16 +1224,6 @@ def describe_type_system_list(): def accepts_a_type_as_item_type_of_list(type_): assert GraphQLList(type_) - not_types = [{}, dict, str, object, None] - - @mark.parametrize("type_", not_types, ids=lambda type_: repr(type_)) - def rejects_a_non_type_as_item_type_of_list(type_): - with raises(TypeError) as exc_info: - GraphQLList(type_) - assert str(exc_info.value) == ( - f"Can only create a wrapper for a GraphQLType, but got: {type_}." - ) - def describe_type_system_non_null(): types = [ @@ -1855,22 +1241,6 @@ def describe_type_system_non_null(): def accepts_a_type_as_nullable_type_of_non_null(type_): assert GraphQLNonNull(type_) - not_types = [NonNullScalarType, {}, dict, str, object, None] - - @mark.parametrize("type_", not_types, ids=lambda type_: repr(type_)) - def rejects_a_non_type_as_nullable_type_of_non_null(type_): - with raises(TypeError) as exc_info: - GraphQLNonNull(type_) - assert ( - str(exc_info.value) - == ( - "Can only create NonNull of a Nullable GraphQLType" - f" but got: {type_}." - ) - if isinstance(type_, GraphQLNonNull) - else f"Can only create a wrapper for a GraphQLType, but got: {type_}." - ) - def describe_type_system_test_utility_methods(): def stringifies_simple_types(): diff --git a/tests/type/test_directives.py b/tests/type/test_directives.py index 7398e108..8a6fb332 100644 --- a/tests/type/test_directives.py +++ b/tests/type/test_directives.py @@ -1,7 +1,7 @@ from pytest import raises from graphql.error import GraphQLError -from graphql.language import DirectiveDefinitionNode, DirectiveLocation, Node +from graphql.language import DirectiveDefinitionNode, DirectiveLocation from graphql.type import GraphQLArgument, GraphQLDirective, GraphQLInt, GraphQLString @@ -132,34 +132,6 @@ def rejects_a_directive_with_invalid_name(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." 
) - def rejects_a_directive_with_incorrectly_typed_args(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective("Foo", locations=[], args=["arg"]) # type: ignore - assert str(exc_info.value) == ( - "Foo args must be a dict with argument names as keys." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective( - "Foo", - locations=[], - args={1: GraphQLArgument(GraphQLString)}, # type: ignore - ) - assert str(exc_info.value) == ( - "Foo args must be a dict with argument names as keys." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective( - "Foo", - locations=[], - args={"arg": GraphQLDirective("Bar", [])}, # type: ignore - ) - assert str(exc_info.value) == ( - "Foo args must be GraphQLArgument or input type objects." - ) - def rejects_a_directive_with_incorrectly_named_args(): with raises(GraphQLError) as exc_info: GraphQLDirective( @@ -171,12 +143,6 @@ def rejects_a_directive_with_incorrectly_named_args(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_a_directive_with_incorrectly_typed_repeatable_flag(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective("Foo", locations=[], is_repeatable=None) # type: ignore - assert str(exc_info.value) == "Foo is_repeatable flag must be True or False." - def rejects_a_directive_with_undefined_locations(): with raises(TypeError) as exc_info: # noinspection PyTypeChecker @@ -201,19 +167,3 @@ def rejects_a_directive_with_incorrectly_typed_locations(): "Foo locations must be specified" " as a collection of DirectiveLocation enum values." ) - - def rejects_a_directive_with_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective( - "Foo", locations=[], description={"bad": True} # type: ignore - ) - assert str(exc_info.value) == "Foo description must be a string." 
- - def rejects_a_directive_with_incorrectly_typed_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective("Foo", locations=[], ast_node=Node()) # type: ignore - assert str(exc_info.value) == ( - "Foo AST node must be a DirectiveDefinitionNode." - ) diff --git a/tests/type/test_extensions.py b/tests/type/test_extensions.py index 19eefd71..26a0d966 100644 --- a/tests/type/test_extensions.py +++ b/tests/type/test_extensions.py @@ -1,6 +1,4 @@ -from typing import Any, Dict, cast - -from pytest import mark, param, raises +from pytest import param from graphql.type import ( GraphQLArgument, @@ -23,10 +21,6 @@ bad_extensions = [param([], id="list"), param({1: "ext"}, id="non_string_key")] -def bad_extensions_msg(name: str) -> str: - return f"{name} extensions must be a dictionary with string keys." - - def describe_type_system_extensions(): def describe_graphql_scalar_type(): def without_extensions(): @@ -41,12 +35,6 @@ def with_extensions(): assert some_scalar.extensions is scalar_extensions assert some_scalar.to_kwargs()["extensions"] is scalar_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeScalar")): - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", extensions=extensions) - def describe_graphql_object_type(): def without_extensions(): some_object = GraphQLObjectType( @@ -99,18 +87,6 @@ def with_extensions(): assert some_field.to_kwargs()["extensions"] is field_extensions assert some_arg.to_kwargs()["extensions"] is arg_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeObject")): - # noinspection PyTypeChecker - GraphQLObjectType("SomeObject", {}, extensions=extensions) - with raises(TypeError, match=bad_extensions_msg("Field")): - # noinspection PyTypeChecker - GraphQLField(dummy_type, 
extensions=extensions) - with raises(TypeError, match=bad_extensions_msg("Argument")): - # noinspection PyTypeChecker - GraphQLArgument(dummy_type, extensions=extensions) - def describe_graphql_interface_type(): def without_extensions(): some_interface = GraphQLInterfaceType( @@ -163,12 +139,6 @@ def with_extensions(): assert some_field.to_kwargs()["extensions"] is field_extensions assert some_arg.to_kwargs()["extensions"] is arg_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeInterface")): - # noinspection PyTypeChecker - GraphQLInterfaceType("SomeInterface", {}, extensions=extensions) - def describe_graphql_union_type(): def without_extensions(): some_union = GraphQLUnionType("SomeUnion", []) @@ -186,12 +156,6 @@ def with_extensions(): assert some_union.to_kwargs()["extensions"] is union_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeUnion")): - # noinspection PyTypeChecker - GraphQLUnionType("SomeUnion", [], extensions=extensions) - def describe_graphql_enum_type(): def without_extensions(): some_enum = GraphQLEnumType("SomeEnum", {"SOME_VALUE": None}) @@ -220,17 +184,6 @@ def with_extensions(): assert some_enum.to_kwargs()["extensions"] is enum_extensions assert some_value.to_kwargs()["extensions"] is value_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeEnum")): - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", cast(Dict[str, Any], {}), extensions=extensions - ) - with raises(TypeError, match=bad_extensions_msg("Enum value")): - # noinspection PyTypeChecker - GraphQLEnumValue(extensions=extensions) - def describe_graphql_input_object_type(): def without_extensions(): some_input_object = GraphQLInputObjectType( @@ -267,15 +220,6 @@ 
def with_extensions(): ) assert some_input_field.to_kwargs()["extensions"] is input_field_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeInputObject")): - # noinspection PyTypeChecker - GraphQLInputObjectType("SomeInputObject", {}, extensions=extensions) - with raises(TypeError, match=bad_extensions_msg("Input field")): - # noinspection PyTypeChecker - GraphQLInputField(dummy_type, extensions=extensions) - def describe_graphql_directive(): def without_extensions(): some_directive = GraphQLDirective( @@ -307,12 +251,6 @@ def with_extensions(): assert some_directive.to_kwargs()["extensions"] is directive_extensions assert some_arg.to_kwargs()["extensions"] is arg_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("Directive")): - # noinspection PyTypeChecker - GraphQLDirective("SomeDirective", [], extensions=extensions) - def describe_graphql_schema(): def without_extensions(): schema = GraphQLSchema() @@ -328,9 +266,3 @@ def with_extensions(): assert schema.extensions is schema_extensions assert schema.to_kwargs()["extensions"] is schema_extensions - - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("Schema")): - # noinspection PyTypeChecker - GraphQLSchema(extensions=extensions) diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index c2ed29ef..64cbf2ab 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -20,6 +20,7 @@ GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, + GraphQLSchema, GraphQLSkipDirective, GraphQLString, GraphQLUnionType, @@ -38,6 +39,7 @@ assert_object_type, assert_output_type, assert_scalar_type, + assert_schema, assert_type, assert_union_type, assert_wrapping_type, @@ -60,6 +62,7 @@ 
is_required_argument, is_required_input_field, is_scalar_type, + is_schema, is_specified_directive, is_specified_scalar_type, is_type, @@ -537,3 +540,31 @@ def returns_true_for_specified_directives(): def returns_false_for_custom_directive(): assert is_specified_directive(Directive) is False + + +def describe_schema_predicates(): + + schema = GraphQLSchema() + + def describe_is_schema_and_assert_schema(): + def returns_true_for_schema(): + assert is_schema(schema) is True + assert assert_schema(schema) is schema + + def returns_false_for_schema_class_rather_than_instance(): + assert is_schema(GraphQLSchema) is False + with raises(TypeError): + assert_schema(GraphQLSchema) + + def returns_false_for_non_schema(): + assert is_schema(EnumType) is False + with raises(TypeError): + assert_schema(EnumType) + assert is_schema(ScalarType) is False + with raises(TypeError): + assert_schema(ScalarType) + + def return_false_for_random_garbage(): + assert is_schema({"what": "is this"}) is False + with raises(TypeError): + assert_schema({"what": "is this"}) diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index bcf5975d..c30cd4de 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -6,8 +6,6 @@ DirectiveLocation, SchemaDefinitionNode, SchemaExtensionNode, - TypeDefinitionNode, - TypeExtensionNode, ) from graphql.type import ( GraphQLArgument, @@ -176,12 +174,6 @@ def freezes_the_specified_directives(): schema = GraphQLSchema(directives=directives_tuple) assert schema.directives is directives_tuple - def rejects_a_schema_with_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(description=[]) # type: ignore - assert str(exc_info.value) == "Schema description must be a string." 
- def describe_type_map(): def includes_interface_possible_types_in_the_type_map(): SomeInterface = GraphQLInterfaceType("SomeInterface", {}) @@ -357,37 +349,6 @@ def configures_the_schema_to_still_needing_validation(): # noinspection PyProtectedMember assert GraphQLSchema(assume_valid=False).validation_errors is None - def checks_the_configuration_for_mistakes(): - def query(): - pass - - with raises(Exception): - # noinspection PyTypeChecker - GraphQLSchema(query) # type: ignore - with raises(Exception): - GraphQLSchema(types={}) - with raises(Exception): - GraphQLSchema(directives={}) - - def check_that_query_mutation_and_subscription_are_graphql_types(): - directive = GraphQLDirective("foo", []) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(query=directive) # type: ignore - assert str(exc_info.value) == "Expected query to be a GraphQL type." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(mutation=directive) # type: ignore - assert str(exc_info.value) == ( - "Expected mutation to be a GraphQL type." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(subscription=directive) # type: ignore - assert str(exc_info.value) == ( - "Expected subscription to be a GraphQL type." - ) - def describe_a_schema_must_contain_uniquely_named_types(): def rejects_a_schema_which_redefines_a_built_in_type(): # temporarily allow redefinition of the String scalar type @@ -476,28 +437,6 @@ def accepts_a_scalar_type_with_ast_node_and_extension_ast_nodes(): assert schema.ast_node is ast_node assert schema.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_a_schema_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema( - GraphQLObjectType("Query", {}), - ast_node=TypeDefinitionNode(), # type: ignore - ) - msg = str(exc_info.value) - assert msg == "Schema AST node must be a SchemaDefinitionNode." 
- - def rejects_a_scalar_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema( - GraphQLObjectType("Query", {}), - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "Schema extension AST nodes must be specified" - " as a collection of SchemaExtensionNode instances." - ) - def can_deep_copy_a_schema(): source = """ schema { diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 82020a43..e9f5d5e6 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -1,5 +1,5 @@ from operator import attrgetter -from typing import Any, List, Union +from typing import List, Union from pytest import mark, raises @@ -13,7 +13,6 @@ GraphQLInputField, GraphQLInputObjectType, GraphQLInputType, - GraphQLInt, GraphQLInterfaceType, GraphQLList, GraphQLNamedType, @@ -31,8 +30,6 @@ assert_scalar_type, assert_union_type, assert_valid_schema, - is_input_type, - is_output_type, validate_schema, ) from graphql.utilities import build_schema, extend_schema @@ -422,15 +419,6 @@ def rejects_a_schema_extended_with_invalid_root_types(): def rejects_a_schema_whose_types_are_incorrectly_type(): # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema( - SomeObjectType, - types=[{"name": "SomeType"}, SomeDirective], # type: ignore - ) - assert str(exc_info.value) == ( - "Schema types must be specified as a collection of GraphQL types." 
- ) # construct invalid schema manually schema = GraphQLSchema(SomeObjectType) schema.type_map = { @@ -713,32 +701,6 @@ def rejects_a_union_type_with_duplicated_member_type(): def rejects_a_union_type_with_non_object_member_types(): # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test: BadUnion - } - - type TypeA { - field: String - } - - type TypeB { - field: String - } - - union BadUnion = - | TypeA - | String - | TypeB - """ - ) - assert str(exc_info.value) == ( - "BadUnion types must be specified" - " as a collection of GraphQLObjectType instances." - ) - # construct invalid schema manually schema = build_schema( """ type Query { @@ -755,28 +717,13 @@ def rejects_a_union_type_with_non_object_member_types(): union BadUnion = | TypeA - | TypeA + | String | TypeB """ ) - with raises(TypeError) as exc_info: - extend_schema(schema, parse("extend union BadUnion = Int")) - assert str(exc_info.value) == ( - "BadUnion types must be specified" - " as a collection of GraphQLObjectType instances." 
- ) - schema = extend_schema(schema, parse("extend union BadUnion = TypeB")) - bad_union: Any = schema.get_type("BadUnion") - types = bad_union.types - assert isinstance(types, tuple) - types = list(types) - assert types[1].name == "TypeA" - types[1] = GraphQLString - assert types[3].name == "TypeB" - types[3] = GraphQLInt - bad_union.types = tuple(types) - bad_union.ast_node.types[1].name.value = "String" - bad_union.extension_ast_nodes[0].types[0].name.value = "Int" + + schema = extend_schema(schema, parse("extend union BadUnion = Int")) + assert validate_schema(schema) == [ { "message": "Union type BadUnion can only include Object types," @@ -804,17 +751,7 @@ def rejects_a_union_type_with_non_object_member_types(): bad_union = GraphQLUnionType( "BadUnion", types=[member_type] # type: ignore ) - with raises(TypeError) as exc_info: - schema_with_field_type(bad_union) - assert str(exc_info.value) == ( - "BadUnion types must be specified" - " as a collection of GraphQLObjectType instances." - ) - # noinspection PyPropertyAccess - bad_union.types = [] bad_schema = schema_with_field_type(bad_union) - # noinspection PyPropertyAccess - bad_union.types = [member_type] assert validate_schema(bad_schema) == [ { "message": "Union type BadUnion can only include Object types," @@ -961,32 +898,6 @@ def rejects_an_input_object_with_multiple_non_breakable_circular_reference(): ] def rejects_an_input_object_type_with_incorrectly_typed_fields(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - field(arg: SomeInputObject): String - } - - type SomeObject { - field: String - } - - union SomeUnion = SomeObject - - input SomeInputObject { - badObject: SomeObject - badUnion: SomeUnion - goodInputObject: SomeInputObject - } - """ - ) - assert str(exc_info.value) == ( - "SomeInputObject fields cannot be resolved." - " Input field type must be a GraphQL input type." 
- ) - # construct invalid schema manually schema = build_schema( """ type Query { @@ -1000,15 +911,12 @@ def rejects_an_input_object_type_with_incorrectly_typed_fields(): union SomeUnion = SomeObject input SomeInputObject { - badObject: SomeInputObject - badUnion: SomeInputObject + badObject: SomeObject + badUnion: SomeUnion goodInputObject: SomeInputObject } """ ) - some_input_obj: Any = schema.get_type("SomeInputObject") - some_input_obj.fields["badObject"].type = schema.get_type("SomeObject") - some_input_obj.fields["badUnion"].type = schema.get_type("SomeUnion") assert validate_schema(schema) == [ { "message": "The type of SomeInputObject.badObject must be Input Type" @@ -1090,17 +998,9 @@ def rejects_an_enum_type_with_incorrectly_named_values(): def describe_type_system_object_fields_must_have_output_types(): def _schema_with_object_field(type_: GraphQLOutputType) -> GraphQLSchema: - if is_output_type(type_): - field = GraphQLField(type_) - else: - # invalid field cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLField(type_) - assert str(exc_info.value) == "Field type must be an output type." - # therefore we need to monkey-patch a valid field - field = GraphQLField(GraphQLString) - field.type = type_ - bad_object_type = GraphQLObjectType("BadObject", {"badField": field}) + bad_object_type = GraphQLObjectType( + "BadObject", {"badField": GraphQLField(type_)} + ) return GraphQLSchema( GraphQLObjectType("Query", {"f": GraphQLField(bad_object_type)}), types=[SomeObjectType], @@ -1143,27 +1043,10 @@ def rejects_a_non_type_value_as_an_object_field_type(type_): ] def rejects_with_relevant_locations_for_a_non_output_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - field: [SomeInputObject] - } - - input SomeInputObject { - field: String - } - """ - ) - assert str(exc_info.value) == ( - "Query fields cannot be resolved. 
Field type must be an output type." - ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { - field: [String] + field: [SomeInputObject] } input SomeInputObject { @@ -1171,8 +1054,6 @@ def rejects_with_relevant_locations_for_a_non_output_type(): } """ ) - some_input_obj = schema.get_type("SomeInputObject") - schema.query_type.fields["field"].type.of_type = some_input_obj # type: ignore assert validate_schema(schema) == [ { "message": "The type of Query.field must be Output Type" @@ -1200,27 +1081,27 @@ def rejects_an_object_implementing_a_non_type_value(): ] def rejects_an_object_implementing_a_non_interface_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test: BadObject - } + schema = build_schema( + """ + type Query { + test: BadObject + } - input SomeInputObject { - field: String - } + input SomeInputObject { + field: String + } - type BadObject implements SomeInputObject { - field: String - } - """ - ) - assert str(exc_info.value) == ( - "BadObject interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." + type BadObject implements SomeInputObject { + field: String + } + """ ) + assert validate_schema(schema) == [ + { + "message": "Type BadObject must only implement Interface types," + " it cannot implement SomeInputObject." + } + ] def rejects_an_object_implementing_the_same_interface_twice(): schema = build_schema( @@ -1408,18 +1289,7 @@ def rejects_object_implementing_extended_interface_due_to_type_mismatch(): def describe_type_system_interface_fields_must_have_output_types(): def _schema_with_interface_field(type_: GraphQLOutputType) -> GraphQLSchema: - if is_output_type(type_): - field = GraphQLField(type_) - else: - # invalid field cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLField(type_) - assert str(exc_info.value) == "Field type must be an output type." 
- # therefore we need to monkey-patch a valid field - field = GraphQLField(GraphQLString) - field.type = type_ - fields = {"badField": field} - + fields = {"badField": GraphQLField(type_)} bad_interface_type = GraphQLInterfaceType("BadInterface", fields) bad_implementing_type = GraphQLObjectType( "BadImplementing", @@ -1480,32 +1350,6 @@ def rejects_a_non_type_value_as_an_interface_field_type(type_): ] def rejects_a_non_output_type_as_an_interface_field_with_locations(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test: SomeInterface - } - - interface SomeInterface { - field: SomeInputObject - } - - input SomeInputObject { - foo: String - } - - type SomeObject implements SomeInterface { - field: SomeInputObject - } - """ - ) - assert str(exc_info.value) == ( - "SomeInterface fields cannot be resolved." - " Field type must be an output type." - ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { @@ -1513,7 +1357,7 @@ def rejects_a_non_output_type_as_an_interface_field_with_locations(): } interface SomeInterface { - field: String + field: SomeInputObject } input SomeInputObject { @@ -1521,16 +1365,10 @@ def rejects_a_non_output_type_as_an_interface_field_with_locations(): } type SomeObject implements SomeInterface { - field: String + field: SomeInputObject } """ ) - # therefore we need to monkey-patch a valid schema - some_input_obj = schema.get_type("SomeInputObject") - some_interface: Any = schema.get_type("SomeInterface") - some_interface.fields["field"].type = some_input_obj - some_object: Any = schema.get_type("SomeObject") - some_object.fields["field"].type = some_input_obj assert validate_schema(schema) == [ { "message": "The type of SomeInterface.field must be Output Type" @@ -1561,17 +1399,7 @@ def accepts_an_interface_not_implemented_by_at_least_one_object(): def describe_type_system_arguments_must_have_input_types(): def 
_schema_with_arg(type_: GraphQLInputType) -> GraphQLSchema: - if is_input_type(type_): - argument = GraphQLArgument(type_) - else: - # invalid argument cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLArgument(type_) - assert str(exc_info.value) == "Argument type must be a GraphQL input type." - # therefore we need to monkey-patch a valid argument - argument = GraphQLArgument(GraphQLString) - argument.type = type_ - args = {"badArg": argument} + args = {"badArg": GraphQLArgument(type_)} bad_object_type = GraphQLObjectType( "BadObject", {"badField": GraphQLField(GraphQLString, args)}, @@ -1667,28 +1495,10 @@ def rejects_a_required_argument_that_is_deprecated(): ] def rejects_a_non_input_type_as_a_field_arg_with_locations(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test(arg: SomeObject): String - } - - type SomeObject { - foo: String - } - """ - ) - assert str(exc_info.value) == ( - "Query fields cannot be resolved." - " Argument type must be a GraphQL input type." 
- ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { - test(arg: String): String + test(arg: SomeObject): String } type SomeObject { @@ -1696,8 +1506,6 @@ def rejects_a_non_input_type_as_a_field_arg_with_locations(): } """ ) - some_object = schema.get_type("SomeObject") - schema.query_type.fields["test"].args["arg"].type = some_object # type: ignore assert validate_schema(schema) == [ { "message": "The type of Query.test(arg:) must be Input Type" @@ -1709,20 +1517,8 @@ def rejects_a_non_input_type_as_a_field_arg_with_locations(): def describe_type_system_input_object_fields_must_have_input_types(): def _schema_with_input_field(type_: GraphQLInputType) -> GraphQLSchema: - if is_input_type(type_): - input_field = GraphQLInputField(type_) - else: - # invalid input field cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLInputField(type_) - assert str(exc_info.value) == ( - "Input field type must be a GraphQL input type." - ) - # therefore we need to monkey-patch a valid input field - input_field = GraphQLInputField(GraphQLString) - input_field.type = type_ bad_input_object_type = GraphQLInputObjectType( - "BadInputObject", {"badField": input_field} + "BadInputObject", {"badField": GraphQLInputField(type_)} ) return GraphQLSchema( GraphQLObjectType( @@ -1773,28 +1569,6 @@ def rejects_a_non_type_value_as_an_input_field_type(type_): ] def rejects_with_relevant_locations_for_a_non_input_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test(arg: SomeInputObject): String - } - - input SomeInputObject { - foo: SomeObject - } - - type SomeObject { - bar: String - } - """ - ) - assert str(exc_info.value) == ( - "SomeInputObject fields cannot be resolved." - " Input field type must be a GraphQL input type." 
- ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { @@ -1802,7 +1576,7 @@ def rejects_with_relevant_locations_for_a_non_input_type(): } input SomeInputObject { - foo: String + foo: SomeObject } type SomeObject { @@ -1810,9 +1584,6 @@ def rejects_with_relevant_locations_for_a_non_input_type(): } """ ) - some_object = schema.get_type("SomeObject") - some_input_object: Any = schema.get_type("SomeInputObject") - some_input_object.fields["foo"].type = some_object assert validate_schema(schema) == [ { "message": "The type of SomeInputObject.foo must be Input Type" @@ -2417,39 +2188,20 @@ def accepts_an_interface_with_a_subtyped_interface_field_union(): assert validate_schema(schema) == [] def rejects_an_interface_implementing_a_non_interface_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - field: String - } + schema = build_schema( + """ + type Query { + field: String + } - input SomeInputObject { - field: String - } + input SomeInputObject { + field: String + } - interface BadInterface implements SomeInputObject { - field: String - } - """ - ) - assert str(exc_info.value) == ( - "BadInterface interfaces must be specified as a collection" - " of GraphQLInterfaceType instances." 
- ) - # therefore we construct the invalid schema manually - some_input_obj = GraphQLInputObjectType( - "SomeInputObject", {"field": GraphQLInputField(GraphQLString)} - ) - bad_interface = GraphQLInterfaceType( - "BadInterface", {"field": GraphQLField(GraphQLString)} - ) - # noinspection PyTypeChecker - bad_interface.interfaces = (some_input_obj,) - schema = GraphQLSchema( - GraphQLObjectType("Query", {"field": GraphQLField(GraphQLString)}), - types=[bad_interface], + interface BadInterface implements SomeInputObject { + field: String + } + """ ) assert validate_schema(schema) == [ { diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index b3ceffce..c58cc837 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -494,20 +494,17 @@ def multiple_union(): def can_build_recursive_union(): # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - union Hello = Hello + schema = build_schema( + """ + union Hello = Hello - type Query { - hello: Hello - } - """ - ) - assert ( - str(exc_info.value) == "Hello types must be specified" - " as a collection of GraphQLObjectType instances." + type Query { + hello: Hello + } + """ ) + errors = validate_schema(schema) + assert errors and isinstance(errors, list) def custom_scalar(): sdl = dedent( @@ -1188,14 +1185,6 @@ def throws_on_unknown_types(): build_schema(sdl, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") - def rejects_invalid_ast(): - with raises(TypeError) as exc_info: - build_ast_schema(None) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." - with raises(TypeError) as exc_info: - build_ast_schema({}) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." 
- def describe_deepcopy_and_pickle(): # pragma: no cover sdl = print_schema(star_wars_schema) diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 49db1dc6..7c10b06e 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -314,12 +314,17 @@ def allows_extension_of_union_by_adding_itself(): extend union SomeUnion = SomeUnion """ ) - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - extend_schema(schema, extend_ast) - assert str(exc_info.value) == ( - "SomeUnion types must be specified" - " as a collection of GraphQLObjectType instances." + extended_schema = extend_schema(schema, extend_ast) + + assert validate_schema(extended_schema) + expect_schema_changes( + schema, + extended_schema, + dedent( + """ + union SomeUnion = SomeUnion + """ + ), ) def extends_inputs_by_adding_new_fields(): @@ -1334,19 +1339,6 @@ def throws_on_unknown_types(): extend_schema(schema, ast, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") - def rejects_invalid_ast(): - schema = GraphQLSchema() - - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - extend_schema(schema, None) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." - - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - extend_schema(schema, {}) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." 
- def does_not_allow_replacing_a_default_directive(): schema = GraphQLSchema() extend_ast = parse( diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 0f7d80e6..85d64d6d 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -9,42 +9,6 @@ def describe_validate_supports_full_validation(): - def rejects_invalid_documents(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate(test_schema, None) # type: ignore - assert str(exc_info.value) == "Must provide document." - - def rejects_invalid_type_info(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), type_info={} # type: ignore - ) - assert str(exc_info.value) == "Not a TypeInfo object: {}." - - def rejects_invalid_rules(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), rules=[None] # type: ignore - ) - assert ( - str(exc_info.value) == "Rules must be specified as a collection" - " of ASTValidationRule subclasses." - ) - - def rejects_invalid_max_errors(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), max_errors=2.5 # type: ignore - ) - assert ( - str(exc_info.value) - == "The maximum number of errors must be passed as an int." 
- ) - def validates_queries(): doc = parse( """ From d6254c89b9ba272cb1caaaadc79cc67fac2d58d4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 15 Feb 2023 22:54:42 +0100 Subject: [PATCH 071/230] Update mypy and other tools --- poetry.lock | 674 +++++++++++------- pyproject.toml | 13 +- src/graphql/execution/execute.py | 1 - src/graphql/language/ast.py | 6 - src/graphql/language/visitor.py | 1 - src/graphql/type/definition.py | 2 - src/graphql/type/schema.py | 1 - src/graphql/type/validate.py | 4 - .../utilities/find_breaking_changes.py | 3 +- .../rules/provided_required_arguments.py | 1 - tests/execution/test_abstract.py | 4 - tests/execution/test_execution_result.py | 1 - tests/execution/test_middleware.py | 3 - tests/execution/test_mutations.py | 2 - tests/execution/test_nonnull.py | 2 - tests/execution/test_schema.py | 1 - tests/execution/test_subscribe.py | 2 +- tests/execution/test_union_interface.py | 3 - tests/type/test_predicate.py | 1 - .../test_introspection_from_schema.py | 1 - .../test_overlapping_fields_can_be_merged.py | 1 - tox.ini | 10 +- 22 files changed, 441 insertions(+), 296 deletions(-) diff --git a/poetry.lock b/poetry.lock index e23c825d..821f108b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,14 +2,26 @@ [[package]] name = "alabaster" -version = "0.7.12" +version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" category = "dev" optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false python-versions = "*" files = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] [[package]] @@ -71,32 +83,46 @@ yaml = ["PyYAML"] [[package]] name = "black" -version = "22.12.0" +version = "23.1.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = 
"black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} @@ -120,14 +146,14 @@ files = [ [[package]] name = "cachetools" -version = "5.2.0" +version = "5.3.0" description = "Extensible memoizing collections and decorators" category = "dev" optional = false python-versions = "~=3.7" files = [ - {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, - {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, + {file = "cachetools-5.3.0-py3-none-any.whl", hash = "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4"}, + {file = "cachetools-5.3.0.tar.gz", hash = "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14"}, ] [[package]] @@ -156,19 +182,102 @@ files = [ [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "dev" optional = false -python-versions = ">=3.6.0" +python-versions = "*" files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = 
"charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = 
"charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "click" version = "8.1.3" @@ -199,63 +308,63 @@ files = [ [[package]] name = "coverage" -version = "7.0.4" +version = "7.1.0" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:daf91db39324e9939a9db919ee4fb42a1a23634a056616dae891a030e89f87ba"}, - {file = "coverage-7.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55121fe140d7e42cb970999b93cf1c2b24484ce028b32bbd00238bb25c13e34a"}, - {file = "coverage-7.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c027fbb83a8c78a6e06a0302ea1799fdb70e5cda9845a5e000545b8e2b47ea39"}, - {file = "coverage-7.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:caf82db5b7f16b51ec32fe0bd2da0805b177c807aa8bfb478c7e6f893418c284"}, - {file = "coverage-7.0.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ba5cc54baf3c322c4388de2a43cc95f7809366f0600e743e5aae8ea9d1038b2"}, - {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:260854160083f8275a9d9d49a05ab0ffc7a1f08f2ccccbfaec94a18aae9f407c"}, - {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ea45f0dba5a993e93b158f1a9dcfff2770e3bcabf2b80dbe7aa15dce0bcb3bf3"}, - {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6abc91f6f8b3cc0ae1034e2c03f38769fba1952ab70d0b26953aa01691265c39"}, - {file = "coverage-7.0.4-cp310-cp310-win32.whl", hash = "sha256:053cdc47cae08257051d7e934a0de4d095b60eb8a3024fa9f1b2322fa1547137"}, - {file = "coverage-7.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:1e9e94f2612ee549a4b3ee79cbc61bceed77e69cf38cfa05858bae939a886d16"}, - {file = "coverage-7.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5caa9dd91dcc5f054350dc57a02e053d79633907b9ccffff999568d13dcd19f8"}, - {file = "coverage-7.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:efc200fa75d9634525b40babc7a16342bd21c101db1a58ef84dc14f4bf6ac0fd"}, - {file = "coverage-7.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1791e5f74c5b52f76e83fe9f4bb9571cf76d40ee0c51952ee1e4ee935b7e98b9"}, - {file = "coverage-7.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d9201cfa5a98652b9cef36ab202f17fe3ea83f497b4ba2a8ed39399dfb8fcd4"}, - {file = "coverage-7.0.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22d8ef6865cb6834cab2b72fff20747a55c714b57b675f7e11c9624fe4f7cb45"}, - {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:b84076e3de192fba0f95e279ac017b64c7c6ecd4f09f36f13420f5bed898a9c7"}, - {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dcfbf8ffc046f20d75fd775a92c378f6fc7b9bded6c6f2ab88b6b9cb5805a184"}, - {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4665a714af31f160403c2e448fb2fef330719d2e04e836b08d60d612707c1041"}, - {file = "coverage-7.0.4-cp311-cp311-win32.whl", hash = "sha256:2e59aef3fba5758059208c9eff10ae7ded3629e797972746ec33b56844f69411"}, - {file = "coverage-7.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:2b854f7985b48122b6fe346631e86d67b63293f8255cb59a93d79e3d9f1574e3"}, - {file = "coverage-7.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e44b60b0b49aa85d548d392a2dca2c6a581cd4084e72e9e16bd58bd86ec20816"}, - {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2904d7a0388911c61e7e3beefe48c29dfccaba938fc1158f63190101a21e04c2"}, - {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc74b64bfa89e2f862ea45dd6ac1def371d7cc883b76680d20bdd61a6f3daa20"}, - {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06046f54e719da21c79f98ecc0962581d1aee0b3798dc6b12b1217da8bf93f4"}, - {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bc9c77004970a364a1e5454cf7cb884e4277592b959c287689b2a0fd027ef552"}, - {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0815a09b32384e8ff00a5939ec9cd10efce8742347e019c2daca1a32f5ac2aae"}, - {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a78a80d131c067d67d8a6f9bd3d3f7ea7eac82c1c7259f97d7ab73f723da9d55"}, - {file = "coverage-7.0.4-cp37-cp37m-win32.whl", hash = "sha256:2b5936b624fbe711ed02dfd86edd678822e5ee68da02b6d231e5c01090b64590"}, - {file = "coverage-7.0.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:a63922765ee49d5b4c32afb2cd5516812c8665f3b78e64a0dd005bdfabf991b1"}, - {file = "coverage-7.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d68f2f7bddb3acdd3b36ef7f334b9d14f30b93e094f808fbbd8d288b8f9e2f9b"}, - {file = "coverage-7.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dafdba3b2b9010abab08cb8c0dc6549bfca6e1630fe14d47b01dca00d39e694"}, - {file = "coverage-7.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0322354757b47640535daabd2d56384ff3cad2896248fc84d328c5fad4922d5c"}, - {file = "coverage-7.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e8267466662aff93d66fa72b9591d02122dfc8a729b0a43dd70e0fb07ed9b37"}, - {file = "coverage-7.0.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f684d88eb4924ed0630cf488fd5606e334c6835594bb5fe36b50a509b10383ed"}, - {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70c294bb15ba576fb96b580db35895bf03749d683df044212b74e938a7f6821f"}, - {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:34c0457e1ba450ae8b22dc8ea2fd36ada1010af61291e4c96963cd9d9633366f"}, - {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b75aff2c35ceaa299691e772f7bf7c8aeab25f46acea2be3dd04cccb914a9860"}, - {file = "coverage-7.0.4-cp38-cp38-win32.whl", hash = "sha256:6c5554d55668381e131577f20e8f620d4882b04ad558f7e7f3f1f55b3124c379"}, - {file = "coverage-7.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c82f34fafaf5bc05d222fcf84423d6e156432ca35ca78672d4affd0c09c6ef6c"}, - {file = "coverage-7.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8dfb5fed540f77e814bf4ec79619c241af6b4578fa1093c5e3389bbb7beab3f"}, - {file = "coverage-7.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee32a080bab779b71c4d09a3eb5254bfca43ee88828a683dab27dfe8f582516e"}, - {file = 
"coverage-7.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dfbee0bf0d633be3a2ab068f5a5731a70adf147d0ba17d9f9932b46c7c5782b"}, - {file = "coverage-7.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32dc010713455ac0fe2fddb0e48aa43875cc7eb7b09768df10bad8ce45f9c430"}, - {file = "coverage-7.0.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cb88a3019ad042eaa69fc7639ef077793fedbf313e89207aa82fefe92c97ebd"}, - {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:73bc6114aab7753ca784f87bcd3b7613bc797aa255b5bca45e5654070ae9acfb"}, - {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92f135d370fcd7a6fb9659fa2eb716dd2ca364719cbb1756f74d90a221bca1a7"}, - {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f3d485e6ec6e09857bf2115ece572d666b7c498377d4c70e66bb06c63ed177c2"}, - {file = "coverage-7.0.4-cp39-cp39-win32.whl", hash = "sha256:c58921fcd9914b56444292e7546fe183d079db99528142c809549ddeaeacd8e9"}, - {file = "coverage-7.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:f092d9f2ddaa30235d33335fbdb61eb8f3657af519ef5f9dd6bdae65272def11"}, - {file = "coverage-7.0.4-pp37.pp38.pp39-none-any.whl", hash = "sha256:cb8cfa3bf3a9f18211279458917fef5edeb5e1fdebe2ea8b11969ec2ebe48884"}, - {file = "coverage-7.0.4.tar.gz", hash = "sha256:f6c4ad409a0caf7e2e12e203348b1a9b19c514e7d078520973147bf2d3dcbc6f"}, + {file = "coverage-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b946bbcd5a8231383450b195cfb58cb01cbe7f8949f5758566b881df4b33baf"}, + {file = "coverage-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec8e767f13be637d056f7e07e61d089e555f719b387a7070154ad80a0ff31801"}, + {file = "coverage-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a5a5879a939cb84959d86869132b00176197ca561c664fc21478c1eee60d75"}, + {file = 
"coverage-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b643cb30821e7570c0aaf54feaf0bfb630b79059f85741843e9dc23f33aaca2c"}, + {file = "coverage-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32df215215f3af2c1617a55dbdfb403b772d463d54d219985ac7cd3bf124cada"}, + {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:33d1ae9d4079e05ac4cc1ef9e20c648f5afabf1a92adfaf2ccf509c50b85717f"}, + {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:29571503c37f2ef2138a306d23e7270687c0efb9cab4bd8038d609b5c2393a3a"}, + {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:63ffd21aa133ff48c4dff7adcc46b7ec8b565491bfc371212122dd999812ea1c"}, + {file = "coverage-7.1.0-cp310-cp310-win32.whl", hash = "sha256:4b14d5e09c656de5038a3f9bfe5228f53439282abcab87317c9f7f1acb280352"}, + {file = "coverage-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8361be1c2c073919500b6601220a6f2f98ea0b6d2fec5014c1d9cfa23dd07038"}, + {file = "coverage-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da9b41d4539eefd408c46725fb76ecba3a50a3367cafb7dea5f250d0653c1040"}, + {file = "coverage-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5b15ed7644ae4bee0ecf74fee95808dcc34ba6ace87e8dfbf5cb0dc20eab45a"}, + {file = "coverage-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12d076582507ea460ea2a89a8c85cb558f83406c8a41dd641d7be9a32e1274f"}, + {file = "coverage-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2617759031dae1bf183c16cef8fcfb3de7617f394c813fa5e8e46e9b82d4222"}, + {file = "coverage-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4e4881fa9e9667afcc742f0c244d9364d197490fbc91d12ac3b5de0bf2df146"}, + {file = 
"coverage-7.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9d58885215094ab4a86a6aef044e42994a2bd76a446dc59b352622655ba6621b"}, + {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ffeeb38ee4a80a30a6877c5c4c359e5498eec095878f1581453202bfacc8fbc2"}, + {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3baf5f126f30781b5e93dbefcc8271cb2491647f8283f20ac54d12161dff080e"}, + {file = "coverage-7.1.0-cp311-cp311-win32.whl", hash = "sha256:ded59300d6330be27bc6cf0b74b89ada58069ced87c48eaf9344e5e84b0072f7"}, + {file = "coverage-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a43c7823cd7427b4ed763aa7fb63901ca8288591323b58c9cd6ec31ad910f3c"}, + {file = "coverage-7.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a726d742816cb3a8973c8c9a97539c734b3a309345236cd533c4883dda05b8d"}, + {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc7c85a150501286f8b56bd8ed3aa4093f4b88fb68c0843d21ff9656f0009d6a"}, + {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b4198d85a3755d27e64c52f8c95d6333119e49fd001ae5798dac872c95e0f8"}, + {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddb726cb861c3117a553f940372a495fe1078249ff5f8a5478c0576c7be12050"}, + {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:51b236e764840a6df0661b67e50697aaa0e7d4124ca95e5058fa3d7cbc240b7c"}, + {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7ee5c9bb51695f80878faaa5598040dd6c9e172ddcf490382e8aedb8ec3fec8d"}, + {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c31b75ae466c053a98bf26843563b3b3517b8f37da4d47b1c582fdc703112bc3"}, + {file = "coverage-7.1.0-cp37-cp37m-win32.whl", hash = "sha256:3b155caf3760408d1cb903b21e6a97ad4e2bdad43cbc265e3ce0afb8e0057e73"}, 
+ {file = "coverage-7.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2a60d6513781e87047c3e630b33b4d1e89f39836dac6e069ffee28c4786715f5"}, + {file = "coverage-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2cba5c6db29ce991029b5e4ac51eb36774458f0a3b8d3137241b32d1bb91f06"}, + {file = "coverage-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beeb129cacea34490ffd4d6153af70509aa3cda20fdda2ea1a2be870dfec8d52"}, + {file = "coverage-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c45948f613d5d18c9ec5eaa203ce06a653334cf1bd47c783a12d0dd4fd9c851"}, + {file = "coverage-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef382417db92ba23dfb5864a3fc9be27ea4894e86620d342a116b243ade5d35d"}, + {file = "coverage-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c7c0d0827e853315c9bbd43c1162c006dd808dbbe297db7ae66cd17b07830f0"}, + {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e5cdbb5cafcedea04924568d990e20ce7f1945a1dd54b560f879ee2d57226912"}, + {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9817733f0d3ea91bea80de0f79ef971ae94f81ca52f9b66500c6a2fea8e4b4f8"}, + {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:218fe982371ac7387304153ecd51205f14e9d731b34fb0568181abaf7b443ba0"}, + {file = "coverage-7.1.0-cp38-cp38-win32.whl", hash = "sha256:04481245ef966fbd24ae9b9e537ce899ae584d521dfbe78f89cad003c38ca2ab"}, + {file = "coverage-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ae125d1134bf236acba8b83e74c603d1b30e207266121e76484562bc816344c"}, + {file = "coverage-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2bf1d5f2084c3932b56b962a683074a3692bce7cabd3aa023c987a2a8e7612f6"}, + {file = "coverage-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:98b85dd86514d889a2e3dd22ab3c18c9d0019e696478391d86708b805f4ea0fa"}, + {file = 
"coverage-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38da2db80cc505a611938d8624801158e409928b136c8916cd2e203970dde4dc"}, + {file = "coverage-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3164d31078fa9efe406e198aecd2a02d32a62fecbdef74f76dad6a46c7e48311"}, + {file = "coverage-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db61a79c07331e88b9a9974815c075fbd812bc9dbc4dc44b366b5368a2936063"}, + {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ccb092c9ede70b2517a57382a601619d20981f56f440eae7e4d7eaafd1d1d09"}, + {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:33ff26d0f6cc3ca8de13d14fde1ff8efe1456b53e3f0273e63cc8b3c84a063d8"}, + {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d47dd659a4ee952e90dc56c97d78132573dc5c7b09d61b416a9deef4ebe01a0c"}, + {file = "coverage-7.1.0-cp39-cp39-win32.whl", hash = "sha256:d248cd4a92065a4d4543b8331660121b31c4148dd00a691bfb7a5cdc7483cfa4"}, + {file = "coverage-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7ed681b0f8e8bcbbffa58ba26fcf5dbc8f79e7997595bf071ed5430d8c08d6f3"}, + {file = "coverage-7.1.0-pp37.pp38.pp39-none-any.whl", hash = "sha256:755e89e32376c850f826c425ece2c35a4fc266c081490eb0a841e7c1cb0d3bda"}, + {file = "coverage-7.1.0.tar.gz", hash = "sha256:10188fe543560ec4874f974b5305cd1a8bdcfa885ee00ea3a03733464c4ca265"}, ] [package.dependencies] @@ -288,6 +397,18 @@ files = [ {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] +[[package]] +name = "docutils" +version = "0.18.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = 
"sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, + {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, +] + [[package]] name = "exceptiongroup" version = "1.1.0" @@ -355,14 +476,14 @@ flake8 = ">=5.0.0" [[package]] name = "flake8-bugbear" -version = "22.12.6" +version = "23.2.13" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "flake8-bugbear-22.12.6.tar.gz", hash = "sha256:4cdb2c06e229971104443ae293e75e64c6107798229202fbe4f4091427a30ac0"}, - {file = "flake8_bugbear-22.12.6-py3-none-any.whl", hash = "sha256:b69a510634f8a9c298dfda2b18a8036455e6b19ecac4fe582e4d7a0abfa50a30"}, + {file = "flake8-bugbear-23.2.13.tar.gz", hash = "sha256:39259814a83f33c8409417ee12dd4050c9c0bb4c8707c12fc18ae62b2f3ddee1"}, + {file = "flake8_bugbear-23.2.13-py3-none-any.whl", hash = "sha256:f136bd0ca2684f101168bba2310dec541e11aa6b252260c17dcf58d18069a740"}, ] [package.dependencies] @@ -370,7 +491,7 @@ attrs = ">=19.2.0" flake8 = ">=3.0.0" [package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "tox"] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] [[package]] name = "gitdb" @@ -482,19 +603,37 @@ files = [ [[package]] name = "isort" -version = "5.11.4" +version = "5.11.5" description = "A Python utility / library to sort Python imports." 
category = "dev" optional = false python-versions = ">=3.7.0" files = [ - {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, - {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, + {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, + {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, ] [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] @@ -518,52 +657,62 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = 
"MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = 
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, ] [[package]] @@ -580,42 +729,38 @@ files = [ [[package]] name = "mypy" -version = "0.991" +version = "1.0.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, + {file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"}, + {file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"}, + {file = "mypy-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a"}, + {file = "mypy-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593"}, + {file = "mypy-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7"}, + {file = "mypy-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52"}, + {file = "mypy-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d"}, + {file = "mypy-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5"}, + {file = "mypy-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd"}, + {file = "mypy-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2"}, + {file = "mypy-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c"}, + {file = "mypy-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88"}, + {file = "mypy-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805"}, + {file = "mypy-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21"}, + {file = "mypy-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964"}, + {file = "mypy-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36"}, + {file = "mypy-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1"}, + {file = "mypy-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43"}, + {file = "mypy-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb"}, + {file = "mypy-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af"}, + {file = "mypy-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072"}, + {file = "mypy-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457"}, + {file = 
"mypy-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74"}, + {file = "mypy-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d"}, + {file = "mypy-1.0.0-py3-none-any.whl", hash = "sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f"}, + {file = "mypy-1.0.0.tar.gz", hash = "sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf"}, ] [package.dependencies] @@ -632,70 +777,70 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" -version = "22.0" +version = "23.0" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, - {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, + {file = "packaging-23.0-py3-none-any.whl", hash = 
"sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] [[package]] name = "pathspec" -version = "0.10.3" +version = "0.11.0" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, - {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, + {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, + {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, ] [[package]] name = "pbr" -version = "5.11.0" +version = "5.11.1" description = "Python Build Reasonableness" category = "dev" optional = false python-versions = ">=2.6" files = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] [[package]] name = "platformdirs" -version = "2.6.2" +version = "3.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, - {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, + {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, + {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, ] [package.dependencies] typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -781,14 +926,14 @@ plugins = ["importlib-metadata"] [[package]] name = "pyproject-api" -version = "1.4.0" +version = "1.5.0" description = "API to interact with the python pyproject.toml based projects" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pyproject_api-1.4.0-py3-none-any.whl", hash = "sha256:c34226297781efdd1ba4dfb74ce21076d9a8360e2125ea31803c1a02c76b2460"}, - {file = "pyproject_api-1.4.0.tar.gz", hash = "sha256:ac85c1f82e0291dbae5a7739dbb9a990e11ee4034c9b5599ea714f07a24ecd71"}, + {file = "pyproject_api-1.5.0-py3-none-any.whl", hash = "sha256:4c111277dfb96bcd562c6245428f27250b794bfe3e210b8714c4f893952f2c17"}, + {file = "pyproject_api-1.5.0.tar.gz", hash = "sha256:0962df21f3e633b8ddb9567c011e6c1b3dcdfc31b7860c0ede7e24c5a1200fbe"}, ] 
[package.dependencies] @@ -801,14 +946,14 @@ testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=5.1)", "pytest (>=7.2 [[package]] name = "pytest" -version = "7.2.0" +version = "7.2.1" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, - {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, + {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, + {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, ] [package.dependencies] @@ -916,14 +1061,14 @@ pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2022.7" +version = "2022.7.1" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, - {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, ] [[package]] @@ -978,19 +1123,19 @@ files = [ [[package]] name = "requests" -version = "2.28.1" +version = "2.28.2" description = "Python HTTP for Humans." 
category = "dev" optional = false python-versions = ">=3.7, <4" files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" @@ -1000,18 +1145,18 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "65.6.3" +version = "67.3.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-67.3.2-py3-none-any.whl", hash = "sha256:bb6d8e508de562768f2027902929f8523932fcd1fb784e6d573d2cafac995a48"}, + {file = "setuptools-67.3.2.tar.gz", hash = "sha256:95f00380ef2ffa41d9bba85d95b27689d923c93dfbafed4aecd7cf988a25e012"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -1089,27 +1234,27 @@ test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinx" -version = "5.3.0" +version = "6.1.3" description = "Python documentation generator" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, + {file = "Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2"}, + {file = "sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" +docutils = ">=0.18,<0.20" imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" +Pygments = ">=2.13" 
+requests = ">=2.25.0" snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -1120,24 +1265,25 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-rtd-theme" -version = "1.1.1" +version = "1.2.0" description = "Read the Docs theme for Sphinx" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "sphinx_rtd_theme-1.1.1-py2.py3-none-any.whl", hash = "sha256:31faa07d3e97c8955637fc3f1423a5ab2c44b74b8cc558a51498c202ce5cbda7"}, - {file = "sphinx_rtd_theme-1.1.1.tar.gz", hash = "sha256:6146c845f1e1947b3c3dd4432c28998a1693ccc742b4f9ad7c63129f0757c103"}, + {file = "sphinx_rtd_theme-1.2.0-py2.py3-none-any.whl", hash = "sha256:f823f7e71890abe0ac6aaa6013361ea2696fc8d3e1fa798f463e82bdb77eeff2"}, + {file = "sphinx_rtd_theme-1.2.0.tar.gz", hash = "sha256:a0d8bd1a2ed52e0b338cbe19c4b2eef3c5e7a048769753dac6a9f059c7b641b8"}, ] [package.dependencies] -docutils = "<0.18" -sphinx = ">=1.6,<6" +docutils = "<0.19" +sphinx = ">=1.6,<7" +sphinxcontrib-jquery = {version = ">=2.0.0,<3.0.0 || >3.0.0", markers = "python_version > \"3\""} [package.extras] dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] @@ -1160,14 +1306,14 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.3" +version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs 
Apple help books" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "sphinxcontrib.applehelp-1.0.3-py3-none-any.whl", hash = "sha256:ba0f2a22e6eeada8da6428d0d520215ee8864253f32facf958cca81e426f661d"}, - {file = "sphinxcontrib.applehelp-1.0.3.tar.gz", hash = "sha256:83749f09f6ac843b8cb685277dbc818a8bf2d76cc19602699094fe9a74db529e"}, + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] [package.extras] @@ -1206,6 +1352,37 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["html5lib", "pytest"] +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "2.0.0" +description = "Extension to include jQuery on newer Sphinx releases" +category = "dev" +optional = false +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-2.0.0.tar.gz", hash = "sha256:8fb65f6dba84bf7bcd1aea1f02ab3955ac34611d838bcc95d4983b805b234daa"}, + {file = "sphinxcontrib_jquery-2.0.0-py3-none-any.whl", hash = "sha256:ed47fa425c338ffebe3c37e1cdb56e30eb806116b85f01055b158c7057fdb995"}, +] + +[package.dependencies] +setuptools = "*" + [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" @@ -1310,33 +1487,33 @@ testing = ["flaky 
(>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.2.6" +version = "4.4.5" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tox-4.2.6-py3-none-any.whl", hash = "sha256:fb79b3e4b788491949576a9c80c2d56419eac994567c3591e24bb2788b5901d0"}, - {file = "tox-4.2.6.tar.gz", hash = "sha256:ecf224a4f3a318adcdd71aa8fe15ffd31f14afd6a9845a43ffd63950a7325538"}, + {file = "tox-4.4.5-py3-none-any.whl", hash = "sha256:1081864f1a1393ffa11ebe9beaa280349020579310d217a594a4e7b6124c5425"}, + {file = "tox-4.4.5.tar.gz", hash = "sha256:f9bc83c5da8666baa2a4d4e884bbbda124fe646e4b1c0e412949cecc2b6e8f90"}, ] [package.dependencies] -cachetools = ">=5.2" +cachetools = ">=5.3" chardet = ">=5.1" colorama = ">=0.4.6" filelock = ">=3.9" -importlib-metadata = {version = ">=5.2", markers = "python_version < \"3.8\""} -packaging = ">=22" +importlib-metadata = {version = ">=6", markers = "python_version < \"3.8\""} +packaging = ">=23" platformdirs = ">=2.6.2" pluggy = ">=1" -pyproject-api = ">=1.2.1" +pyproject-api = ">=1.5" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} virtualenv = ">=20.17.1" [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6)", "sphinx-argparse-cli (>=1.10)", "sphinx-autodoc-typehints (>=1.19.5)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -testing = ["build[virtualenv] (>=0.9)", "covdefaults (>=2.2.2)", "devpi-process (>=0.3)", "diff-cover (>=7.3)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.12)", "psutil (>=5.9.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.1)", "re-assert (>=1.1)", "time-machine (>=2.8.2)"] +docs = ["furo (>=2022.12.7)", "sphinx 
(>=6.1.3)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.2.2)", "devpi-process (>=0.3)", "diff-cover (>=7.4)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.12.2)", "psutil (>=5.9.4)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.38.4)"] [[package]] name = "typed-ast" @@ -1374,26 +1551,26 @@ files = [ [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] [[package]] name = "urllib3" -version = "1.26.13" +version = "1.26.14" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, ] [package.extras] @@ -1403,65 +1580,66 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.16.2" +version = "20.4.7" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, - {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, + {file = "virtualenv-20.4.7-py2.py3-none-any.whl", hash = "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"}, + {file = "virtualenv-20.4.7.tar.gz", hash = "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467"}, ] [package.dependencies] +appdirs = ">=1.4.3,<2" distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" +filelock = ">=3.0.0,<4" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -platformdirs = ">=2,<3" +six = ">=1.9.0,<2" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", 
"pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "xonsh (>=0.9.16)"] [[package]] name = "virtualenv" -version = "20.17.1" +version = "20.19.0" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"}, - {file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"}, + {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, + {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, ] [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""} -platformdirs = ">=2.4,<3" +platformdirs = ">=2.4,<4" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] -testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier 
(>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] name = "zipp" -version = "3.11.0" +version = "3.13.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, - {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, + {file = "zipp-3.13.0-py3-none-any.whl", hash = "sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"}, + {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "3130d4d2f2d8d6d28c20322a41ae9d267d8f1a930a8b069ed8f3717431253e5b" +content-hash = "b39f7d75a3cf605006ca19870366ad01d1f4df94d559075fc36ff21e619dffcc" diff --git a/pyproject.toml b/pyproject.toml index a14ef58a..0c629003 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ Changelog = "https://github.com/graphql-python/graphql-core/releases" [tool.poetry.dependencies] python = "^3.7" 
typing-extensions = [ - { version = "^4.4", python = "<3.10" } + { version = "^4.5", python = "<3.10" } ] [tool.poetry.group.test] @@ -61,15 +61,18 @@ tox = ">=3.0" optional = true [tool.poetry.group.lint.dependencies] -black = "22.12.0" +black = "23.1.0" flake8 = [ { version = ">=5,<7", python = ">=3.8" }, { version = ">=5,<6", python = "<3.8" } ] flake8-bandit = "^4.1" -flake8-bugbear = "22.12.6" -isort = "^5.11" -mypy = "0.991" +flake8-bugbear = "23.2.13" +isort = [ + { version = "^5.12", python = ">=3.8" }, + { version = "^5.11", python = "<3.8" } +] +mypy = "1.0.0" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 61a63c7d..ef1edb34 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1418,7 +1418,6 @@ def execute_subscription( result = resolve_fn(context.root_value, info, **args) if context.is_awaitable(result): - # noinspection PyShadowingNames async def await_result() -> AsyncIterable[Any]: try: diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index cfbfe62a..099edc01 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -240,7 +240,6 @@ def __hash__(self) -> int: class OperationType(Enum): - QUERY = "query" MUTATION = "mutation" SUBSCRIPTION = "subscription" @@ -479,7 +478,6 @@ class ArgumentNode(Node): class ConstArgumentNode(ArgumentNode): - value: ConstValueNode @@ -561,7 +559,6 @@ class ListValueNode(ValueNode): class ConstListValueNode(ListValueNode): - values: Tuple[ConstValueNode, ...] @@ -572,7 +569,6 @@ class ObjectValueNode(ValueNode): class ConstObjectValueNode(ObjectValueNode): - fields: Tuple[ConstObjectFieldNode, ...] @@ -584,7 +580,6 @@ class ObjectFieldNode(Node): class ConstObjectFieldNode(ObjectFieldNode): - value: ConstValueNode @@ -611,7 +606,6 @@ class DirectiveNode(Node): class ConstDirectiveNode(DirectiveNode): - arguments: Tuple[ConstArgumentNode, ...] 
diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 6ad71da5..406684f0 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -364,7 +364,6 @@ def leave(node: Node, *args: Any) -> Optional[VisitorAction]: return None else: - enter = leave = None enter_leave = EnterLeaveVisitor(enter, leave) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 327ac7f3..8d1d55cd 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -652,7 +652,6 @@ def is_required_argument(arg: GraphQLArgument) -> bool: class GraphQLObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): - fields: GraphQLFieldMap interfaces: Tuple[GraphQLInterfaceType, ...] is_type_of: Optional[GraphQLIsTypeOfFn] @@ -1128,7 +1127,6 @@ class GraphQLEnumValueKwargs(TypedDict, total=False): class GraphQLEnumValue: - value: Any description: Optional[str] deprecation_reason: Optional[str] diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 31cfabbf..386f10a5 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -47,7 +47,6 @@ class InterfaceImplementations(NamedTuple): - objects: List[GraphQLObjectType] interfaces: List[GraphQLInterfaceType] diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 1f6603d8..3fdb2f09 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -57,7 +57,6 @@ def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: # noinspection PyProtectedMember errors = schema._validation_errors if errors is None: - # Validate the schema, producing a list of errors. 
context = SchemaValidationContext(schema) context.validate_root_types() @@ -196,7 +195,6 @@ def validate_name(self, node: Any, name: Optional[str] = None) -> None: def validate_types(self) -> None: validate_input_object_circular_refs = InputObjectCircularRefsValidator(self) for type_ in self.schema.type_map.values(): - # Ensure all provided types are in fact GraphQL type. if not is_named_type(type_): self.report_error( @@ -247,7 +245,6 @@ def validate_fields( ) for field_name, field in fields.items(): - # Ensure they are named correctly. self.validate_name(field, field_name) @@ -463,7 +460,6 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: # Ensure the arguments are valid for field_name, field in fields.items(): - # Ensure they are named correctly. self.validate_name(field, field_name) diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index 5dce9959..c205d78a 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -140,7 +140,7 @@ def find_directive_changes( ) ) - for (old_directive, new_directive) in directives_diff.persisted: + for old_directive, new_directive in directives_diff.persisted: args_diff = dict_diff(old_directive.args, new_directive.args) for arg_name, new_arg in args_diff.added.items(): @@ -506,7 +506,6 @@ def is_change_safe_for_input_object_field_or_field_arg( old_type: GraphQLType, new_type: GraphQLType ) -> bool: if is_list_type(old_type): - return is_list_type( # if they're both lists, make sure underlying types are compatible new_type diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index 2b822e17..08ae7a5c 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -60,7 +60,6 @@ def leave_directive(self, directive_node: 
DirectiveNode, *_args: Any) -> None: directive_name = directive_node.name.value required_args = self.required_args_map.get(directive_name) if required_args: - arg_nodes = directive_node.arguments or () arg_node_set = {arg.name.value for arg in arg_nodes} for arg_name in required_args: diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index 0ea10d47..51e3a55b 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -81,13 +81,11 @@ async def type_error(*_args): class Dog(NamedTuple): - name: str woofs: bool class Cat(NamedTuple): - name: str meows: bool @@ -412,7 +410,6 @@ async def resolve_type_can_throw(sync): ) def describe_using_typename_on_source_object(): - expected = ( { "pets": [ @@ -483,7 +480,6 @@ class RootValueWithInheritance: assert False, f"Unknown access variant: {access}" # pragma: no cover def describe_union_type(): - schema = build_schema( """ type Query { diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index 25247b88..c6b13d40 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -5,7 +5,6 @@ def describe_execution_result(): - data = {"foo": "Some data"} error = GraphQLError("Some error") errors = [error] diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 86436c27..0d19e08e 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -141,7 +141,6 @@ def second(self, _info): ) class ReverseMiddleware: - # noinspection PyMethodMayBeStatic def resolve(self, next_, *args, **kwargs): return next_(*args, **kwargs)[::-1] @@ -185,7 +184,6 @@ def reverse_middleware(next_, *args, **kwargs): return next_(*args, **kwargs)[::-1] class CaptitalizeMiddleware: - # noinspection PyMethodMayBeStatic def resolve(self, next_, *args, **kwargs): return next_(*args, **kwargs).capitalize() @@ -219,7 +217,6 @@ async def 
reverse_middleware(next_, *args, **kwargs): return (await next_(*args, **kwargs))[::-1] class CaptitalizeMiddleware: - # noinspection PyMethodMayBeStatic async def resolve(self, next_, *args, **kwargs): return (await next_(*args, **kwargs)).capitalize() diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 819eddd4..3aa8d1c2 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -16,7 +16,6 @@ # noinspection PyPep8Naming class NumberHolder: - theNumber: int def __init__(self, originalNumber: int): @@ -25,7 +24,6 @@ def __init__(self, originalNumber: int): # noinspection PyPep8Naming class Root: - numberHolder: NumberHolder def __init__(self, originalNumber: int): diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index 6d0f2993..fe3dacee 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -147,7 +147,6 @@ async def throws(): ) def describe_nulls_a_returned_object_that_contains_a_non_null_field(): - query = """ { syncNest { @@ -512,7 +511,6 @@ async def throws(): ) def describe_handles_non_null_argument(): - # noinspection PyPep8Naming schema_with_non_null_arg = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index 9f5918f0..69e4f973 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -19,7 +19,6 @@ def describe_execute_handles_execution_with_a_complex_schema(): def executes_using_a_schema(): class Article: - # noinspection PyShadowingBuiltins def __init__(self, id: int): self.id = id diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index e2ed520f..e6bb726e 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -881,7 +881,7 @@ def resolve_message(message, _info): subscription = subscribe(schema, document) assert isinstance(subscription, MapAsyncIterator) - assert await 
(anext(subscription)) == ({"newMessage": "Hello"}, None) + assert await anext(subscription) == ({"newMessage": "Hello"}, None) with raises(RuntimeError) as exc_info: await anext(subscription) diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py index 280199e4..4a3099a9 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -17,7 +17,6 @@ class Dog: - name: str barks: bool mother: Optional[Dog] @@ -33,7 +32,6 @@ def __init__(self, name: str, barks: bool): class Cat: - name: str meows: bool mother: Optional[Cat] @@ -49,7 +47,6 @@ def __init__(self, name: str, meows: bool): class Person: - name: str pets: Optional[List[Union[Dog, Cat]]] friends: Optional[List[Union[Dog, Cat, Person]]] diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index 64cbf2ab..be3b6e82 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -543,7 +543,6 @@ def returns_false_for_custom_directive(): def describe_schema_predicates(): - schema = GraphQLSchema() def describe_is_schema_and_assert_schema(): diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index 878ac0fb..353dc2ea 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -22,7 +22,6 @@ def introspection_to_sdl(introspection: IntrospectionQuery) -> str: def describe_introspection_from_schema(): - schema = GraphQLSchema( GraphQLObjectType( "Simple", diff --git a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py index 6a190485..d2528355 100644 --- a/tests/validation/test_overlapping_fields_can_be_merged.py +++ b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -577,7 +577,6 @@ def ignores_unknown_fragments(): ) def describe_return_types_must_be_unambiguous(): - schema = build_schema( """ interface 
SomeBox { diff --git a/tox.ini b/tox.ini index 35ee29d7..c0708030 100644 --- a/tox.ini +++ b/tox.ini @@ -15,7 +15,7 @@ python = [testenv:black] basepython = python3.10 -deps = black==22.10.0 +deps = black==23.1.0 commands = black src tests -t py310 --check @@ -24,20 +24,20 @@ basepython = python3.10 deps = flake8>=6,<7 flake8-bandit>=4.1,<5 - flake8-bugbear==22.12.6 + flake8-bugbear==23.2.13 commands = flake8 src tests [testenv:isort] basepython = python3.10 -deps = isort>=5.11,<6 +deps = isort>=5.12,<6 commands = isort src tests --check-only [testenv:mypy] basepython = python3.10 deps = - mypy==0.991 + mypy==1.0.0 pytest>=7.2,<8 commands = mypy src tests @@ -58,7 +58,7 @@ deps = pytest-cov>=4,<5 pytest-describe>=2,<3 pytest-timeout>=2.1,<3 - py37,py38,py39,pypy39: typing-extensions>=4.4,<5 + py37,py38,py39,pypy39: typing-extensions>=4.5,<5 commands = # to also run the time-consuming tests: tox -e py310 -- --run-slow # to run the benchmarks: tox -e py310 -- -k benchmarks --benchmark-enable From b690e4c08bea833d24890e4b3a1af85d567a35bf Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 15 Feb 2023 23:05:13 +0100 Subject: [PATCH 072/230] Check coverage by default only with latest Python --- .github/workflows/lint.yml | 4 ++-- .github/workflows/publish.yml | 4 ++-- tests/language/test_block_string_fuzz.py | 4 ++-- tests/utilities/test_build_ast_schema.py | 8 ++++---- tests/utilities/test_introspection_from_schema.py | 8 ++++---- .../utilities/test_strip_ignored_characters_fuzz.py | 4 ++-- tests/utils/__init__.py | 8 +------- tox.ini | 13 +++++++------ 8 files changed, 24 insertions(+), 29 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 106d22bd..12790703 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,10 +9,10 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Set up Python 3.10 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: 
'3.11' - name: Install dependencies run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index f1f39421..871ad03c 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -12,10 +12,10 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Set up Python 3.10 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: '3.11' - name: Build wheel and source tarball run: | diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index e3a38f38..647db3c1 100644 --- a/tests/language/test_block_string_fuzz.py +++ b/tests/language/test_block_string_fuzz.py @@ -6,7 +6,7 @@ print_block_string, ) -from ..utils import dedent, gen_fuzz_strings, timeout_factor +from ..utils import dedent, gen_fuzz_strings def lex_value(s: str) -> str: @@ -42,7 +42,7 @@ def assert_non_printable_block_string(test_value: str) -> None: def describe_print_block_string(): @mark.slow - @mark.timeout(80 * timeout_factor) + @mark.timeout(80) def correctly_print_random_strings(): # Testing with length >7 is taking exponentially more time. However, it is # highly recommended testing with increased limit if you make any change. 
diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index c58cc837..0e55c168 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -39,7 +39,7 @@ from ..fixtures import big_schema_sdl # noqa: F401 from ..star_wars_schema import star_wars_schema -from ..utils import dedent, timeout_factor +from ..utils import dedent try: @@ -1228,7 +1228,7 @@ def can_deep_copy_pickled_schema(): @mark.slow def describe_deepcopy_and_pickle_big(): # pragma: no cover - @mark.timeout(20 * timeout_factor) + @mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # use our printing conventions big_schema_sdl = cycle_sdl(big_schema_sdl) @@ -1240,7 +1240,7 @@ def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # check that printing the copied schema gives the same SDL assert print_schema(copied) == big_schema_sdl - @mark.timeout(60 * timeout_factor) + @mark.timeout(60) def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 # use our printing conventions big_schema_sdl = cycle_sdl(big_schema_sdl) @@ -1272,7 +1272,7 @@ def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 finally: sys.setrecursionlimit(limit) - @mark.timeout(60 * timeout_factor) + @mark.timeout(60) def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 # use our printing conventions big_schema_sdl = cycle_sdl(big_schema_sdl) diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index 353dc2ea..9112bdf7 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -14,7 +14,7 @@ ) from ..fixtures import big_schema_introspection_result, big_schema_sdl # noqa: F401 -from ..utils import dedent, timeout_factor +from ..utils import dedent def introspection_to_sdl(introspection: IntrospectionQuery) -> str: @@ -108,7 +108,7 @@ def 
can_deep_copy_pickled_schema(): @mark.slow def describe_deepcopy_and_pickle_big(): # pragma: no cover - @mark.timeout(20 * timeout_factor) + @mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # introspect the original big schema big_schema = build_schema(big_schema_sdl) @@ -119,7 +119,7 @@ def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # check that introspecting the copied schema gives the same result assert introspection_from_schema(copied) == expected_introspection - @mark.timeout(60 * timeout_factor) + @mark.timeout(60) def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 # introspect the original big schema big_schema = build_schema(big_schema_sdl) @@ -153,7 +153,7 @@ def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 finally: sys.setrecursionlimit(limit) - @mark.timeout(60 * timeout_factor) + @mark.timeout(60) def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 # introspect the original big schema big_schema = build_schema(big_schema_sdl) diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 5b038ca2..977f62ea 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -7,7 +7,7 @@ from graphql.language import Lexer, Source, TokenKind from graphql.utilities import strip_ignored_characters -from ..utils import dedent, gen_fuzz_strings, timeout_factor +from ..utils import dedent, gen_fuzz_strings ignored_tokens = [ @@ -228,7 +228,7 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): ).to_stay_the_same() @mark.slow - @mark.timeout(80 * timeout_factor) + @mark.timeout(80) def strips_ignored_characters_inside_random_block_strings(): # Testing with length >7 is taking exponentially more time. However it is # highly recommended to test with increased limit if you make any change. 
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 7657950a..d6392286 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,13 +1,7 @@ """Test utilities""" -from platform import python_implementation - from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings -# some tests can take much longer on PyPy -timeout_factor = 4 if python_implementation() == "PyPy" else 1 - - -__all__ = ["dedent", "gen_fuzz_strings", "timeout_factor"] +__all__ = ["dedent", "gen_fuzz_strings"] diff --git a/tox.ini b/tox.ini index c0708030..b3b16643 100644 --- a/tox.ini +++ b/tox.ini @@ -14,13 +14,13 @@ python = pypy3.9: pypy39 [testenv:black] -basepython = python3.10 +basepython = python3.11 deps = black==23.1.0 commands = black src tests -t py310 --check [testenv:flake8] -basepython = python3.10 +basepython = python3.11 deps = flake8>=6,<7 flake8-bandit>=4.1,<5 @@ -29,13 +29,13 @@ commands = flake8 src tests [testenv:isort] -basepython = python3.10 +basepython = python3.11 deps = isort>=5.12,<6 commands = isort src tests --check-only [testenv:mypy] -basepython = python3.10 +basepython = python3.11 deps = mypy==1.0.0 pytest>=7.2,<8 @@ -43,7 +43,7 @@ commands = mypy src tests [testenv:docs] -basepython = python3.10 +basepython = python3.11 deps = sphinx>=5.3,<6 sphinx_rtd_theme>=1.1,<2 @@ -62,4 +62,5 @@ deps = commands = # to also run the time-consuming tests: tox -e py310 -- --run-slow # to run the benchmarks: tox -e py310 -- -k benchmarks --benchmark-enable - pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} + py37,py38.py39,py310,pypy39: pytest tests {posargs} + py311: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 1cd1b78d404b753528630932a694a99f1a93ddb8 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 15 Feb 2023 23:19:31 +0100 Subject: [PATCH 073/230] Some stubs do not yet exist for Python 3.11 --- tox.ini | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index b3b16643..8e870e6c 100644 --- a/tox.ini +++ b/tox.ini @@ -43,7 +43,7 @@ commands = mypy src tests [testenv:docs] -basepython = python3.11 +basepython = python3.10 deps = sphinx>=5.3,<6 sphinx_rtd_theme>=1.1,<2 From 7fd3ce43e85ad0236c7b5f59fe2d534f3787cd95 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 15:10:49 +0100 Subject: [PATCH 074/230] Update dependencies --- poetry.lock | 188 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- tox.ini | 2 +- 3 files changed, 96 insertions(+), 96 deletions(-) diff --git a/poetry.lock b/poetry.lock index 821f108b..50ad8e8a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -308,63 +308,63 @@ files = [ [[package]] name = "coverage" -version = "7.1.0" +version = "7.2.0" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b946bbcd5a8231383450b195cfb58cb01cbe7f8949f5758566b881df4b33baf"}, - {file = "coverage-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec8e767f13be637d056f7e07e61d089e555f719b387a7070154ad80a0ff31801"}, - {file = "coverage-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a5a5879a939cb84959d86869132b00176197ca561c664fc21478c1eee60d75"}, - {file = "coverage-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b643cb30821e7570c0aaf54feaf0bfb630b79059f85741843e9dc23f33aaca2c"}, - {file = "coverage-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32df215215f3af2c1617a55dbdfb403b772d463d54d219985ac7cd3bf124cada"}, - {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:33d1ae9d4079e05ac4cc1ef9e20c648f5afabf1a92adfaf2ccf509c50b85717f"}, - {file = 
"coverage-7.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:29571503c37f2ef2138a306d23e7270687c0efb9cab4bd8038d609b5c2393a3a"}, - {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:63ffd21aa133ff48c4dff7adcc46b7ec8b565491bfc371212122dd999812ea1c"}, - {file = "coverage-7.1.0-cp310-cp310-win32.whl", hash = "sha256:4b14d5e09c656de5038a3f9bfe5228f53439282abcab87317c9f7f1acb280352"}, - {file = "coverage-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8361be1c2c073919500b6601220a6f2f98ea0b6d2fec5014c1d9cfa23dd07038"}, - {file = "coverage-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da9b41d4539eefd408c46725fb76ecba3a50a3367cafb7dea5f250d0653c1040"}, - {file = "coverage-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5b15ed7644ae4bee0ecf74fee95808dcc34ba6ace87e8dfbf5cb0dc20eab45a"}, - {file = "coverage-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12d076582507ea460ea2a89a8c85cb558f83406c8a41dd641d7be9a32e1274f"}, - {file = "coverage-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2617759031dae1bf183c16cef8fcfb3de7617f394c813fa5e8e46e9b82d4222"}, - {file = "coverage-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4e4881fa9e9667afcc742f0c244d9364d197490fbc91d12ac3b5de0bf2df146"}, - {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9d58885215094ab4a86a6aef044e42994a2bd76a446dc59b352622655ba6621b"}, - {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ffeeb38ee4a80a30a6877c5c4c359e5498eec095878f1581453202bfacc8fbc2"}, - {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3baf5f126f30781b5e93dbefcc8271cb2491647f8283f20ac54d12161dff080e"}, - {file = "coverage-7.1.0-cp311-cp311-win32.whl", hash = 
"sha256:ded59300d6330be27bc6cf0b74b89ada58069ced87c48eaf9344e5e84b0072f7"}, - {file = "coverage-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a43c7823cd7427b4ed763aa7fb63901ca8288591323b58c9cd6ec31ad910f3c"}, - {file = "coverage-7.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a726d742816cb3a8973c8c9a97539c734b3a309345236cd533c4883dda05b8d"}, - {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc7c85a150501286f8b56bd8ed3aa4093f4b88fb68c0843d21ff9656f0009d6a"}, - {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b4198d85a3755d27e64c52f8c95d6333119e49fd001ae5798dac872c95e0f8"}, - {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddb726cb861c3117a553f940372a495fe1078249ff5f8a5478c0576c7be12050"}, - {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:51b236e764840a6df0661b67e50697aaa0e7d4124ca95e5058fa3d7cbc240b7c"}, - {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7ee5c9bb51695f80878faaa5598040dd6c9e172ddcf490382e8aedb8ec3fec8d"}, - {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c31b75ae466c053a98bf26843563b3b3517b8f37da4d47b1c582fdc703112bc3"}, - {file = "coverage-7.1.0-cp37-cp37m-win32.whl", hash = "sha256:3b155caf3760408d1cb903b21e6a97ad4e2bdad43cbc265e3ce0afb8e0057e73"}, - {file = "coverage-7.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2a60d6513781e87047c3e630b33b4d1e89f39836dac6e069ffee28c4786715f5"}, - {file = "coverage-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2cba5c6db29ce991029b5e4ac51eb36774458f0a3b8d3137241b32d1bb91f06"}, - {file = "coverage-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beeb129cacea34490ffd4d6153af70509aa3cda20fdda2ea1a2be870dfec8d52"}, - {file = 
"coverage-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c45948f613d5d18c9ec5eaa203ce06a653334cf1bd47c783a12d0dd4fd9c851"}, - {file = "coverage-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef382417db92ba23dfb5864a3fc9be27ea4894e86620d342a116b243ade5d35d"}, - {file = "coverage-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c7c0d0827e853315c9bbd43c1162c006dd808dbbe297db7ae66cd17b07830f0"}, - {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e5cdbb5cafcedea04924568d990e20ce7f1945a1dd54b560f879ee2d57226912"}, - {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9817733f0d3ea91bea80de0f79ef971ae94f81ca52f9b66500c6a2fea8e4b4f8"}, - {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:218fe982371ac7387304153ecd51205f14e9d731b34fb0568181abaf7b443ba0"}, - {file = "coverage-7.1.0-cp38-cp38-win32.whl", hash = "sha256:04481245ef966fbd24ae9b9e537ce899ae584d521dfbe78f89cad003c38ca2ab"}, - {file = "coverage-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ae125d1134bf236acba8b83e74c603d1b30e207266121e76484562bc816344c"}, - {file = "coverage-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2bf1d5f2084c3932b56b962a683074a3692bce7cabd3aa023c987a2a8e7612f6"}, - {file = "coverage-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:98b85dd86514d889a2e3dd22ab3c18c9d0019e696478391d86708b805f4ea0fa"}, - {file = "coverage-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38da2db80cc505a611938d8624801158e409928b136c8916cd2e203970dde4dc"}, - {file = "coverage-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3164d31078fa9efe406e198aecd2a02d32a62fecbdef74f76dad6a46c7e48311"}, - {file = 
"coverage-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db61a79c07331e88b9a9974815c075fbd812bc9dbc4dc44b366b5368a2936063"}, - {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ccb092c9ede70b2517a57382a601619d20981f56f440eae7e4d7eaafd1d1d09"}, - {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:33ff26d0f6cc3ca8de13d14fde1ff8efe1456b53e3f0273e63cc8b3c84a063d8"}, - {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d47dd659a4ee952e90dc56c97d78132573dc5c7b09d61b416a9deef4ebe01a0c"}, - {file = "coverage-7.1.0-cp39-cp39-win32.whl", hash = "sha256:d248cd4a92065a4d4543b8331660121b31c4148dd00a691bfb7a5cdc7483cfa4"}, - {file = "coverage-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7ed681b0f8e8bcbbffa58ba26fcf5dbc8f79e7997595bf071ed5430d8c08d6f3"}, - {file = "coverage-7.1.0-pp37.pp38.pp39-none-any.whl", hash = "sha256:755e89e32376c850f826c425ece2c35a4fc266c081490eb0a841e7c1cb0d3bda"}, - {file = "coverage-7.1.0.tar.gz", hash = "sha256:10188fe543560ec4874f974b5305cd1a8bdcfa885ee00ea3a03733464c4ca265"}, + {file = "coverage-7.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90e7a4cbbb7b1916937d380beb1315b12957b8e895d7d9fb032e2038ac367525"}, + {file = "coverage-7.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:34d7211be69b215ad92298a962b2cd5a4ef4b17c7871d85e15d3d1b6dc8d8c96"}, + {file = "coverage-7.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971b49dbf713044c3e5f6451b39f65615d4d1c1d9a19948fa0f41b0245a98765"}, + {file = "coverage-7.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0557289260125a6c453ad5673ba79e5b6841d9a20c9e101f758bfbedf928a77"}, + {file = "coverage-7.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:049806ae2df69468c130f04f0fab4212c46b34ba5590296281423bb1ae379df2"}, + {file = "coverage-7.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:875b03d92ac939fbfa8ae74a35b2c468fc4f070f613d5b1692f9980099a3a210"}, + {file = "coverage-7.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c160e34e388277f10c50dc2c7b5e78abe6d07357d9fe7fcb2f3c156713fd647e"}, + {file = "coverage-7.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:32e6a730fd18b2556716039ab93278ccebbefa1af81e6aa0c8dba888cf659e6e"}, + {file = "coverage-7.2.0-cp310-cp310-win32.whl", hash = "sha256:f3ff4205aff999164834792a3949f82435bc7c7655c849226d5836c3242d7451"}, + {file = "coverage-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:93db11da6e728587e943dff8ae1b739002311f035831b6ecdb15e308224a4247"}, + {file = "coverage-7.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd38140b56538855d3d5722c6d1b752b35237e7ea3f360047ce57f3fade82d98"}, + {file = "coverage-7.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dbb21561b0e04acabe62d2c274f02df0d715e8769485353ddf3cf84727e31ce"}, + {file = "coverage-7.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:171dd3aa71a49274a7e4fc26f5bc167bfae5a4421a668bc074e21a0522a0af4b"}, + {file = "coverage-7.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4655ecd813f4ba44857af3e9cffd133ab409774e9d2a7d8fdaf4fdfd2941b789"}, + {file = "coverage-7.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1856a8c4aa77eb7ca0d42c996d0ca395ecafae658c1432b9da4528c429f2575c"}, + {file = "coverage-7.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd67df6b48db18c10790635060858e2ea4109601e84a1e9bfdd92e898dc7dc79"}, + {file = "coverage-7.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2d7daf3da9c7e0ed742b3e6b4de6cc464552e787b8a6449d16517b31bbdaddf5"}, + {file = 
"coverage-7.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf9e02bc3dee792b9d145af30db8686f328e781bd212fdef499db5e9e4dd8377"}, + {file = "coverage-7.2.0-cp311-cp311-win32.whl", hash = "sha256:3713a8ec18781fda408f0e853bf8c85963e2d3327c99a82a22e5c91baffcb934"}, + {file = "coverage-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:88ae5929f0ef668b582fd7cad09b5e7277f50f912183cf969b36e82a1c26e49a"}, + {file = "coverage-7.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5e29a64e9586194ea271048bc80c83cdd4587830110d1e07b109e6ff435e5dbc"}, + {file = "coverage-7.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d5302eb84c61e758c9d68b8a2f93a398b272073a046d07da83d77b0edc8d76b"}, + {file = "coverage-7.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c9fffbc39dc4a6277e1525cab06c161d11ee3995bbc97543dc74fcec33e045b"}, + {file = "coverage-7.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6ceeab5fca62bca072eba6865a12d881f281c74231d2990f8a398226e1a5d96"}, + {file = "coverage-7.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:28563a35ef4a82b5bc5160a01853ce62b9fceee00760e583ffc8acf9e3413753"}, + {file = "coverage-7.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfa065307667f1c6e1f4c3e13f415b0925e34e56441f5fda2c84110a4a1d8bda"}, + {file = "coverage-7.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7f992b32286c86c38f07a8b5c3fc88384199e82434040a729ec06b067ee0d52c"}, + {file = "coverage-7.2.0-cp37-cp37m-win32.whl", hash = "sha256:2c15bd09fd5009f3a79c8b3682b52973df29761030b692043f9834fc780947c4"}, + {file = "coverage-7.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f332d61fbff353e2ef0f3130a166f499c3fad3a196e7f7ae72076d41a6bfb259"}, + {file = "coverage-7.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:577a8bc40c01ad88bb9ab1b3a1814f2f860ff5c5099827da2a3cafc5522dadea"}, + {file = 
"coverage-7.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9240a0335365c29c968131bdf624bb25a8a653a9c0d8c5dbfcabf80b59c1973c"}, + {file = "coverage-7.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:358d3bce1468f298b19a3e35183bdb13c06cdda029643537a0cc37e55e74e8f1"}, + {file = "coverage-7.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:932048364ff9c39030c6ba360c31bf4500036d4e15c02a2afc5a76e7623140d4"}, + {file = "coverage-7.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7efa21611ffc91156e6f053997285c6fe88cfef3fb7533692d0692d2cb30c846"}, + {file = "coverage-7.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:465ea431c3b78a87e32d7d9ea6d081a1003c43a442982375cf2c247a19971961"}, + {file = "coverage-7.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f03c229f1453b936916f68a47b3dfb5e84e7ad48e160488168a5e35115320c8"}, + {file = "coverage-7.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:40785553d68c61e61100262b73f665024fd2bb3c6f0f8e2cd5b13e10e4df027b"}, + {file = "coverage-7.2.0-cp38-cp38-win32.whl", hash = "sha256:b09dd7bef59448c66e6b490cc3f3c25c14bc85d4e3c193b81a6204be8dd355de"}, + {file = "coverage-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:dc4f9a89c82faf6254d646180b2e3aa4daf5ff75bdb2c296b9f6a6cf547e26a7"}, + {file = "coverage-7.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c243b25051440386179591a8d5a5caff4484f92c980fb6e061b9559da7cc3f64"}, + {file = "coverage-7.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b8fd32f85b256fc096deeb4872aeb8137474da0c0351236f93cbedc359353d6"}, + {file = "coverage-7.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f2a7df523791e6a63b40360afa6792a11869651307031160dc10802df9a252"}, + {file = "coverage-7.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:da32526326e8da0effb452dc32a21ffad282c485a85a02aeff2393156f69c1c3"}, + {file = "coverage-7.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1153a6156715db9d6ae8283480ae67fb67452aa693a56d7dae9ffe8f7a80da"}, + {file = "coverage-7.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:74cd60fa00f46f28bd40048d6ca26bd58e9bee61d2b0eb4ec18cea13493c003f"}, + {file = "coverage-7.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:59a427f8a005aa7254074719441acb25ac2c2f60c1f1026d43f846d4254c1c2f"}, + {file = "coverage-7.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c3c4beddee01c8125a75cde3b71be273995e2e9ec08fbc260dd206b46bb99969"}, + {file = "coverage-7.2.0-cp39-cp39-win32.whl", hash = "sha256:08e3dd256b8d3e07bb230896c8c96ec6c5dffbe5a133ba21f8be82b275b900e8"}, + {file = "coverage-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad12c74c6ce53a027f5a5ecbac9be20758a41c85425c1bbab7078441794b04ee"}, + {file = "coverage-7.2.0-pp37.pp38.pp39-none-any.whl", hash = "sha256:ffa637a2d5883298449a5434b699b22ef98dd8e2ef8a1d9e60fa9cfe79813411"}, + {file = "coverage-7.2.0.tar.gz", hash = "sha256:9cc9c41aa5af16d845b53287051340c363dd03b7ef408e45eec3af52be77810d"}, ] [package.dependencies] @@ -510,14 +510,14 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.30" -description = "GitPython is a python library used to interact with Git repositories" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, - {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, + {file = "GitPython-3.1.31-py3-none-any.whl", hash = 
"sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] [package.dependencies] @@ -729,38 +729,38 @@ files = [ [[package]] name = "mypy" -version = "1.0.0" +version = "1.0.1" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"}, - {file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"}, - {file = "mypy-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a"}, - {file = "mypy-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593"}, - {file = "mypy-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7"}, - {file = "mypy-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52"}, - {file = "mypy-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d"}, - {file = "mypy-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5"}, - {file = "mypy-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd"}, - {file = "mypy-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2"}, - {file = "mypy-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c"}, - {file = "mypy-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88"}, - {file = "mypy-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805"}, - {file = "mypy-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21"}, - {file = "mypy-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964"}, - {file = "mypy-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36"}, - {file = "mypy-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1"}, - {file = "mypy-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43"}, - {file = "mypy-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb"}, - {file = "mypy-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af"}, - {file = "mypy-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072"}, - {file = "mypy-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457"}, - {file = "mypy-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74"}, - {file = "mypy-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d"}, - {file = "mypy-1.0.0-py3-none-any.whl", hash = 
"sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f"}, - {file = "mypy-1.0.0.tar.gz", hash = "sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, + {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, + {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, + {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, + {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, + {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, + {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, + {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, + {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, + {file = 
"mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, + {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, + {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, + {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, + {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, + {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, + {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, + {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, + {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, + {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, ] [package.dependencies] @@ -1145,14 +1145,14 @@ 
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "setuptools" -version = "67.3.2" +version = "67.4.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.3.2-py3-none-any.whl", hash = "sha256:bb6d8e508de562768f2027902929f8523932fcd1fb784e6d573d2cafac995a48"}, - {file = "setuptools-67.3.2.tar.gz", hash = "sha256:95f00380ef2ffa41d9bba85d95b27689d923c93dfbafed4aecd7cf988a25e012"}, + {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, + {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, ] [package.extras] @@ -1487,14 +1487,14 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.4.5" +version = "4.4.6" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tox-4.4.5-py3-none-any.whl", hash = "sha256:1081864f1a1393ffa11ebe9beaa280349020579310d217a594a4e7b6124c5425"}, - {file = "tox-4.4.5.tar.gz", hash = "sha256:f9bc83c5da8666baa2a4d4e884bbbda124fe646e4b1c0e412949cecc2b6e8f90"}, + {file = "tox-4.4.6-py3-none-any.whl", hash = "sha256:e3d4a65852f029e5ba441a01824d2d839d30bb8fb071635ef9cb53952698e6bf"}, + {file = "tox-4.4.6.tar.gz", hash = "sha256:9786671d23b673ace7499c602c5746e2a225d1ecd9d9f624d0461303f40bd93b"}, ] [package.dependencies] @@ -1625,21 +1625,21 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess [[package]] name = "zipp" -version = "3.13.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.13.0-py3-none-any.whl", hash = 
"sha256:e8b2a36ea17df80ffe9e2c4fda3f693c3dad6df1697d3cd3af232db680950b0b"}, - {file = "zipp-3.13.0.tar.gz", hash = "sha256:23f70e964bc11a34cef175bc90ba2914e1e4545ea1e3e2f67c079671883f9cb6"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "b39f7d75a3cf605006ca19870366ad01d1f4df94d559075fc36ff21e619dffcc" +content-hash = "1a44241e119441fdd0154b8e399ac397aeef931d213bb87940efed83b21bd54b" diff --git a/pyproject.toml b/pyproject.toml index 0c629003..af09de60 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,7 +72,7 @@ isort = [ { version = "^5.12", python = ">=3.8" }, { version = "^5.11", python = "<3.8" } ] -mypy = "1.0.0" +mypy = "1.0.1" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/tox.ini b/tox.ini index 8e870e6c..327e7112 100644 --- a/tox.ini +++ b/tox.ini @@ -37,7 +37,7 @@ commands = [testenv:mypy] basepython = python3.11 deps = - mypy==1.0.0 + mypy==1.0.1 pytest>=7.2,<8 commands = mypy src tests From 1062cbb6c8b96cfb6b9569fb74c26d861bdeddd4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 18:27:43 +0100 Subject: [PATCH 
075/230] Optimize parallel execution when it's only one task See discussion in issue #190. Also, run coverage in Python 3.10, since running in Python 3.11 reports false negatives in test_lists (probably bug in coverage). --- src/graphql/__init__.py | 2 +- src/graphql/execution/execute.py | 40 ++++++++++++++++++++------------ tests/execution/test_lists.py | 15 ++++++++++++ tests/execution/test_parallel.py | 40 ++++++++++++++++++++++++++++++++ tox.ini | 4 ++-- 5 files changed, 83 insertions(+), 18 deletions(-) diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 0327396a..4f858a68 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -313,7 +313,7 @@ # Validate GraphQL schema. validate_schema, assert_valid_schema, - # Uphold the spec rules about naming + # Uphold the spec rules about naming assert_name, assert_enum_value_name, # Types diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ef1edb34..1a836874 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -441,7 +441,7 @@ def execute_fields( if is_awaitable(result): append_awaitable(response_name) - # If there are no coroutines, we can just return the object + # If there are no coroutines, we can just return the object. if not awaitable_fields: return results @@ -450,12 +450,17 @@ def execute_fields( # will yield this same map, but with any coroutines awaited in parallel and # replaced with the values they yielded. async def get_results() -> Dict[str, Any]: - results.update( - zip( - awaitable_fields, - await gather(*(results[field] for field in awaitable_fields)), + if len(awaitable_fields) == 1: + # If there is only one field, avoid the overhead of parallelization. 
+ field = awaitable_fields[0] + results[field] = await results[field] + else: + results.update( + zip( + awaitable_fields, + await gather(*(results[field] for field in awaitable_fields)), + ) ) - ) return results return get_results() @@ -758,13 +763,18 @@ async def await_completed(item: Any, item_path: Path) -> Any: # noinspection PyShadowingNames async def get_completed_results() -> List[Any]: - for index, result in zip( - awaitable_indices, - await gather( - *(completed_results[index] for index in awaitable_indices) - ), - ): - completed_results[index] = result + if len(awaitable_indices) == 1: + # If there is only one index, avoid the overhead of parallelization. + index = awaitable_indices[0] + completed_results[index] = await completed_results[index] + else: + for index, result in zip( + awaitable_indices, + await gather( + *(completed_results[index] for index in awaitable_indices) + ), + ): + completed_results[index] = result return completed_results return get_completed_results() @@ -907,7 +917,7 @@ def complete_object_value( # If there is an `is_type_of()` predicate function, call it with the current # result. If `is_type_of()` returns False, then raise an error rather than - # continuing execution. + # continuing execution. if return_type.is_type_of: is_type_of = return_type.is_type_of(result, info) @@ -943,7 +953,7 @@ def collect_subfields( # We cannot use the field_nodes themselves as key for the cache, since they # are not hashable as a list. We also do not want to use the field_nodes # themselves (converted to a tuple) as keys, since hashing them is slow. - # Therefore we use the ids of the field_nodes as keys. Note that we do not + # Therefore, we use the ids of the field_nodes as keys. Note that we do not # use the id of the list, since we want to hit the cache for all lists of # the same nodes, not only for the same list of nodes. Also, the list id may # even be reused, in which case we would get wrong results from the cache. 
diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 98d57c12..84617850 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -25,6 +25,21 @@ def _complete(list_field): Data(list_field), ) + def accepts_a_list_as_a_list_value(): + result = _complete([]) + assert result == ({"listField": []}, None) + list_field = ["just an apple"] + result = _complete(list_field) + assert result == ({"listField": list_field}, None) + list_field = ["apple", "banana", "coconut"] + result = _complete(list_field) + assert result == ({"listField": list_field}, None) + + def accepts_a_tuple_as_a_list_value(): + list_field = ("apple", "banana", "coconut") + result = _complete(list_field) + assert result == ({"listField": list(list_field)}, None) + def accepts_a_set_as_a_list_value(): # Note that sets are not ordered in Python. list_field = {"apple", "banana", "coconut"} diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index e5841d56..9d28c3c3 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -32,6 +32,27 @@ async def wait(self) -> bool: def describe_parallel_execution(): + @mark.asyncio + async def resolve_single_field(): + # make sure that the special case of resolving a single field works + async def resolve(*_args): + return True + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "foo": GraphQLField(GraphQLBoolean, resolve=resolve), + }, + ) + ) + + awaitable_result = execute(schema, parse("{foo}")) + assert isinstance(awaitable_result, Awaitable) + result = await awaitable_result + + assert result == ({"foo": True}, None) + @mark.asyncio async def resolve_fields_in_parallel(): barrier = Barrier(2) @@ -58,6 +79,25 @@ async def resolve(*_args): assert result == ({"foo": True, "bar": True}, None) + @mark.asyncio + async def resolve_single_element_list(): + # make sure that the special case of resolving a single element list works + async def 
resolve(*_args): + return [True] + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + {"foo": GraphQLField(GraphQLList(GraphQLBoolean), resolve=resolve)}, + ) + ) + + awaitable_result = execute(schema, parse("{foo}")) + assert isinstance(awaitable_result, Awaitable) + result = await awaitable_result + + assert result == ({"foo": [True]}, None) + @mark.asyncio async def resolve_list_in_parallel(): barrier = Barrier(2) diff --git a/tox.ini b/tox.ini index 327e7112..6b0aa608 100644 --- a/tox.ini +++ b/tox.ini @@ -62,5 +62,5 @@ deps = commands = # to also run the time-consuming tests: tox -e py310 -- --run-slow # to run the benchmarks: tox -e py310 -- -k benchmarks --benchmark-enable - py37,py38.py39,py310,pypy39: pytest tests {posargs} - py311: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} + py37,py38.py39,py311,pypy39: pytest tests {posargs} + py310: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From ffab53621e89989781fab9a1e19d2e8490a87521 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 19:10:29 +0100 Subject: [PATCH 076/230] Remove assert_valid_execution_arguments function Replicates graphql/graphql-js@cfbc023296a1a596429a6312abede040c9353644 --- src/graphql/execution/execute.py | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 1a836874..2d2bc0a9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -65,7 +65,6 @@ __all__ = [ - "assert_valid_execution_arguments", "create_source_event_stream", "default_field_resolver", "default_type_resolver", @@ -250,8 +249,8 @@ def build( For internal use only. """ - # If arguments are missing or incorrect, throw an error. - assert_valid_execution_arguments(schema) + # If the schema used for execution is invalid, raise an error. 
+ assert_valid_schema(schema) operation: Optional[OperationDefinitionNode] = None fragments: Dict[str, FragmentDefinitionNode] = {} @@ -1112,20 +1111,6 @@ def execute_sync( return cast(ExecutionResult, result) -def assert_valid_execution_arguments( - schema: GraphQLSchema, -) -> None: - """Check that the arguments are acceptable. - - Essential assertions before executing to provide developer feedback for improper use - of the GraphQL library. - - For internal use only. - """ - # If the schema used for execution is invalid, throw an error. - assert_valid_schema(schema) - - def invalid_return_type_error( return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode] ) -> GraphQLError: @@ -1340,10 +1325,6 @@ def create_source_event_stream( separating these two steps. For more on this, see the "Supporting Subscriptions at Scale" information in the GraphQL spec. """ - # If arguments are missing or incorrectly typed, this is an internal developer - # mistake which should throw an early error. - assert_valid_execution_arguments(schema) - if not execution_context_class: execution_context_class = ExecutionContext From dabf869325b809cd21fac8f2925cc81829e2c7fd Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 19:24:18 +0100 Subject: [PATCH 077/230] refactor: execute_operation can take only one parameter Replicates graphql/graphql-js@bde22d242843f41c23dff55216e2aabd2a2aad6e --- src/graphql/execution/execute.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 2d2bc0a9..03ccfe52 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -331,13 +331,12 @@ def build_response( ) return ExecutionResult(data, errors) - def execute_operation( - self, operation: OperationDefinitionNode - ) -> AwaitableOrValue[Any]: + def execute_operation(self) -> AwaitableOrValue[Any]: """Execute an operation. 
Implements the "Executing operations" section of the spec. """ + operation = self.operation root_type = self.schema.get_root_type(operation.operation) if root_type is None: raise GraphQLError( @@ -1035,8 +1034,7 @@ def execute( errors = exe_context.errors build_response = exe_context.build_response try: - operation = exe_context.operation - result = exe_context.execute_operation(operation) + result = exe_context.execute_operation() if exe_context.is_awaitable(result): # noinspection PyShadowingNames From 51b93b84a64cc9023e6ae4c6019b517d40306bdd Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 19:35:36 +0100 Subject: [PATCH 078/230] Minor simplification Roughly replicates graphql/graphql-js@467be3e3aefc00e0df6ab0255ef442d989a3e0e0 --- src/graphql/execution/execute.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 03ccfe52..d924a202 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -336,8 +336,9 @@ def execute_operation(self) -> AwaitableOrValue[Any]: Implements the "Executing operations" section of the spec. 
""" + schema = self.schema operation = self.operation - root_type = self.schema.get_root_type(operation.operation) + root_type = schema.get_root_type(operation.operation) if root_type is None: raise GraphQLError( "Schema is not configured to execute" @@ -346,7 +347,7 @@ def execute_operation(self) -> AwaitableOrValue[Any]: ) root_fields = collect_fields( - self.schema, + schema, self.fragments, self.variable_values, root_type, From cefbdf1c1aa1a13b2c33904e70b79ac78df5eaf5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 20:48:56 +0100 Subject: [PATCH 079/230] refactor: subscribe: introduce build_per_event_execution_context Replicates graphql/graphql-js@c1fe9519e49843cecf2292b32dfacc401477845b --- src/graphql/execution/execute.py | 81 +++++++++++++++++++++++--------- 1 file changed, 60 insertions(+), 21 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index d924a202..0035f15c 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -331,6 +331,23 @@ def build_response( ) return ExecutionResult(data, errors) + def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: + """Create a copy of the execution context for usage with subscribe events.""" + return self.__class__( + self.schema, + self.fragments, + payload, + self.context_value, + self.operation, + self.variable_values, + self.field_resolver, + self.type_resolver, + self.subscribe_field_resolver, + [], + self.middleware_manager, + self.is_awaitable, + ) + def execute_operation(self) -> AwaitableOrValue[Any]: """Execute an operation. @@ -1003,7 +1020,7 @@ def execute( # If a valid execution context cannot be created due to incorrect arguments, # a "Response" with only errors is returned. - exe_context = execution_context_class.build( + context = execution_context_class.build( schema, document, root_value, @@ -1018,9 +1035,14 @@ def execute( ) # Return early errors if execution context failed. 
- if isinstance(exe_context, list): - return ExecutionResult(data=None, errors=exe_context) + if isinstance(context, list): + return ExecutionResult(data=None, errors=context) + + return execute_impl(context) + +def execute_impl(context: ExecutionContext) -> AwaitableOrValue[ExecutionResult]: + """Execute GraphQL operation (internal implementation).""" # Return a possible coroutine object that will eventually yield the data described # by the "Response" section of the GraphQL specification. # @@ -1032,12 +1054,12 @@ def execute( # Errors from sub-fields of a NonNull type may propagate to the top level, # at which point we still log the error and null the parent field, which # in this case is the entire response. - errors = exe_context.errors - build_response = exe_context.build_response + errors = context.errors + build_response = context.build_response try: - result = exe_context.execute_operation() + result = context.execute_operation() - if exe_context.is_awaitable(result): + if context.is_awaitable(result): # noinspection PyShadowingNames async def await_result() -> Any: try: @@ -1215,6 +1237,7 @@ def subscribe( variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, field_resolver: Optional[GraphQLFieldResolver] = None, + type_resolver: Optional[GraphQLTypeResolver] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, execution_context_class: Optional[Type[ExecutionContext]] = None, ) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: @@ -1237,17 +1260,31 @@ def subscribe( If the operation succeeded, the coroutine will yield an AsyncIterator, which yields a stream of ExecutionResults representing the response stream. """ - result_or_stream = create_source_event_stream( + if execution_context_class is None: + execution_context_class = ExecutionContext + + # If a valid context cannot be created due to incorrect arguments, + # a "Response" with only errors is returned. 
+ context = execution_context_class.build( schema, document, root_value, context_value, variable_values, operation_name, + field_resolver, + type_resolver, subscribe_field_resolver, - execution_context_class, ) + # Return early errors if execution context failed. + if isinstance(context, list): + return ExecutionResult(data=None, errors=context) + + result_or_stream = create_source_event_stream_impl(context) + + build_context = context.build_per_event_execution_context + async def map_source_to_response(payload: Any) -> ExecutionResult: """Map source to response. @@ -1258,19 +1295,10 @@ async def map_source_to_response(payload: Any) -> ExecutionResult: "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the "ExecuteQuery" algorithm, for which :func:`~graphql.execute` is also used. """ - result = execute( - schema, - document, - payload, - context_value, - variable_values, - operation_name, - field_resolver, - execution_context_class=execution_context_class, - ) + result = execute_impl(build_context(payload)) return await result if isawaitable(result) else result - if (execution_context_class or ExecutionContext).is_awaitable(result_or_stream): + if execution_context_class.is_awaitable(result_or_stream): awaitable_result_or_stream = cast(Awaitable, result_or_stream) # noinspection PyShadowingNames @@ -1298,6 +1326,8 @@ def create_source_event_stream( context_value: Any = None, variable_values: Optional[Dict[str, Any]] = None, operation_name: Optional[str] = None, + field_resolver: Optional[GraphQLFieldResolver] = None, + type_resolver: Optional[GraphQLTypeResolver] = None, subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, execution_context_class: Optional[Type[ExecutionContext]] = None, ) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: @@ -1336,13 +1366,22 @@ def create_source_event_stream( context_value, variable_values, operation_name, - subscribe_field_resolver=subscribe_field_resolver, + field_resolver, + 
type_resolver, + subscribe_field_resolver, ) # Return early errors if execution context failed. if isinstance(context, list): return ExecutionResult(data=None, errors=context) + return create_source_event_stream_impl(context) + + +def create_source_event_stream_impl( + context: ExecutionContext, +) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: + """Create source event stream (internal implementation).""" try: event_stream = execute_subscription(context) except GraphQLError as error: From 072c59e89052a6b28e0c3c8b2710afc885fce2d0 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 21:21:09 +0100 Subject: [PATCH 080/230] Fix coverage --- src/graphql/execution/execute.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 0035f15c..32ad78d0 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1354,12 +1354,9 @@ def create_source_event_stream( separating these two steps. For more on this, see the "Supporting Subscriptions at Scale" information in the GraphQL spec. """ - if not execution_context_class: - execution_context_class = ExecutionContext - # If a valid context cannot be created due to incorrect arguments, # a "Response" with only errors is returned. 
- context = execution_context_class.build( + context = (execution_context_class or ExecutionContext).build( schema, document, root_value, From dd083669de028686a80b617078fe8727652587ba Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Feb 2023 21:26:27 +0100 Subject: [PATCH 081/230] Fix GitHub action for testing --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8686eb1d..90691aa1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9'] steps: - uses: actions/checkout@v3 From 87551f57adb3bb6c0c047d7ce3761a14515f3040 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 16 Mar 2023 21:54:19 +0100 Subject: [PATCH 082/230] Support returning async iterables from resolver functions Replicates graphql/graphql-js@59c87c39c277d6337981f63302bee37dfc3dcebc --- src/graphql/execution/execute.py | 91 ++++++++-- tests/benchmarks/test_async_iterable.py | 34 ++++ tests/execution/test_lists.py | 230 +++++++++++++++++++----- 3 files changed, 299 insertions(+), 56 deletions(-) create mode 100644 tests/benchmarks/test_async_iterable.py diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 32ad78d0..2bac13a9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -64,6 +64,15 @@ from .values import get_argument_values, get_variable_values +try: # pragma: no cover + anext +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator: AsyncIterator) -> Any: + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + __all__ = [ "create_source_event_stream", "default_field_resolver", @@ -684,6 +693,67 @@ def complete_value( f" 
'{inspect(return_type)}'." ) + async def complete_async_iterator_value( + self, + item_type: GraphQLOutputType, + field_nodes: List[FieldNode], + info: GraphQLResolveInfo, + path: Path, + iterator: AsyncIterator[Any], + ) -> List[Any]: + """Complete an async iterator. + + Complete a async iterator value by completing the result and calling + recursively until all the results are completed. + """ + is_awaitable = self.is_awaitable + awaitable_indices: List[int] = [] + append_awaitable = awaitable_indices.append + completed_results: List[Any] = [] + append_result = completed_results.append + index = 0 + while True: + field_path = path.add_key(index, None) + try: + try: + value = await anext(iterator) + except StopAsyncIteration: + break + try: + completed_item = self.complete_value( + item_type, field_nodes, info, field_path, value + ) + if is_awaitable(completed_item): + append_awaitable(index) + append_result(completed_item) + except Exception as raw_error: + append_result(None) + error = located_error(raw_error, field_nodes, field_path.as_list()) + self.handle_field_error(error, item_type) + except Exception as raw_error: + append_result(None) + error = located_error(raw_error, field_nodes, field_path.as_list()) + self.handle_field_error(error, item_type) + break + index += 1 + + if not awaitable_indices: + return completed_results + + if len(awaitable_indices) == 1: + # If there is only one index, avoid the overhead of parallelization. + index = awaitable_indices[0] + completed_results[index] = await completed_results[index] + else: + for index, result in zip( + awaitable_indices, + await gather( + *(completed_results[index] for index in awaitable_indices) + ), + ): + completed_results[index] = result + return completed_results + def complete_list_value( self, return_type: GraphQLList[GraphQLOutputType], @@ -696,20 +766,16 @@ def complete_list_value( Complete a list value by completing each item in the list with the inner type. 
""" - if not is_iterable(result): - # experimental: allow async iterables - if isinstance(result, AsyncIterable): - # noinspection PyShadowingNames - async def async_iterable_to_list( - async_result: AsyncIterable[Any], - ) -> Any: - sync_result = [item async for item in async_result] - return self.complete_list_value( - return_type, field_nodes, info, path, sync_result - ) + item_type = return_type.of_type - return async_iterable_to_list(result) + if isinstance(result, AsyncIterable): + iterator = result.__aiter__() + return self.complete_async_iterator_value( + item_type, field_nodes, info, path, iterator + ) + + if not is_iterable(result): raise GraphQLError( "Expected Iterable, but did not find one for field" f" '{info.parent_type.name}.{info.field_name}'." @@ -718,7 +784,6 @@ async def async_iterable_to_list( # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine # object. - item_type = return_type.of_type is_awaitable = self.is_awaitable awaitable_indices: List[int] = [] append_awaitable = awaitable_indices.append diff --git a/tests/benchmarks/test_async_iterable.py b/tests/benchmarks/test_async_iterable.py new file mode 100644 index 00000000..47b00446 --- /dev/null +++ b/tests/benchmarks/test_async_iterable.py @@ -0,0 +1,34 @@ +import asyncio +from inspect import isawaitable + +from graphql import ExecutionResult, build_schema, execute, parse + + +schema = build_schema("type Query { listField: [String] }") +document = parse("{ listField }") + + +class Data: + # noinspection PyPep8Naming + @staticmethod + async def listField(info_): + for index in range(1000): + yield index + + +async def execute_async() -> ExecutionResult: + result = execute(schema, document, Data()) + assert isawaitable(result) + return await result + + +def test_execute_async_iterable_list_field(benchmark): + # Note: we are creating the async loop outside of the benchmark code so that + # 
the setup is not included in the benchmark timings + loop = asyncio.events.new_event_loop() + asyncio.events.set_event_loop(loop) + result = benchmark(lambda: loop.run_until_complete(execute_async())) + asyncio.events.set_event_loop(None) + loop.close() + assert not result.errors + assert result.data == {"listField": [str(index) for index in range(1000)]} diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 84617850..2eed7595 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,10 +1,19 @@ -from typing import Any +from typing import Any, AsyncGenerator from pytest import mark from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable +from graphql.type import ( + GraphQLField, + GraphQLFieldResolver, + GraphQLList, + GraphQLObjectType, + GraphQLResolveInfo, + GraphQLSchema, + GraphQLString, +) from graphql.utilities import build_schema @@ -115,6 +124,183 @@ def does_not_accept_iterable_string_literal_as_a_list_value(): ) +def describe_execute_accepts_async_iterables_as_list_value(): + async def _complete(list_field, as_: str = "[String]"): + result = execute( + build_schema(f"type Query {{ listField: {as_} }}"), + parse("{ listField }"), + Data(list_field), + ) + assert is_awaitable(result) + return await result + + class _IndexData: + def __init__(self, index: int): + self.index = index + + async def _complete_object_lists( + resolve: GraphQLFieldResolver, count=3 + ) -> ExecutionResult: + async def _list_field( + obj_: Any, info_: GraphQLResolveInfo + ) -> AsyncGenerator[_IndexData, None]: + for index in range(count): + yield _IndexData(index) + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "listField": GraphQLField( + GraphQLList( + GraphQLObjectType( + "ObjectWrapper", + {"index": GraphQLField(GraphQLString, resolve=resolve)}, + ) + ), + resolve=_list_field, + ) + }, + ) + ) + result = 
execute(schema, document=parse("{ listField { index } }")) + assert is_awaitable(result) + return await result + + @mark.asyncio + async def accepts_an_async_generator_as_a_list_value(): + async def list_field(): + yield "two" + yield 4 + yield False + + assert await _complete(list_field()) == ( + {"listField": ["two", "4", "false"]}, + None, + ) + + @mark.asyncio + async def accepts_a_custom_async_iterable_as_a_list_value(): + class ListField: + def __aiter__(self): + self.last = "hello" + return self + + async def __anext__(self): + last = self.last + if last == "stop": + raise StopAsyncIteration + self.last = "world" if last == "hello" else "stop" + return last + + assert await _complete(ListField()) == ( + {"listField": ["hello", "world"]}, + None, + ) + + @mark.asyncio + async def handles_an_async_generator_that_throws(): + async def list_field(): + yield "two" + yield 4 + raise RuntimeError("bad") + + assert await _complete(list_field()) == ( + {"listField": ["two", "4", None]}, + [{"message": "bad", "locations": [(1, 3)], "path": ["listField", 2]}], + ) + + @mark.asyncio + async def handles_an_async_generator_where_intermediate_value_triggers_an_error(): + async def list_field(): + yield "two" + yield {} + yield 4 + + assert await _complete(list_field()) == ( + {"listField": ["two", None, "4"]}, + [ + { + "message": "String cannot represent value: {}", + "locations": [(1, 3)], + "path": ["listField", 1], + } + ], + ) + + @mark.asyncio + async def handles_errors_from_complete_value_in_async_iterables(): + async def list_field(): + yield "two" + yield {} + + assert await _complete(list_field()) == ( + {"listField": ["two", None]}, + [ + { + "message": "String cannot represent value: {}", + "locations": [(1, 3)], + "path": ["listField", 1], + } + ], + ) + + @mark.asyncio + async def handles_async_functions_from_complete_value_in_async_iterables(): + async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: + return data.index + + assert await 
_complete_object_lists(resolve) == ( + {"listField": [{"index": "0"}, {"index": "1"}, {"index": "2"}]}, + None, + ) + + @mark.asyncio + async def handles_single_async_functions_from_complete_value_in_async_iterables(): + async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: + return data.index + + assert await _complete_object_lists(resolve, 1) == ( + {"listField": [{"index": "0"}]}, + None, + ) + + @mark.asyncio + async def handles_async_errors_from_complete_value_in_async_iterables(): + async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: + index = data.index + if index == 2: + raise RuntimeError("bad") + return index + + assert await _complete_object_lists(resolve) == ( + {"listField": [{"index": "0"}, {"index": "1"}, {"index": None}]}, + [ + { + "message": "bad", + "locations": [(1, 15)], + "path": ["listField", 2, "index"], + } + ], + ) + + @mark.asyncio + async def handles_nulls_yielded_by_async_generator(): + async def list_field(): + yield 1 + yield None + yield 2 + + data = {"listField": [1, None, 2]} + message = "Cannot return null for non-nullable field Query.listField." 
+ errors = [{"message": message, "locations": [(1, 3)], "path": ["listField", 1]}] + + assert await _complete(list_field(), "[Int]") == (data, None) + assert await _complete(list_field(), "[Int]!") == (data, None) + assert await _complete(list_field(), "[Int!]") == ({"listField": None}, errors) + assert await _complete(list_field(), "[Int!]!") == (None, errors) + + def describe_execute_handles_list_nullability(): async def _complete(list_field: Any, as_type: str) -> ExecutionResult: schema = build_schema(f"type Query {{ listField: {as_type} }}") @@ -229,45 +415,3 @@ async def results_in_errors(): None, errors, ) - - -def describe_experimental_execute_accepts_async_iterables_as_list_value(): - async def _complete(list_field): - result = execute( - build_schema("type Query { listField: [String] }"), - parse("{ listField }"), - Data(list_field), - ) - assert is_awaitable(result) - return await result - - @mark.asyncio - async def accepts_an_async_generator_as_a_list_value(): - async def list_field(): - yield "one" - yield 2 - yield True - - assert await _complete(list_field()) == ( - {"listField": ["one", "2", "true"]}, - None, - ) - - @mark.asyncio - async def accepts_a_custom_async_iterable_as_a_list_value(): - class ListField: - def __aiter__(self): - self.last = "hello" - return self - - async def __anext__(self): - last = self.last - if last == "stop": - raise StopAsyncIteration - self.last = "world" if last == "hello" else "stop" - return last - - assert await _complete(ListField()) == ( - {"listField": ["hello", "world"]}, - None, - ) From f045a3296f9074a0455fc001db8436ba1c6b4411 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Mar 2023 15:46:06 +0100 Subject: [PATCH 083/230] [RFC] Client Controlled Nullability experiment implementation w/o execution Replicates graphql/graphql-js@699ec58547c34bfeef866a2a4458615d39b16964 --- docs/modules/language.rst | 4 + src/graphql/__init__.py | 10 + src/graphql/language/__init__.py | 10 + 
src/graphql/language/ast.py | 42 ++- src/graphql/language/lexer.py | 2 + src/graphql/language/parser.py | 63 ++++ src/graphql/language/predicates.py | 7 + src/graphql/language/printer.py | 28 +- src/graphql/language/token_kind.py | 1 + tests/benchmarks/test_parser.py | 6 +- tests/fixtures/kitchen_sink.graphql | 15 + tests/language/test_lexer.py | 2 + tests/language/test_parser.py | 243 +++++++++++++++- tests/language/test_predicates.py | 9 + tests/language/test_printer.py | 23 +- tests/language/test_visitor.py | 268 +++++++++++++++++- tests/utilities/test_ast_to_dict.py | 9 + .../test_strip_ignored_characters.py | 12 +- 18 files changed, 732 insertions(+), 22 deletions(-) diff --git a/docs/modules/language.rst b/docs/modules/language.rst index b447787e..be45fd26 100644 --- a/docs/modules/language.rst +++ b/docs/modules/language.rst @@ -31,6 +31,7 @@ Each kind of AST node has its own class: .. autoclass:: EnumTypeExtensionNode .. autoclass:: EnumValueDefinitionNode .. autoclass:: EnumValueNode +.. autoclass:: ErrorBoundaryNode .. autoclass:: ExecutableDefinitionNode .. autoclass:: FieldDefinitionNode .. autoclass:: FieldNode @@ -44,11 +45,14 @@ Each kind of AST node has its own class: .. autoclass:: IntValueNode .. autoclass:: InterfaceTypeDefinitionNode .. autoclass:: InterfaceTypeExtensionNode +.. autoclass:: ListNullabilityOperatorNode .. autoclass:: ListTypeNode .. autoclass:: ListValueNode .. autoclass:: NameNode .. autoclass:: NamedTypeNode +.. autoclass:: NonNullAssertionNode .. autoclass:: NonNullTypeNode +.. autoclass:: NullabilityAssertionNode .. autoclass:: NullValueNode .. autoclass:: ObjectFieldNode .. 
autoclass:: ObjectTypeDefinitionNode diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 4f858a68..30c8b42e 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -83,6 +83,7 @@ # Predicates is_definition_node, is_executable_definition_node, + is_nullability_assertion_node, is_selection_node, is_value_node, is_const_value_node, @@ -110,6 +111,10 @@ SelectionNode, FieldNode, ArgumentNode, + NullabilityAssertionNode, + NonNullAssertionNode, + ErrorBoundaryNode, + ListNullabilityOperatorNode, ConstArgumentNode, FragmentSpreadNode, InlineFragmentNode, @@ -606,6 +611,7 @@ "DirectiveLocation", "is_definition_node", "is_executable_definition_node", + "is_nullability_assertion_node", "is_selection_node", "is_value_node", "is_const_value_node", @@ -630,6 +636,10 @@ "SelectionNode", "FieldNode", "ArgumentNode", + "NullabilityAssertionNode", + "NonNullAssertionNode", + "ErrorBoundaryNode", + "ListNullabilityOperatorNode", "ConstArgumentNode", "FragmentSpreadNode", "InlineFragmentNode", diff --git a/src/graphql/language/__init__.py b/src/graphql/language/__init__.py index 7d3120f5..2f105a98 100644 --- a/src/graphql/language/__init__.py +++ b/src/graphql/language/__init__.py @@ -46,6 +46,10 @@ SelectionSetNode, SelectionNode, FieldNode, + NullabilityAssertionNode, + NonNullAssertionNode, + ErrorBoundaryNode, + ListNullabilityOperatorNode, ArgumentNode, ConstArgumentNode, FragmentSpreadNode, @@ -98,6 +102,7 @@ from .predicates import ( is_definition_node, is_executable_definition_node, + is_nullability_assertion_node, is_selection_node, is_value_node, is_const_value_node, @@ -147,6 +152,10 @@ "SelectionSetNode", "SelectionNode", "FieldNode", + "NullabilityAssertionNode", + "NonNullAssertionNode", + "ErrorBoundaryNode", + "ListNullabilityOperatorNode", "ArgumentNode", "ConstArgumentNode", "FragmentSpreadNode", @@ -197,6 +206,7 @@ "InputObjectTypeExtensionNode", "is_definition_node", "is_executable_definition_node", + 
"is_nullability_assertion_node", "is_selection_node", "is_value_node", "is_const_value_node", diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index 099edc01..e57adb82 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -28,6 +28,10 @@ "SelectionSetNode", "SelectionNode", "FieldNode", + "NullabilityAssertionNode", + "NonNullAssertionNode", + "ErrorBoundaryNode", + "ListNullabilityOperatorNode", "ArgumentNode", "ConstArgumentNode", "FragmentSpreadNode", @@ -258,8 +262,22 @@ class OperationType(Enum): "variable_definition": ("variable", "type", "default_value", "directives"), "variable": ("name",), "selection_set": ("selections",), - "field": ("alias", "name", "arguments", "directives", "selection_set"), + "field": ( + "alias", + "name", + "arguments", + "directives", + "selection_set", + # note: Client controlled Nullability is experimental and may be changed + # or removed in the future. + "nullability_assertion", + ), "argument": ("name", "value"), + # note: Client controlled Nullability is experimental and may be changed + # or removed in the future. + "list_nullability_operator": ("nullability_assertion",), + "non_null_assertion": ("nullability_assertion",), + "error_boundary": ("nullability_assertion",), "fragment_spread": ("name", "directives"), "inline_fragment": ("type_condition", "directives", "selection_set"), "fragment_definition": ( @@ -462,14 +480,34 @@ class SelectionNode(Node): class FieldNode(SelectionNode): - __slots__ = "alias", "name", "arguments", "selection_set" + __slots__ = "alias", "name", "arguments", "nullability_assertion", "selection_set" alias: Optional[NameNode] name: NameNode arguments: Tuple[ArgumentNode, ...] + # Note: Client Controlled Nullability is experimental + # and may be changed or removed in the future. 
+ nullability_assertion: NullabilityAssertionNode selection_set: Optional[SelectionSetNode] +class NullabilityAssertionNode(Node): + __slots__ = ("nullability_assertion",) + nullability_assertion: Optional["NullabilityAssertionNode"] + + +class ListNullabilityOperatorNode(NullabilityAssertionNode): + pass + + +class NonNullAssertionNode(NullabilityAssertionNode): + nullability_assertion: ListNullabilityOperatorNode + + +class ErrorBoundaryNode(NullabilityAssertionNode): + nullability_assertion: ListNullabilityOperatorNode + + class ArgumentNode(Node): __slots__ = "name", "value" diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index 3e842fc6..5e2914f7 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -458,6 +458,7 @@ def read_name(self, start: int) -> Token: _punctuator_token_kinds = frozenset( [ TokenKind.BANG, + TokenKind.QUESTION_MARK, TokenKind.DOLLAR, TokenKind.AMP, TokenKind.PAREN_L, @@ -485,6 +486,7 @@ def is_punctuator_token_kind(kind: TokenKind) -> bool: _KIND_FOR_PUNCT = { "!": TokenKind.BANG, + "?": TokenKind.QUESTION_MARK, "$": TokenKind.DOLLAR, "&": TokenKind.AMP, "(": TokenKind.PAREN_L, diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index d0b009ed..d4846cc7 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -16,6 +16,7 @@ EnumTypeExtensionNode, EnumValueDefinitionNode, EnumValueNode, + ErrorBoundaryNode, FieldDefinitionNode, FieldNode, FloatValueNode, @@ -28,12 +29,15 @@ InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, IntValueNode, + ListNullabilityOperatorNode, ListTypeNode, ListValueNode, Location, NamedTypeNode, NameNode, + NonNullAssertionNode, NonNullTypeNode, + NullabilityAssertionNode, NullValueNode, ObjectFieldNode, ObjectTypeDefinitionNode, @@ -81,6 +85,7 @@ def parse( source: SourceType, no_location: bool = False, allow_legacy_fragment_variables: bool = False, + experimental_client_controlled_nullability: bool = 
False, ) -> DocumentNode: """Given a GraphQL source, parse it into a Document. @@ -103,11 +108,31 @@ def parse( fragment A($var: Boolean = false) on T { ... } + + EXPERIMENTAL: + + If enabled, the parser will understand and parse Client Controlled Nullability + Designators contained in Fields. They'll be represented in the + :attr:`~graphql.language.FieldNode.nullability_assertion` field + of the :class:`~graphql.language.FieldNode`. + + The syntax looks like the following:: + + { + nullableField! + nonNullableField? + nonNullableSelectionSet? { + childField! + } + } + + Note: this feature is experimental and may change or be removed in the future. """ parser = Parser( source, no_location=no_location, allow_legacy_fragment_variables=allow_legacy_fragment_variables, + experimental_client_controlled_nullability=experimental_client_controlled_nullability, # noqa ) return parser.parse_document() @@ -200,12 +225,14 @@ class Parser: _lexer: Lexer _no_location: bool _allow_legacy_fragment_variables: bool + _experimental_client_controlled_nullability: bool def __init__( self, source: SourceType, no_location: bool = False, allow_legacy_fragment_variables: bool = False, + experimental_client_controlled_nullability: bool = False, ): if not is_source(source): source = Source(cast(str, source)) @@ -213,6 +240,9 @@ def __init__( self._lexer = Lexer(source) self._no_location = no_location self._allow_legacy_fragment_variables = allow_legacy_fragment_variables + self._experimental_client_controlled_nullability = ( + experimental_client_controlled_nullability + ) def parse_name(self) -> NameNode: """Convert a name lex token into a name parse node.""" @@ -376,6 +406,9 @@ def parse_field(self) -> FieldNode: alias=alias, name=name, arguments=self.parse_arguments(False), + # Experimental support for Client Controlled Nullability changes + # the grammar of Field: + nullability_assertion=self.parse_nullability_assertion(), directives=self.parse_directives(False), 
selection_set=self.parse_selection_set() if self.peek(TokenKind.BRACE_L) @@ -383,6 +416,36 @@ def parse_field(self) -> FieldNode: loc=self.loc(start), ) + def parse_nullability_assertion(self) -> Optional[NullabilityAssertionNode]: + """NullabilityAssertion (grammar not yet finalized) + + # Note: Client Controlled Nullability is experimental and may be changed or + # removed in the future. + """ + if not self._experimental_client_controlled_nullability: + return None + + start = self._lexer.token + nullability_assertion: Optional[NullabilityAssertionNode] = None + + if self.expect_optional_token(TokenKind.BRACKET_L): + inner_modifier = self.parse_nullability_assertion() + self.expect_token(TokenKind.BRACKET_R) + nullability_assertion = ListNullabilityOperatorNode( + nullability_assertion=inner_modifier, loc=self.loc(start) + ) + + if self.expect_optional_token(TokenKind.BANG): + nullability_assertion = NonNullAssertionNode( + nullability_assertion=nullability_assertion, loc=self.loc(start) + ) + elif self.expect_optional_token(TokenKind.QUESTION_MARK): + nullability_assertion = ErrorBoundaryNode( + nullability_assertion=nullability_assertion, loc=self.loc(start) + ) + + return nullability_assertion + def parse_arguments(self, is_const: bool) -> List[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index be365003..3b132bb2 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -5,6 +5,7 @@ ExecutableDefinitionNode, ListValueNode, Node, + NullabilityAssertionNode, ObjectValueNode, SchemaExtensionNode, SelectionNode, @@ -26,6 +27,7 @@ __all__ = [ "is_definition_node", "is_executable_definition_node", + "is_nullability_assertion_node", "is_selection_node", "is_value_node", "is_const_value_node", @@ -52,6 +54,11 @@ def is_selection_node(node: Node) -> 
TypeGuard[SelectionNode]: return isinstance(node, SelectionNode) +def is_nullability_assertion_node(node: Node) -> TypeGuard[NullabilityAssertionNode]: + """Check whether the given node represents a nullability assertion node.""" + return isinstance(node, NullabilityAssertionNode) + + def is_value_node(node: Node) -> TypeGuard[ValueNode]: """Check whether the given node represents a value.""" return isinstance(node, ValueNode) diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 72cb4c4e..56971058 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -34,6 +34,7 @@ class PrintedNode: interfaces: Strings locations: Strings name: str + nullability_assertion: str operation: OperationType operation_types: Strings repeatable: bool @@ -100,18 +101,41 @@ def leave_selection_set(node: PrintedNode, *_args: Any) -> str: @staticmethod def leave_field(node: PrintedNode, *_args: Any) -> str: - prefix = wrap("", node.alias, ": ") + node.name + prefix = join((wrap("", node.alias, ": "), node.name)) args_line = prefix + wrap("(", join(node.arguments, ", "), ")") if len(args_line) > MAX_LINE_LENGTH: args_line = prefix + wrap("(\n", indent(join(node.arguments, "\n")), "\n)") - return join((args_line, join(node.directives, " "), node.selection_set), " ") + return join( + ( + args_line, + # Note: Client Controlled Nullability is experimental and may be + # changed or removed in the future. 
+ node.nullability_assertion, + wrap(" ", join(node.directives, " ")), + wrap(" ", node.selection_set), + ), + ) @staticmethod def leave_argument(node: PrintedNode, *_args: Any) -> str: return f"{node.name}: {node.value}" + # Nullability Modifiers + + @staticmethod + def leave_list_nullability_operator(node: PrintedNode, *_args: Any) -> str: + return join(("[", node.nullability_assertion, "]")) + + @staticmethod + def leave_non_null_assertion(node: PrintedNode, *_args: Any) -> str: + return join((node.nullability_assertion, "!")) + + @staticmethod + def leave_error_boundary(node: PrintedNode, *_args: Any) -> str: + return join((node.nullability_assertion, "?")) + # Fragments @staticmethod diff --git a/src/graphql/language/token_kind.py b/src/graphql/language/token_kind.py index 543ac22f..7f5a2607 100644 --- a/src/graphql/language/token_kind.py +++ b/src/graphql/language/token_kind.py @@ -10,6 +10,7 @@ class TokenKind(Enum): SOF = "" EOF = "" BANG = "!" + QUESTION_MARK = "?" DOLLAR = "$" AMP = "&" PAREN_L = "(" diff --git a/tests/benchmarks/test_parser.py b/tests/benchmarks/test_parser.py index 8a99a760..7db8ef2f 100644 --- a/tests/benchmarks/test_parser.py +++ b/tests/benchmarks/test_parser.py @@ -4,5 +4,9 @@ def test_parse_kitchen_sink(benchmark, kitchen_sink_query): # noqa: F811 - query = benchmark(lambda: parse(kitchen_sink_query)) + query = benchmark( + lambda: parse( + kitchen_sink_query, experimental_client_controlled_nullability=True + ) + ) assert isinstance(query, DocumentNode) diff --git a/tests/fixtures/kitchen_sink.graphql b/tests/fixtures/kitchen_sink.graphql index a2d9f671..66a71fb3 100644 --- a/tests/fixtures/kitchen_sink.graphql +++ b/tests/fixtures/kitchen_sink.graphql @@ -9,6 +9,21 @@ query queryName($foo: ComplexType, $site: Site = MOBILE) @onQuery { ...frag @onFragmentSpread } } + + field3! + field4? + requiredField5: field5! + requiredSelectionSet(first: 10)! @directive { + field + } + + unsetListItemsRequiredList: listField[]! 
+ requiredListItemsUnsetList: listField[!] + requiredListItemsRequiredList: listField[!]! + unsetListItemsOptionalList: listField[]? + optionalListItemsUnsetList: listField[?] + optionalListItemsOptionalList: listField[?]? + multidimensionalList: listField[[[!]!]!]! } ... @skip(unless: $foo) { id diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 2be6af4e..83e47953 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -519,6 +519,7 @@ def lex_does_not_allow_name_start_after_a_number(): # noinspection PyArgumentEqualDefault def lexes_punctuation(): assert lex_one("!") == Token(TokenKind.BANG, 0, 1, 1, 1, None) + assert lex_one("?") == Token(TokenKind.QUESTION_MARK, 0, 1, 1, 1, None) assert lex_one("$") == Token(TokenKind.DOLLAR, 0, 1, 1, 1, None) assert lex_one("(") == Token(TokenKind.PAREN_L, 0, 1, 1, 1, None) assert lex_one(")") == Token(TokenKind.PAREN_R, 0, 1, 1, 1, None) @@ -622,6 +623,7 @@ def _is_punctuator_token(text: str) -> bool: def returns_true_for_punctuator_tokens(): assert _is_punctuator_token("!") is True + assert _is_punctuator_token("?") is True assert _is_punctuator_token("$") is True assert _is_punctuator_token("&") is True assert _is_punctuator_token("(") is True diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index cc807bb4..7ac79af6 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -7,13 +7,17 @@ ArgumentNode, DefinitionNode, DocumentNode, + ErrorBoundaryNode, FieldNode, IntValueNode, + ListNullabilityOperatorNode, ListTypeNode, ListValueNode, NamedTypeNode, NameNode, + NonNullAssertionNode, NonNullTypeNode, + NullabilityAssertionNode, NullValueNode, ObjectFieldNode, ObjectValueNode, @@ -46,6 +50,10 @@ Location: TypeAlias = Optional[Tuple[int, int]] +def parse_ccn(source: str) -> DocumentNode: + return parse(source, experimental_client_controlled_nullability=True) + + def assert_syntax_error(text: str, message: str, location: 
Location) -> None: with raises(GraphQLSyntaxError) as exc_info: parse(text) @@ -55,6 +63,15 @@ def assert_syntax_error(text: str, message: str, location: Location) -> None: assert error.locations == [location] +def assert_syntax_error_ccn(text: str, message: str, location: Location) -> None: + with raises(GraphQLSyntaxError) as exc_info: + parse_ccn(text) + error = exc_info.value + assert error.message == f"Syntax Error: {message}" + assert error.description == message + assert error.locations == [location] + + def describe_parser(): def parse_provides_useful_errors(): with raises(GraphQLSyntaxError) as exc_info: @@ -160,7 +177,7 @@ def parses_multi_byte_characters(): # noinspection PyShadowingNames def parses_kitchen_sink(kitchen_sink_query): # noqa: F811 - parse(kitchen_sink_query) + parse_ccn(kitchen_sink_query) def allows_non_keywords_anywhere_a_name_is_allowed(): non_keywords = ( @@ -223,6 +240,214 @@ def parses_named_subscription_operations(): """ ) + def parses_required_field(): + doc = parse_ccn("{ requiredField! }") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + selection_set: Optional[SelectionSetNode] = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion = field.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (15, 16) + assert nullability_assertion.nullability_assertion is None + + def parses_optional_field(): + parse_ccn("{ optionalField? }") + + def does_not_parse_field_with_multiple_designators(): + assert_syntax_error_ccn( + "{ optionalField?! 
}", "Expected Name, found '!'.", (1, 17) + ) + assert_syntax_error_ccn( + "{ optionalField!? }", "Expected Name, found '?'.", (1, 17) + ) + + def parses_required_with_alias(): + parse_ccn("{ requiredField: field! }") + + def parses_optional_with_alias(): + parse_ccn("{ requiredField: field? }") + + def does_not_parse_aliased_field_with_bang_on_left_of_colon(): + assert_syntax_error_ccn( + "{ requiredField!: field }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_aliased_field_with_question_mark_on_left_of_colon(): + assert_syntax_error_ccn( + "{ requiredField?: field }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_aliased_field_with_bang_on_left_and_right_of_colon(): + assert_syntax_error_ccn( + "{ requiredField!: field! }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_aliased_field_with_question_mark_on_left_and_right_of_colon(): + assert_syntax_error_ccn( + "{ requiredField?: field? }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_designator_on_query(): + assert_syntax_error_ccn("query? { field }", "Expected '{', found '?'.", (1, 6)) + + def parses_required_within_fragment(): + parse_ccn("fragment MyFragment on Query { field! }") + + def parses_optional_within_fragment(): + parse_ccn("fragment MyFragment on Query { field? }") + + def parses_field_with_required_list_elements(): + doc = parse_ccn("{ field[!] 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + selection_set: Optional[SelectionSetNode] = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: Optional[ + NullabilityAssertionNode + ] = field.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (8, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_optional_list_elements(): + doc = parse_ccn("{ field[?] 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + selection_set: Optional[SelectionSetNode] = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: Optional[ + NullabilityAssertionNode + ] = field.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ErrorBoundaryNode) + assert nullability_assertion.loc == (8, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_required_list(): + doc = parse_ccn("{ field[]! 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + selection_set: Optional[SelectionSetNode] = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: Optional[ + NullabilityAssertionNode + ] = field.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_optional_list(): + doc = parse_ccn("{ field[]? 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + selection_set: Optional[SelectionSetNode] = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: Optional[ + NullabilityAssertionNode + ] = field.nullability_assertion + assert isinstance(nullability_assertion, ErrorBoundaryNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_mixed_list_elements(): + doc = parse_ccn("{ field[[[?]!]]! 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast(OperationDefinitionNode, definitions[0]) + selection_set: Optional[SelectionSetNode] = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: Optional[ + NullabilityAssertionNode + ] = field.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (7, 16) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 15) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (8, 14) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (9, 13) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (9, 12) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ErrorBoundaryNode) + assert nullability_assertion.loc == (10, 11) + assert nullability_assertion.nullability_assertion is None + + def does_not_parse_field_with_unbalanced_brackets(): + assert_syntax_error_ccn("{ field[[] }", "Expected ']', found '}'.", (1, 12)) + assert_syntax_error_ccn("{ field[]] }", "Expected Name, found ']'.", (1, 10)) + assert_syntax_error_ccn("{ field] }", "Expected Name, found ']'.", (1, 8)) + 
assert_syntax_error_ccn("{ field[ }", "Expected ']', found '}'.", (1, 10)) + + def does_not_parse_field_with_assorted_invalid_nullability_designators(): + assert_syntax_error_ccn("{ field[][] }", "Expected Name, found '['.", (1, 10)) + assert_syntax_error_ccn("{ field[!!] }", "Expected ']', found '!'.", (1, 10)) + assert_syntax_error_ccn("{ field[]?! }", "Expected Name, found '!'.", (1, 11)) + def creates_ast(): doc = parse( dedent( @@ -277,6 +502,7 @@ def creates_ast(): assert value.loc == (13, 14) assert value.value == "4" assert argument.loc == (9, 14) + assert field.nullability_assertion is None assert field.directives == () selection_set = field.selection_set assert isinstance(selection_set, SelectionSetNode) @@ -292,17 +518,7 @@ def creates_ast(): assert name.loc == (22, 24) assert name.value == "id" assert field.arguments == () - assert field.directives == () - assert field.selection_set is None - field = selections[0] - assert isinstance(field, FieldNode) - assert field.loc == (22, 24) - assert field.alias is None - name = field.name - assert isinstance(name, NameNode) - assert name.loc == (22, 24) - assert name.value == "id" - assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () assert field.selection_set is None field = selections[1] @@ -314,6 +530,7 @@ def creates_ast(): assert name.loc == (30, 34) assert name.value == "name" assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () assert field.selection_set is None @@ -356,6 +573,7 @@ def creates_ast_from_nameless_query_without_variables(): assert name.loc == (10, 14) assert name.value == "node" assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () selection_set = field.selection_set assert isinstance(selection_set, SelectionSetNode) @@ -372,6 +590,7 @@ def creates_ast_from_nameless_query_without_variables(): assert name.loc == (21, 23) assert name.value == 
"id" assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () assert field.selection_set is None diff --git a/tests/language/test_predicates.py b/tests/language/test_predicates.py index 419dd35a..498829f9 100644 --- a/tests/language/test_predicates.py +++ b/tests/language/test_predicates.py @@ -7,6 +7,7 @@ is_const_value_node, is_definition_node, is_executable_definition_node, + is_nullability_assertion_node, is_selection_node, is_type_definition_node, is_type_extension_node, @@ -78,6 +79,14 @@ def check_selection_node(): "selection", ] + def check_nullability_assertion_node(): + assert filter_nodes(is_nullability_assertion_node) == [ + "error_boundary", + "list_nullability_operator", + "non_null_assertion", + "nullability_assertion", + ] + def check_value_node(): assert filter_nodes(is_value_node) == [ "boolean_value", diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index 5628b7d2..3f91cc4a 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -130,11 +130,17 @@ def legacy_correctly_prints_fragment_defined_variables(): assert print_ast(fragment_with_variable) == dedent(source) def prints_kitchen_sink_without_altering_ast(kitchen_sink_query): # noqa: F811 - ast = parse(kitchen_sink_query, no_location=True) + ast = parse( + kitchen_sink_query, + no_location=True, + experimental_client_controlled_nullability=True, + ) ast_before_print_call = deepcopy(ast) printed = print_ast(ast) - printed_ast = parse(printed, no_location=True) + printed_ast = parse( + printed, no_location=True, experimental_client_controlled_nullability=True + ) assert printed_ast == ast assert deepcopy(ast) == ast_before_print_call @@ -151,6 +157,19 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_query): # noqa: F811 ...frag @onFragmentSpread } } + field3! + field4? + requiredField5: field5! + requiredSelectionSet(first: 10)! 
@directive { + field + } + unsetListItemsRequiredList: listField[]! + requiredListItemsUnsetList: listField[!] + requiredListItemsRequiredList: listField[!]! + unsetListItemsOptionalList: listField[]? + optionalListItemsUnsetList: listField[?] + optionalListItemsOptionalList: listField[?]? + multidimensionalList: listField[[[!]!]!]! } ... @skip(unless: $foo) { id diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index a0acd236..b44736fd 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -732,7 +732,7 @@ def leave(*args): # noinspection PyShadowingNames def visits_kitchen_sink(kitchen_sink_query): # noqa: F811 - ast = parse(kitchen_sink_query) + ast = parse(kitchen_sink_query, experimental_client_controlled_nullability=True) visited: List = [] record = visited.append arg_stack: List = [] @@ -878,6 +878,272 @@ def leave(*args): ["leave", "field", 1, None], ["leave", "selection_set", "selection_set", "field"], ["leave", "field", 0, None], + ["enter", "field", 1, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 1, None], + ["enter", "field", 2, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "error_boundary", "nullability_assertion", "field"], + ["leave", "error_boundary", "nullability_assertion", "field"], + ["leave", "field", 2, None], + ["enter", "field", 3, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 3, None], + ["enter", "field", 4, None], + ["enter", "name", "name", "field"], + ["leave", "name", 
"name", "field"], + ["enter", "argument", 0, None], + ["enter", "name", "name", "argument"], + ["leave", "name", "name", "argument"], + ["enter", "int_value", "value", "argument"], + ["leave", "int_value", "value", "argument"], + ["leave", "argument", 0, None], + ["enter", "directive", 0, None], + ["enter", "name", "name", "directive"], + ["leave", "name", "name", "directive"], + ["leave", "directive", 0, None], + ["enter", "selection_set", "selection_set", "field"], + ["enter", "field", 0, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["leave", "field", 0, None], + ["leave", "selection_set", "selection_set", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 4, None], + ["enter", "field", 5, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 5, None], + ["enter", "field", 6, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "list_nullability_operator", "nullability_assertion", "field"], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + ["leave", "list_nullability_operator", "nullability_assertion", "field"], + ["leave", "field", 6, None], + ["enter", "field", 7, None], + 
["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 7, None], + ["enter", "field", 8, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "error_boundary", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + ["leave", "error_boundary", "nullability_assertion", "field"], + ["leave", "field", 8, None], + ["enter", "field", 9, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "list_nullability_operator", "nullability_assertion", "field"], + [ + "enter", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + ["leave", "list_nullability_operator", "nullability_assertion", "field"], + ["leave", "field", 9, None], + ["enter", "field", 10, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", 
"name", "name", "field"], + ["enter", "error_boundary", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + [ + "enter", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + ["leave", "error_boundary", "nullability_assertion", "field"], + ["leave", "field", 10, None], + ["enter", "field", 11, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + 
"list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 11, None], ["leave", "selection_set", "selection_set", "inline_fragment"], ["leave", "inline_fragment", 1, None], ["enter", "inline_fragment", 2, None], diff --git a/tests/utilities/test_ast_to_dict.py b/tests/utilities/test_ast_to_dict.py index a755e8aa..3f7c2ca9 100644 --- a/tests/utilities/test_ast_to_dict.py +++ b/tests/utilities/test_ast_to_dict.py @@ -45,6 +45,7 @@ def converts_recursive_ast_to_recursive_dict(): "alias": None, "arguments": [], "directives": None, + "nullability_assertion": None, "selection_set": res, } ], @@ -283,6 +284,7 @@ def converts_simple_query_to_dict(): "directives": [], "kind": "field", "name": {"kind": "name", "value": "hero"}, + "nullability_assertion": None, "selection_set": { "kind": "selection_set", "selections": [ @@ -292,6 +294,7 @@ def converts_simple_query_to_dict(): "directives": [], "kind": "field", "name": {"kind": "name", "value": "name"}, + "nullability_assertion": None, "selection_set": None, }, { @@ -309,6 +312,7 @@ def converts_simple_query_to_dict(): "kind": "name", "value": "primaryFunction", }, + "nullability_assertion": None, "selection_set": None, } ], @@ -336,6 +340,7 @@ def converts_simple_query_to_dict(): "kind": "name", "value": "height", }, + "nullability_assertion": None, "selection_set": None, } ], @@ -441,6 +446,7 @@ def converts_simple_query_to_dict_with_locations(): "loc": {"end": 69, "start": 65}, "value": "hero", }, + "nullability_assertion": None, "selection_set": { "kind": "selection_set", "loc": {"end": 279, "start": 84}, @@ -456,6 +462,7 @@ def converts_simple_query_to_dict_with_locations(): "loc": {"end": 106, "start": 102}, "value": "name", }, + "nullability_assertion": None, "selection_set": None, }, { @@ -483,6 +490,7 @@ def converts_simple_query_to_dict_with_locations(): }, "value": "primaryFunction", }, + 
"nullability_assertion": None, "selection_set": None, } ], @@ -522,6 +530,7 @@ def converts_simple_query_to_dict_with_locations(): }, "value": "height", }, + "nullability_assertion": None, "selection_set": None, } ], diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 2e026af8..58643603 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -213,8 +213,16 @@ def strips_kitchen_sink_query_but_maintains_the_exact_same_ast( stripped_query = strip_ignored_characters(kitchen_sink_query) assert strip_ignored_characters(stripped_query) == stripped_query - query_ast = parse(kitchen_sink_query, no_location=True) - stripped_ast = parse(stripped_query, no_location=True) + query_ast = parse( + kitchen_sink_query, + no_location=True, + experimental_client_controlled_nullability=True, + ) + stripped_ast = parse( + stripped_query, + no_location=True, + experimental_client_controlled_nullability=True, + ) assert stripped_ast == query_ast # noinspection PyShadowingNames From c77d514b327604f805a4c7eb633536edb5bd0092 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rafa=C5=82=20Pito=C5=84?= Date: Sun, 19 Mar 2023 19:49:17 +0100 Subject: [PATCH 084/230] Make 'GraphQLInputFieldOutType' importable (#195) --- src/graphql/__init__.py | 2 ++ src/graphql/type/__init__.py | 2 ++ src/graphql/type/definition.py | 1 + 3 files changed, 5 insertions(+) diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 30c8b42e..9bdf7f3b 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -347,6 +347,7 @@ GraphQLFieldResolver, GraphQLInputField, GraphQLInputFieldMap, + GraphQLInputFieldOutType, GraphQLScalarSerializer, GraphQLScalarValueParser, GraphQLScalarLiteralParser, @@ -567,6 +568,7 @@ "GraphQLFieldResolver", "GraphQLInputField", "GraphQLInputFieldMap", + "GraphQLInputFieldOutType", "GraphQLScalarSerializer", "GraphQLScalarValueParser", 
"GraphQLScalarLiteralParser", diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index 569e4f52..f6af8b7e 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -98,6 +98,7 @@ GraphQLFieldMap, GraphQLInputField, GraphQLInputFieldMap, + GraphQLInputFieldOutType, GraphQLScalarSerializer, GraphQLScalarValueParser, GraphQLScalarLiteralParser, @@ -252,6 +253,7 @@ "GraphQLFieldMap", "GraphQLInputField", "GraphQLInputFieldMap", + "GraphQLInputFieldOutType", "GraphQLScalarSerializer", "GraphQLScalarValueParser", "GraphQLScalarLiteralParser", diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 8d1d55cd..e1a0a770 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -129,6 +129,7 @@ "GraphQLInputField", "GraphQLInputFieldKwargs", "GraphQLInputFieldMap", + "GraphQLInputFieldOutType", "GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", "GraphQLInputType", From 948d594949d93522029809f0cae6e25b3ddf79cf Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 14:05:40 +0200 Subject: [PATCH 085/230] Update dependencies --- poetry.lock | 578 ++++++++++++++++++++++++++----------------------- pyproject.toml | 6 +- tox.ini | 6 +- 3 files changed, 318 insertions(+), 272 deletions(-) diff --git a/poetry.lock b/poetry.lock index 50ad8e8a..b0aaad0b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -45,75 +45,76 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy [[package]] name = "babel" -version = "2.11.0" +version = "2.12.1" description = "Internationalization utilities" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, - {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, + {file = 
"Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, ] [package.dependencies] -pytz = ">=2015.7" +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "bandit" -version = "1.7.4" +version = "1.7.5" description = "Security oriented static analyser for python code." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, - {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, + {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, + {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, ] [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" PyYAML = ">=5.3.1" +rich = "*" stevedore = ">=1.20.0" [package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] -toml = ["toml"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] +toml = ["tomli (>=1.1.0)"] yaml = ["PyYAML"] [[package]] name = "black" -version = "23.1.0" +version = "23.3.0" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, - {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, - {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, - {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, - {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, - {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, - {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, - {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, - {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, - {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, - {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, - 
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, - {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, - {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, - {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, - {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, - {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, - {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, - {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, - {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, + 
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, + {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, + {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, + {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, + {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, + {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, + {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, + {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = 
"sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, + {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, + {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, + {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, + {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, + {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, + {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, ] [package.dependencies] @@ -182,100 +183,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = 
"charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = 
"charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = 
"charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = 
"charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = 
"charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file 
= "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -308,63 +296,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.0" +version = "7.2.3" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90e7a4cbbb7b1916937d380beb1315b12957b8e895d7d9fb032e2038ac367525"}, - {file = "coverage-7.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:34d7211be69b215ad92298a962b2cd5a4ef4b17c7871d85e15d3d1b6dc8d8c96"}, - {file = "coverage-7.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971b49dbf713044c3e5f6451b39f65615d4d1c1d9a19948fa0f41b0245a98765"}, - {file = 
"coverage-7.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0557289260125a6c453ad5673ba79e5b6841d9a20c9e101f758bfbedf928a77"}, - {file = "coverage-7.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:049806ae2df69468c130f04f0fab4212c46b34ba5590296281423bb1ae379df2"}, - {file = "coverage-7.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:875b03d92ac939fbfa8ae74a35b2c468fc4f070f613d5b1692f9980099a3a210"}, - {file = "coverage-7.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c160e34e388277f10c50dc2c7b5e78abe6d07357d9fe7fcb2f3c156713fd647e"}, - {file = "coverage-7.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:32e6a730fd18b2556716039ab93278ccebbefa1af81e6aa0c8dba888cf659e6e"}, - {file = "coverage-7.2.0-cp310-cp310-win32.whl", hash = "sha256:f3ff4205aff999164834792a3949f82435bc7c7655c849226d5836c3242d7451"}, - {file = "coverage-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:93db11da6e728587e943dff8ae1b739002311f035831b6ecdb15e308224a4247"}, - {file = "coverage-7.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd38140b56538855d3d5722c6d1b752b35237e7ea3f360047ce57f3fade82d98"}, - {file = "coverage-7.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dbb21561b0e04acabe62d2c274f02df0d715e8769485353ddf3cf84727e31ce"}, - {file = "coverage-7.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:171dd3aa71a49274a7e4fc26f5bc167bfae5a4421a668bc074e21a0522a0af4b"}, - {file = "coverage-7.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4655ecd813f4ba44857af3e9cffd133ab409774e9d2a7d8fdaf4fdfd2941b789"}, - {file = "coverage-7.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1856a8c4aa77eb7ca0d42c996d0ca395ecafae658c1432b9da4528c429f2575c"}, - {file = 
"coverage-7.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd67df6b48db18c10790635060858e2ea4109601e84a1e9bfdd92e898dc7dc79"}, - {file = "coverage-7.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2d7daf3da9c7e0ed742b3e6b4de6cc464552e787b8a6449d16517b31bbdaddf5"}, - {file = "coverage-7.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf9e02bc3dee792b9d145af30db8686f328e781bd212fdef499db5e9e4dd8377"}, - {file = "coverage-7.2.0-cp311-cp311-win32.whl", hash = "sha256:3713a8ec18781fda408f0e853bf8c85963e2d3327c99a82a22e5c91baffcb934"}, - {file = "coverage-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:88ae5929f0ef668b582fd7cad09b5e7277f50f912183cf969b36e82a1c26e49a"}, - {file = "coverage-7.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5e29a64e9586194ea271048bc80c83cdd4587830110d1e07b109e6ff435e5dbc"}, - {file = "coverage-7.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d5302eb84c61e758c9d68b8a2f93a398b272073a046d07da83d77b0edc8d76b"}, - {file = "coverage-7.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c9fffbc39dc4a6277e1525cab06c161d11ee3995bbc97543dc74fcec33e045b"}, - {file = "coverage-7.2.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6ceeab5fca62bca072eba6865a12d881f281c74231d2990f8a398226e1a5d96"}, - {file = "coverage-7.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:28563a35ef4a82b5bc5160a01853ce62b9fceee00760e583ffc8acf9e3413753"}, - {file = "coverage-7.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfa065307667f1c6e1f4c3e13f415b0925e34e56441f5fda2c84110a4a1d8bda"}, - {file = "coverage-7.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7f992b32286c86c38f07a8b5c3fc88384199e82434040a729ec06b067ee0d52c"}, - {file = "coverage-7.2.0-cp37-cp37m-win32.whl", hash = "sha256:2c15bd09fd5009f3a79c8b3682b52973df29761030b692043f9834fc780947c4"}, 
- {file = "coverage-7.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f332d61fbff353e2ef0f3130a166f499c3fad3a196e7f7ae72076d41a6bfb259"}, - {file = "coverage-7.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:577a8bc40c01ad88bb9ab1b3a1814f2f860ff5c5099827da2a3cafc5522dadea"}, - {file = "coverage-7.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9240a0335365c29c968131bdf624bb25a8a653a9c0d8c5dbfcabf80b59c1973c"}, - {file = "coverage-7.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:358d3bce1468f298b19a3e35183bdb13c06cdda029643537a0cc37e55e74e8f1"}, - {file = "coverage-7.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:932048364ff9c39030c6ba360c31bf4500036d4e15c02a2afc5a76e7623140d4"}, - {file = "coverage-7.2.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7efa21611ffc91156e6f053997285c6fe88cfef3fb7533692d0692d2cb30c846"}, - {file = "coverage-7.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:465ea431c3b78a87e32d7d9ea6d081a1003c43a442982375cf2c247a19971961"}, - {file = "coverage-7.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f03c229f1453b936916f68a47b3dfb5e84e7ad48e160488168a5e35115320c8"}, - {file = "coverage-7.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:40785553d68c61e61100262b73f665024fd2bb3c6f0f8e2cd5b13e10e4df027b"}, - {file = "coverage-7.2.0-cp38-cp38-win32.whl", hash = "sha256:b09dd7bef59448c66e6b490cc3f3c25c14bc85d4e3c193b81a6204be8dd355de"}, - {file = "coverage-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:dc4f9a89c82faf6254d646180b2e3aa4daf5ff75bdb2c296b9f6a6cf547e26a7"}, - {file = "coverage-7.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c243b25051440386179591a8d5a5caff4484f92c980fb6e061b9559da7cc3f64"}, - {file = "coverage-7.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b8fd32f85b256fc096deeb4872aeb8137474da0c0351236f93cbedc359353d6"}, - {file = 
"coverage-7.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f2a7df523791e6a63b40360afa6792a11869651307031160dc10802df9a252"}, - {file = "coverage-7.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da32526326e8da0effb452dc32a21ffad282c485a85a02aeff2393156f69c1c3"}, - {file = "coverage-7.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1153a6156715db9d6ae8283480ae67fb67452aa693a56d7dae9ffe8f7a80da"}, - {file = "coverage-7.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:74cd60fa00f46f28bd40048d6ca26bd58e9bee61d2b0eb4ec18cea13493c003f"}, - {file = "coverage-7.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:59a427f8a005aa7254074719441acb25ac2c2f60c1f1026d43f846d4254c1c2f"}, - {file = "coverage-7.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c3c4beddee01c8125a75cde3b71be273995e2e9ec08fbc260dd206b46bb99969"}, - {file = "coverage-7.2.0-cp39-cp39-win32.whl", hash = "sha256:08e3dd256b8d3e07bb230896c8c96ec6c5dffbe5a133ba21f8be82b275b900e8"}, - {file = "coverage-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad12c74c6ce53a027f5a5ecbac9be20758a41c85425c1bbab7078441794b04ee"}, - {file = "coverage-7.2.0-pp37.pp38.pp39-none-any.whl", hash = "sha256:ffa637a2d5883298449a5434b699b22ef98dd8e2ef8a1d9e60fa9cfe79813411"}, - {file = "coverage-7.2.0.tar.gz", hash = "sha256:9cc9c41aa5af16d845b53287051340c363dd03b7ef408e45eec3af52be77810d"}, + {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, + {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, + {file = 
"coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, + {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, + {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, + {file = 
"coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, + {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, + {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, + {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, + {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, 
+ {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, + {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, + {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, + {file = 
"coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, + {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, + {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, + {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, + {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, ] [package.dependencies] @@ -411,14 +399,14 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.0" +version = "1.1.1" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, - {file = "exceptiongroup-1.1.0.tar.gz", hash = 
"sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, ] [package.extras] @@ -426,19 +414,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.9.0" +version = "3.11.0" description = "A platform independent file lock." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, - {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, + {file = "filelock-3.11.0-py3-none-any.whl", hash = "sha256:f08a52314748335c6460fc8fe40cd5638b85001225db78c2aa01c8c0db83b318"}, + {file = "filelock-3.11.0.tar.gz", hash = "sha256:3618c0da67adcc0506b015fd11ef7faf1b493f0b40d87728e19986b536890c37"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -570,14 +558,14 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", [[package]] name = "importlib-metadata" -version = "6.0.0" +version = "6.2.0" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = 
"sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, + {file = "importlib_metadata-6.2.0-py3-none-any.whl", hash = "sha256:8388b74023a138c605fddd0d47cb81dd706232569f56c9aca7d9c7fdb54caeba"}, + {file = "importlib_metadata-6.2.0.tar.gz", hash = "sha256:9127aad2f49d7203e7112098c12b92e4fd1061ccd18548cdfdc49171a8c073cc"}, ] [package.dependencies] @@ -655,6 +643,32 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "markdown-it-py" +version = "2.2.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, + {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" +typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.2" @@ -727,44 +741,56 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mypy" -version = "1.0.1" +version = "1.2.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, - {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, - {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, - {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, - {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, - {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, - {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, - {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, - {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, - {file = 
"mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, - {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, - {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, - {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, - {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, - {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, - {file = "mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, - {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, - {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, - {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, - {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, - {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, - {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, - {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, + {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"}, + {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"}, + {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"}, + {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"}, + {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"}, + {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"}, + {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"}, + {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"}, + {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"}, + {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"}, + {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"}, + {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"}, + {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"}, + {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"}, + {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"}, + {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"}, + {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"}, + {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"}, + {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"}, + {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"}, + {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"}, + {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"}, + {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"}, + {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"}, + {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"}, + {file = "mypy-1.2.0.tar.gz", hash = 
"sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} typing-extensions = ">=3.10" @@ -801,14 +827,14 @@ files = [ [[package]] name = "pathspec" -version = "0.11.0" +version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, - {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, ] [[package]] @@ -825,22 +851,22 @@ files = [ [[package]] name = "platformdirs" -version = "3.0.0" +version = "3.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, - {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, + {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, + {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, ] [package.dependencies] -typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} [package.extras] docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -926,34 +952,34 @@ plugins = ["importlib-metadata"] [[package]] name = "pyproject-api" -version = "1.5.0" +version = "1.5.1" description = "API to interact with the python pyproject.toml based projects" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pyproject_api-1.5.0-py3-none-any.whl", hash = "sha256:4c111277dfb96bcd562c6245428f27250b794bfe3e210b8714c4f893952f2c17"}, - {file = "pyproject_api-1.5.0.tar.gz", hash = "sha256:0962df21f3e633b8ddb9567c011e6c1b3dcdfc31b7860c0ede7e24c5a1200fbe"}, + {file = "pyproject_api-1.5.1-py3-none-any.whl", hash = "sha256:4698a3777c2e0f6b624f8a4599131e2a25376d90fe8d146d7ac74c67c6f97c43"}, + {file = "pyproject_api-1.5.1.tar.gz", hash = "sha256:435f46547a9ff22cf4208ee274fca3e2869aeb062a4834adfc99a4dd64af3cf9"}, ] [package.dependencies] -packaging = ">=21.3" 
+packaging = ">=23" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=5.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "virtualenv (>=20.17)", "wheel (>=0.38.4)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=6)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "virtualenv (>=20.17.1)", "wheel (>=0.38.4)"] [[package]] name = "pytest" -version = "7.2.1" +version = "7.2.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"}, - {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"}, + {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, + {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, ] [package.dependencies] @@ -971,18 +997,18 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2. 
[[package]] name = "pytest-asyncio" -version = "0.20.3" +version = "0.21.0" description = "Pytest support for asyncio" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.20.3.tar.gz", hash = "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, - {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, + {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, + {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, ] [package.dependencies] -pytest = ">=6.1.0" +pytest = ">=7.0.0" typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} [package.extras] @@ -1061,14 +1087,14 @@ pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "dev" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] @@ -1143,16 +1169,36 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.3.3" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file 
= "rich-13.3.3-py3-none-any.whl", hash = "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333"}, + {file = "rich-13.3.3.tar.gz", hash = "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0,<3.0.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "setuptools" -version = "67.4.0" +version = "67.6.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.4.0-py3-none-any.whl", hash = "sha256:f106dee1b506dee5102cc3f3e9e68137bbad6d47b616be7991714b0c62204251"}, - {file = "setuptools-67.4.0.tar.gz", hash = "sha256:e5fd0a713141a4a105412233c63dc4e17ba0090c8e8334594ac790ec97792330"}, + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, ] [package.extras] @@ -1370,18 +1416,18 @@ test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jquery" -version = "2.0.0" +version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" category = "dev" optional = false python-versions = ">=2.7" files = [ - {file = "sphinxcontrib-jquery-2.0.0.tar.gz", hash = "sha256:8fb65f6dba84bf7bcd1aea1f02ab3955ac34611d838bcc95d4983b805b234daa"}, - {file = "sphinxcontrib_jquery-2.0.0-py3-none-any.whl", hash = "sha256:ed47fa425c338ffebe3c37e1cdb56e30eb806116b85f01055b158c7057fdb995"}, + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = 
"sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, ] [package.dependencies] -setuptools = "*" +Sphinx = ">=1.8" [[package]] name = "sphinxcontrib-jsmath" @@ -1487,33 +1533,33 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.4.6" +version = "4.4.11" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tox-4.4.6-py3-none-any.whl", hash = "sha256:e3d4a65852f029e5ba441a01824d2d839d30bb8fb071635ef9cb53952698e6bf"}, - {file = "tox-4.4.6.tar.gz", hash = "sha256:9786671d23b673ace7499c602c5746e2a225d1ecd9d9f624d0461303f40bd93b"}, + {file = "tox-4.4.11-py3-none-any.whl", hash = "sha256:6fa4dbd933d0e335b5392c81e9cd467630119b3669705dbad47814a93b6c9586"}, + {file = "tox-4.4.11.tar.gz", hash = "sha256:cd88e41aef9c71f0ba02b6d7939f102760b192b63458fbe04dbbaed82f7bf5f5"}, ] [package.dependencies] cachetools = ">=5.3" chardet = ">=5.1" colorama = ">=0.4.6" -filelock = ">=3.9" -importlib-metadata = {version = ">=6", markers = "python_version < \"3.8\""} +filelock = ">=3.10.7" +importlib-metadata = {version = ">=6.1", markers = "python_version < \"3.8\""} packaging = ">=23" -platformdirs = ">=2.6.2" +platformdirs = ">=3.2" pluggy = ">=1" -pyproject-api = ">=1.5" +pyproject-api = ">=1.5.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} -virtualenv = ">=20.17.1" +typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} +virtualenv = ">=20.21" [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -testing = ["build[virtualenv] (>=0.10)", 
"covdefaults (>=2.2.2)", "devpi-process (>=0.3)", "diff-cover (>=7.4)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.12.2)", "psutil (>=5.9.4)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.38.4)"] +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "devpi-process (>=0.3)", "diff-cover (>=7.5)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.13)", "psutil (>=5.9.4)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.2.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.40)"] [[package]] name = "typed-ast" @@ -1563,14 +1609,14 @@ files = [ [[package]] name = "urllib3" -version = "1.26.14" +version = "1.26.15" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, - {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, ] [package.extras] @@ -1603,14 +1649,14 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", [[package]] name = "virtualenv" -version = "20.19.0" +version = "20.21.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.19.0-py3-none-any.whl", hash = "sha256:54eb59e7352b573aa04d53f80fc9736ed0ad5143af445a1e539aada6eb947dd1"}, - {file = "virtualenv-20.19.0.tar.gz", hash = "sha256:37a640ba82ed40b226599c522d411e4be5edb339a0c0de030c0dc7b646d61590"}, + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, ] [package.dependencies] @@ -1642,4 +1688,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "1a44241e119441fdd0154b8e399ac397aeef931d213bb87940efed83b21bd54b" +content-hash = "dde11b1b0c0576196aa4313eb677c0006d301b1c6cf50e59ab458387f3b92240" diff --git a/pyproject.toml b/pyproject.toml index af09de60..71bfa735 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ optional = true [tool.poetry.group.test.dependencies] 
pytest = "^7.2" -pytest-asyncio = ">=0.20,<1" +pytest-asyncio = ">=0.21,<1" pytest-benchmark = "^4.0" pytest-cov = "^4.0" pytest-describe = "^2.0" @@ -61,7 +61,7 @@ tox = ">=3.0" optional = true [tool.poetry.group.lint.dependencies] -black = "23.1.0" +black = "23.3.0" flake8 = [ { version = ">=5,<7", python = ">=3.8" }, { version = ">=5,<6", python = "<3.8" } @@ -72,7 +72,7 @@ isort = [ { version = "^5.12", python = ">=3.8" }, { version = "^5.11", python = "<3.8" } ] -mypy = "1.0.1" +mypy = "1.2.0" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/tox.ini b/tox.ini index 6b0aa608..a6b2cf5a 100644 --- a/tox.ini +++ b/tox.ini @@ -15,7 +15,7 @@ python = [testenv:black] basepython = python3.11 -deps = black==23.1.0 +deps = black==23.3.0 commands = black src tests -t py310 --check @@ -37,7 +37,7 @@ commands = [testenv:mypy] basepython = python3.11 deps = - mypy==1.0.1 + mypy==1.2.0 pytest>=7.2,<8 commands = mypy src tests @@ -53,7 +53,7 @@ commands = [testenv] deps = pytest>=7.2,<8 - pytest-asyncio>=0.20,<1 + pytest-asyncio>=0.21,<1 pytest-benchmark>=4,<5 pytest-cov>=4,<5 pytest-describe>=2,<3 From 70df39d51bcff89e93fb94057ff45a3c54153d5b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 14:21:55 +0200 Subject: [PATCH 086/230] Improve comment in coerce_input_value Replicates graphql/graphql-js@ac90b52f6df53c211d3311d6d91bfc98b319aca3 --- src/graphql/utilities/coerce_input_value.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index 31237d1e..ac2d87ff 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -130,8 +130,9 @@ def coerce_input_value( return type_.out_type(coerced_dict) if is_leaf_type(type_): - # Scalars determine if a value is valid via `parse_value()`, which can throw to - # indicate failure. If it throws, maintain a reference to the original error. 
+ # Scalars and Enums determine if an input value is valid via `parse_value()`, + # which can throw to indicate failure. If it throws, maintain a reference + # to the original error. type_ = cast(GraphQLScalarType, type_) try: parse_result = type_.parse_value(input_value) From aab6d5020c1ed741fa41c2db3e7f78b8654035ce Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 14:37:31 +0200 Subject: [PATCH 087/230] Improve handling of ValidationAbortedError Replicates graphql/graphql-js@5aadd613b3129f6d25a8d7a612ded7e78e110470 --- src/graphql/validation/validate.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index c58d9e7a..b410ebbb 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -12,10 +12,15 @@ __all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] -class ValidationAbortedError(RuntimeError): +class ValidationAbortedError(GraphQLError): """Error when a validation has been aborted (error limit reached).""" +validation_aborted_error = ValidationAbortedError( + "Too many validation errors, error limit reached. Validation aborted." +) + + def validate( schema: GraphQLSchema, document_ast: DocumentNode, @@ -54,13 +59,7 @@ def validate( def on_error(error: GraphQLError) -> None: if len(errors) >= max_errors: # type: ignore - errors.append( - GraphQLError( - "Too many validation errors, error limit reached." - " Validation aborted." 
- ) - ) - raise ValidationAbortedError + raise validation_aborted_error errors.append(error) context = ValidationContext(schema, document_ast, type_info, on_error) @@ -73,7 +72,7 @@ def on_error(error: GraphQLError) -> None: try: visit(document_ast, TypeInfoVisitor(type_info, ParallelVisitor(visitors))) except ValidationAbortedError: - pass + errors.append(validation_aborted_error) return errors From 0da3225f308767859abb528321703a7c9023f50d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 15:05:25 +0200 Subject: [PATCH 088/230] parser: limit maximum number of tokens Replicates graphql/graphql-js@9df90799299f40b596938aba15e88c4cd4f2d376 --- src/graphql/language/parser.py | 53 +++++++++++++++++++++++++++------- tests/language/test_parser.py | 16 ++++++++++ 2 files changed, 59 insertions(+), 10 deletions(-) diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index d4846cc7..e380152a 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -84,6 +84,7 @@ def parse( source: SourceType, no_location: bool = False, + max_tokens: Optional[int] = None, allow_legacy_fragment_variables: bool = False, experimental_client_controlled_nullability: bool = False, ) -> DocumentNode: @@ -95,6 +96,12 @@ def parse( they correspond to. The ``no_location`` option disables that behavior for performance or testing. + Parser CPU and memory usage is linear to the number of tokens in a document, + however in extreme cases it becomes quadratic due to memory exhaustion. + Parsing happens before validation so even invalid queries can burn lots of + CPU time and memory. + To prevent this you can set a maximum number of tokens allowed within a document. 
+ Legacy feature (will be removed in v3.3): If ``allow_legacy_fragment_variables`` is set to ``True``, the parser will @@ -131,6 +138,7 @@ def parse( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, experimental_client_controlled_nullability=experimental_client_controlled_nullability, # noqa ) @@ -140,6 +148,7 @@ def parse( def parse_value( source: SourceType, no_location: bool = False, + max_tokens: Optional[int] = None, allow_legacy_fragment_variables: bool = False, ) -> ValueNode: """Parse the AST for a given string containing a GraphQL value. @@ -155,6 +164,7 @@ def parse_value( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, ) parser.expect_token(TokenKind.SOF) @@ -166,6 +176,7 @@ def parse_value( def parse_const_value( source: SourceType, no_location: bool = False, + max_tokens: Optional[int] = None, allow_legacy_fragment_variables: bool = False, ) -> ConstValueNode: """Parse the AST for a given string containing a GraphQL constant value. @@ -176,6 +187,7 @@ def parse_const_value( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, ) parser.expect_token(TokenKind.SOF) @@ -187,6 +199,7 @@ def parse_const_value( def parse_type( source: SourceType, no_location: bool = False, + max_tokens: Optional[int] = None, allow_legacy_fragment_variables: bool = False, ) -> TypeNode: """Parse the AST for a given string containing a GraphQL Type. @@ -202,6 +215,7 @@ def parse_type( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, ) parser.expect_token(TokenKind.SOF) @@ -222,27 +236,32 @@ class Parser: library, please use the `__version_info__` variable for version detection. 
""" - _lexer: Lexer _no_location: bool + _max_tokens: Optional[int] _allow_legacy_fragment_variables: bool _experimental_client_controlled_nullability: bool + _lexer: Lexer + _token_counter: int def __init__( self, source: SourceType, no_location: bool = False, + max_tokens: Optional[int] = None, allow_legacy_fragment_variables: bool = False, experimental_client_controlled_nullability: bool = False, ): if not is_source(source): source = Source(cast(str, source)) - self._lexer = Lexer(source) self._no_location = no_location + self._max_tokens = max_tokens self._allow_legacy_fragment_variables = allow_legacy_fragment_variables self._experimental_client_controlled_nullability = ( experimental_client_controlled_nullability ) + self._lexer = Lexer(source) + self._token_counter = 0 def parse_name(self) -> NameNode: """Convert a name lex token into a name parse node.""" @@ -546,7 +565,7 @@ def parse_value_literal(self, is_const: bool) -> ValueNode: def parse_string_literal(self, _is_const: bool = False) -> StringValueNode: token = self._lexer.token - self._lexer.advance() + self.advance_lexer() return StringValueNode( value=token.value, block=token.kind == TokenKind.BLOCK_STRING, @@ -583,18 +602,18 @@ def parse_object(self, is_const: bool) -> ObjectValueNode: def parse_int(self, _is_const: bool = False) -> IntValueNode: token = self._lexer.token - self._lexer.advance() + self.advance_lexer() return IntValueNode(value=token.value, loc=self.loc(token)) def parse_float(self, _is_const: bool = False) -> FloatValueNode: token = self._lexer.token - self._lexer.advance() + self.advance_lexer() return FloatValueNode(value=token.value, loc=self.loc(token)) def parse_named_values(self, _is_const: bool = False) -> ValueNode: token = self._lexer.token value = token.value - self._lexer.advance() + self.advance_lexer() if value == "true": return BooleanValueNode(value=True, loc=self.loc(token)) if value == "false": @@ -1089,7 +1108,7 @@ def expect_token(self, kind: TokenKind) -> Token: 
""" token = self._lexer.token if token.kind == kind: - self._lexer.advance() + self.advance_lexer() return token raise GraphQLSyntaxError( @@ -1106,7 +1125,7 @@ def expect_optional_token(self, kind: TokenKind) -> bool: """ token = self._lexer.token if token.kind == kind: - self._lexer.advance() + self.advance_lexer() return True return False @@ -1119,7 +1138,7 @@ def expect_keyword(self, value: str) -> None: """ token = self._lexer.token if token.kind == TokenKind.NAME and token.value == value: - self._lexer.advance() + self.advance_lexer() else: raise GraphQLSyntaxError( self._lexer.source, @@ -1135,7 +1154,7 @@ def expect_optional_keyword(self, value: str) -> bool: """ token = self._lexer.token if token.kind == TokenKind.NAME and token.value == value: - self._lexer.advance() + self.advance_lexer() return True return False @@ -1223,6 +1242,20 @@ def delimited_many( break return nodes + def advance_lexer(self) -> None: + """Advance the lexer.""" + token = self._lexer.advance() + max_tokens = self._max_tokens + if max_tokens is not None and token.kind is not TokenKind.EOF: + self._token_counter += 1 + if self._token_counter > max_tokens: + raise GraphQLSyntaxError( + self._lexer.source, + token.start, + f"Document contains more that {max_tokens} tokens." + " Parsing aborted.", + ) + def get_token_desc(token: Token) -> str: """Describe a token as a string for debugging.""" diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 7ac79af6..82d787f3 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -115,6 +115,22 @@ def parse_provides_useful_error_when_using_source(): """ ) + def limits_maximum_number_of_tokens(): + parse("{ foo }", max_tokens=3) + with raises( + GraphQLSyntaxError, + match="Syntax Error:" + r" Document contains more that 2 tokens\. 
Parsing aborted\.", + ): + parse("{ foo }", max_tokens=2) + parse('{ foo(bar: "baz") }', max_tokens=8) + with raises( + GraphQLSyntaxError, + match="Syntax Error:" + r" Document contains more that 7 tokens\. Parsing aborted\.", + ): + parse('{ foo(bar: "baz") }', max_tokens=7) + def parses_variable_inline_values(): parse("{ field(complex: { a: { b: [ $var ] } }) }") From b4cf3c34977f4b7822e9ab1d9ce7a2b9ffb24051 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 17:00:13 +0200 Subject: [PATCH 089/230] Minor simplification and clarification Replicates graphql/graphql-js@7f5fe4da1a2f213db0a253e2a9b5398071931950 --- src/graphql/execution/execute.py | 3 ++- src/graphql/validation/rules/single_field_subscriptions.py | 3 +-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 2bac13a9..f6eb845e 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1483,7 +1483,8 @@ def execute_subscription( root_type, context.operation.selection_set, ) - response_name, field_nodes = next(iter(root_fields.items())) + first_root_field = next(iter(root_fields.items())) + response_name, field_nodes = first_root_field field_name = field_nodes[0].name.value field_def = schema.get_field(root_type, field_name) diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 9b96813b..bf9541c3 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -70,8 +70,7 @@ def enter_operation_definition( ) ) for field_nodes in fields.values(): - field = field_nodes[0] - field_name = field.name.value + field_name = field_nodes[0].name.value if field_name.startswith("__"): self.report_error( GraphQLError( From b47c92256d7439d3577d10dabb3448117212f342 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 
Apr 2023 19:00:16 +0200 Subject: [PATCH 090/230] Stricter typing in extend_schema Replicates graphql/graphql-js@c9f968b9afb73c7bdaacc836531a2eaf1be150d3 --- src/graphql/utilities/extend_schema.py | 87 +++++++++++++++++--------- 1 file changed, 59 insertions(+), 28 deletions(-) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 18a261fc..b2db4ffb 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -9,6 +9,7 @@ Mapping, Optional, Tuple, + TypeVar, Union, cast, ) @@ -133,6 +134,38 @@ def extend_schema( ) +TEN = TypeVar("TEN", bound=TypeExtensionNode) + + +class TypeExtensionsMap: + """Mappings from types to their extensions.""" + + scalar: DefaultDict[str, List[ScalarTypeExtensionNode]] + object: DefaultDict[str, List[ObjectTypeExtensionNode]] + interface: DefaultDict[str, List[InterfaceTypeExtensionNode]] + union: DefaultDict[str, List[UnionTypeExtensionNode]] + enum: DefaultDict[str, List[EnumTypeExtensionNode]] + input_object: DefaultDict[str, List[InputObjectTypeExtensionNode]] + + def __init__(self) -> None: + self.scalar = defaultdict(list) + self.object = defaultdict(list) + self.interface = defaultdict(list) + self.union = defaultdict(list) + self.enum = defaultdict(list) + self.input_object = defaultdict(list) + + def for_node(self, node: TEN) -> DefaultDict[str, List[TEN]]: + """Get type extensions map for the given node kind.""" + kind = node.kind + try: + kind = kind.removesuffix("_type_extension") + except AttributeError: # pragma: no cover (Python < 3.9) + if kind.endswith("_type_extension"): + kind = kind[:-15] + return getattr(self, kind) + + class ExtendSchemaImpl: """Helper class implementing the methods to extend a schema. 
@@ -143,11 +176,11 @@ class ExtendSchemaImpl: """ type_map: Dict[str, GraphQLNamedType] - type_extensions_map: Dict[str, Any] + type_extensions: TypeExtensionsMap - def __init__(self, type_extensions_map: Dict[str, Any]): + def __init__(self, type_extensions: TypeExtensionsMap): self.type_map = {} - self.type_extensions_map = type_extensions_map + self.type_extensions = type_extensions @classmethod def extend_schema_args( @@ -164,7 +197,8 @@ def extend_schema_args( # Collect the type definitions and extensions found in the document. type_defs: List[TypeDefinitionNode] = [] - type_extensions_map: DefaultDict[str, Any] = defaultdict(list) + + type_extensions = TypeExtensionsMap() # New directives and types are separate because a directives and types can have # the same name. For example, a type named "skip". @@ -174,31 +208,28 @@ def extend_schema_args( # Schema extensions are collected which may add additional operation types. schema_extensions: List[SchemaExtensionNode] = [] + is_schema_changed = False for def_ in document_ast.definitions: if isinstance(def_, SchemaDefinitionNode): schema_def = def_ elif isinstance(def_, SchemaExtensionNode): schema_extensions.append(def_) + elif isinstance(def_, DirectiveDefinitionNode): + directive_defs.append(def_) elif isinstance(def_, TypeDefinitionNode): type_defs.append(def_) elif isinstance(def_, TypeExtensionNode): - extended_type_name = def_.name.value - type_extensions_map[extended_type_name].append(def_) - elif isinstance(def_, DirectiveDefinitionNode): - directive_defs.append(def_) + type_extensions.for_node(def_)[def_.name.value].append(def_) + else: + continue + is_schema_changed = True # If this document contains no new types, extensions, or directives then return # the same unmodified GraphQLSchema instance. 
- if ( - not type_extensions_map - and not type_defs - and not directive_defs - and not schema_extensions - and not schema_def - ): + if not is_schema_changed: return schema_kwargs - self = cls(type_extensions_map) + self = cls(type_extensions) for existing_type in schema_kwargs["types"] or (): self.type_map[existing_type.name] = self.extend_named_type(existing_type) for type_node in type_defs: @@ -311,7 +342,7 @@ def extend_input_object_type( type_: GraphQLInputObjectType, ) -> GraphQLInputObjectType: kwargs = type_.to_kwargs() - extensions = tuple(self.type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.input_object[kwargs["name"]]) return GraphQLInputObjectType( **merge_kwargs( @@ -325,7 +356,7 @@ def extend_input_object_type( def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType: kwargs = type_.to_kwargs() - extensions = tuple(self.type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.enum[kwargs["name"]]) return GraphQLEnumType( **merge_kwargs( @@ -337,7 +368,7 @@ def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType: def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: kwargs = type_.to_kwargs() - extensions = tuple(self.type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.scalar[kwargs["name"]]) specified_by_url = kwargs["specified_by_url"] for extension_node in extensions: @@ -373,7 +404,7 @@ def extend_object_type_fields( # noinspection PyShadowingNames def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: kwargs = type_.to_kwargs() - extensions = tuple(self.type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.object[kwargs["name"]]) return GraphQLObjectType( **merge_kwargs( @@ -410,7 +441,7 @@ def extend_interface_type( self, type_: GraphQLInterfaceType ) -> GraphQLInterfaceType: kwargs = type_.to_kwargs() - extensions = tuple(self.type_extensions_map[kwargs["name"]]) + 
extensions = tuple(self.type_extensions.interface[kwargs["name"]]) return GraphQLInterfaceType( **merge_kwargs( @@ -433,7 +464,7 @@ def extend_union_type_types( def extend_union_type(self, type_: GraphQLUnionType) -> GraphQLUnionType: kwargs = type_.to_kwargs() - extensions = tuple(self.type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.union[kwargs["name"]]) return GraphQLUnionType( **merge_kwargs( @@ -626,7 +657,7 @@ def build_union_types( def build_object_type( self, ast_node: ObjectTypeDefinitionNode ) -> GraphQLObjectType: - extension_nodes = self.type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions.object[ast_node.name.value] all_nodes: List[Union[ObjectTypeDefinitionNode, ObjectTypeExtensionNode]] = [ ast_node, *extension_nodes, @@ -644,7 +675,7 @@ def build_interface_type( self, ast_node: InterfaceTypeDefinitionNode, ) -> GraphQLInterfaceType: - extension_nodes = self.type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions.interface[ast_node.name.value] all_nodes: List[ Union[InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode] ] = [ast_node, *extension_nodes] @@ -658,7 +689,7 @@ def build_interface_type( ) def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: - extension_nodes = self.type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions.enum[ast_node.name.value] all_nodes: List[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] = [ ast_node, *extension_nodes, @@ -672,7 +703,7 @@ def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: ) def build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: - extension_nodes = self.type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions.union[ast_node.name.value] all_nodes: List[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]] = [ ast_node, *extension_nodes, @@ -688,7 +719,7 @@ def 
build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionTyp def build_scalar_type( self, ast_node: ScalarTypeDefinitionNode ) -> GraphQLScalarType: - extension_nodes = self.type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions.scalar[ast_node.name.value] return GraphQLScalarType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, @@ -701,7 +732,7 @@ def build_input_object_type( self, ast_node: InputObjectTypeDefinitionNode, ) -> GraphQLInputObjectType: - extension_nodes = self.type_extensions_map[ast_node.name.value] + extension_nodes = self.type_extensions.input_object[ast_node.name.value] all_nodes: List[ Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] ] = [ast_node, *extension_nodes] From ab33d4cbebbe97e3b88e32d8fc5ae22ad1560158 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 20:39:38 +0200 Subject: [PATCH 091/230] polish: add tests for assert_equal_awaitables_or_values Replicates graphql/graphql-js@a8426782d9cb8b2d657f0f265d19e18fce2be841 --- tests/execution/test_subscribe.py | 36 ++------------ tests/utils/__init__.py | 9 +++- .../assert_equal_awaitables_or_values.py | 27 ++++++++++ tests/utils/assert_matching_values.py | 14 ++++++ .../test_assert_equal_awaitables_or_values.py | 49 +++++++++++++++++++ tests/utils/test_assert_matching_values.py | 13 +++++ 6 files changed, 114 insertions(+), 34 deletions(-) create mode 100644 tests/utils/assert_equal_awaitables_or_values.py create mode 100644 tests/utils/assert_matching_values.py create mode 100644 tests/utils/test_assert_equal_awaitables_or_values.py create mode 100644 tests/utils/test_assert_matching_values.py diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index e6bb726e..7ed931e6 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,16 +1,5 @@ import asyncio -from typing import ( - Any, - 
AsyncIterable, - Awaitable, - Callable, - Dict, - List, - Optional, - TypeVar, - Union, - cast, -) +from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union from pytest import mark, raises @@ -33,6 +22,8 @@ GraphQLString, ) +from ..utils.assert_equal_awaitables_or_values import assert_equal_awaitables_or_values + try: from typing import TypedDict @@ -150,27 +141,6 @@ def transform(new_email): DummyQueryType = GraphQLObjectType("Query", {"dummy": GraphQLField(GraphQLString)}) -def assert_equal_awaitables_or_values( - value1: AwaitableOrValue[T], value2: AwaitableOrValue[T] -) -> AwaitableOrValue[T]: - if is_awaitable(value1): - awaitable1 = cast(Awaitable[T], value1) - assert is_awaitable(value2) - awaitable2 = cast(Awaitable[T], value2) - - # noinspection PyShadowingNames - async def awaited_equal_value(): - value1 = await awaitable1 - value2 = await awaitable2 - assert value1 == value2 - return value1 - - return awaited_equal_value() - assert not is_awaitable(value2) - assert value1 == value2 - return value1 - - def subscribe_with_bad_fn( subscribe_fn: Callable, ) -> AwaitableOrValue[Union[ExecutionResult, AsyncIterable[Any]]]: diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index d6392286..a40b86e2 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,7 +1,14 @@ """Test utilities""" +from .assert_equal_awaitables_or_values import assert_equal_awaitables_or_values +from .assert_matching_values import assert_matching_values from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings -__all__ = ["dedent", "gen_fuzz_strings"] +__all__ = [ + "assert_matching_values", + "assert_equal_awaitables_or_values", + "dedent", + "gen_fuzz_strings", +] diff --git a/tests/utils/assert_equal_awaitables_or_values.py b/tests/utils/assert_equal_awaitables_or_values.py new file mode 100644 index 00000000..71805cee --- /dev/null +++ b/tests/utils/assert_equal_awaitables_or_values.py @@ -0,0 +1,27 @@ +import 
asyncio +from typing import Awaitable, Tuple, TypeVar, cast + +from graphql.pyutils import is_awaitable + +from .assert_matching_values import assert_matching_values + + +__all__ = ["assert_equal_awaitables_or_values"] + +T = TypeVar("T") + + +def assert_equal_awaitables_or_values(*items: T) -> T: + """Check whether the items are the same and either all awaitables or all values.""" + if all(is_awaitable(item) for item in items): + awaitable_items = cast(Tuple[Awaitable], items) + + async def assert_matching_awaitables(): + return assert_matching_values(*(await asyncio.gather(*awaitable_items))) + + return assert_matching_awaitables() + + if all(not is_awaitable(item) for item in items): + return assert_matching_values(*items) + + assert False, "Received an invalid mixture of promises and values." diff --git a/tests/utils/assert_matching_values.py b/tests/utils/assert_matching_values.py new file mode 100644 index 00000000..b7f0fdc7 --- /dev/null +++ b/tests/utils/assert_matching_values.py @@ -0,0 +1,14 @@ +from typing import TypeVar + + +__all__ = ["assert_matching_values"] + +T = TypeVar("T") + + +def assert_matching_values(*values: T) -> T: + """Test that all values in the sequence are equal.""" + first_value, *remaining_values = values + for value in remaining_values: + assert value == first_value + return first_value diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py new file mode 100644 index 00000000..63c389b7 --- /dev/null +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -0,0 +1,49 @@ +from pytest import mark, raises + +from . 
import assert_equal_awaitables_or_values + + +def describe_assert_equal_awaitables_or_values(): + def throws_when_given_unequal_values(): + with raises(AssertionError): + assert_equal_awaitables_or_values({}, {}, {"test": "test"}) + + def does_not_throw_when_given_equal_values(): + test_value = {"test": "test"} + assert ( + assert_equal_awaitables_or_values(test_value, test_value, test_value) + == test_value + ) + + @mark.asyncio + async def does_not_throw_when_given_equal_awaitables(): + async def test_value(): + return {"test": "test"} + + assert ( + await assert_equal_awaitables_or_values( + test_value(), test_value(), test_value() + ) + == await test_value() + ) + + @mark.asyncio + async def throws_when_given_unequal_awaitables(): + async def test_value(value): + return value + + with raises(AssertionError): + await assert_equal_awaitables_or_values( + test_value({}), test_value({}), test_value({"test": "test"}) + ) + + @mark.asyncio + async def throws_when_given_mixture_of_equal_values_and_awaitables(): + async def test_value(): + return {"test": "test"} + + with raises( + AssertionError, + match=r"Received an invalid mixture of promises and values\.", + ): + await assert_equal_awaitables_or_values(await test_value(), test_value()) diff --git a/tests/utils/test_assert_matching_values.py b/tests/utils/test_assert_matching_values.py new file mode 100644 index 00000000..7569b2c5 --- /dev/null +++ b/tests/utils/test_assert_matching_values.py @@ -0,0 +1,13 @@ +from pytest import raises + +from . 
import assert_matching_values + + +def describe_assert_matching_values(): + def throws_when_given_unequal_values(): + with raises(AssertionError): + assert_matching_values({}, {}, {"test": "test"}) + + def does_not_throw_when_given_equal_values(): + test_value = {"test": "test"} + assert assert_matching_values(test_value, test_value, test_value) == test_value From b499445d62b551dccfa59fce64dd5bf3e27a7ea7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 21:33:17 +0200 Subject: [PATCH 092/230] Remove outdated comment --- src/graphql/utilities/extend_schema.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index b2db4ffb..17858d5f 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -193,8 +193,6 @@ def extend_schema_args( For internal use only. """ - # Note: schema_kwargs should become a TypedDict once we require Python 3.8 - # Collect the type definitions and extensions found in the document. 
type_defs: List[TypeDefinitionNode] = [] From 1e2589ea0a7fc305ffc0cf8b38a5a8842415ab12 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 21:50:49 +0200 Subject: [PATCH 093/230] polish: rename MapAsyncIterator to MapAsyncIterable Replicates graphql/graphql-js@4822cb4bedc7bc45d6c4be136e9180fb0312a9bb --- docs/conf.py | 2 +- docs/modules/execution.rst | 2 +- src/graphql/__init__.py | 4 +- src/graphql/execution/__init__.py | 4 +- src/graphql/execution/execute.py | 6 +- ...sync_iterator.py => map_async_iterable.py} | 6 +- tests/execution/test_customize.py | 6 +- ...iterator.py => test_map_async_iterable.py} | 90 +++++++++---------- tests/execution/test_subscribe.py | 36 ++++---- tests/test_user_registry.py | 6 +- 10 files changed, 81 insertions(+), 81 deletions(-) rename src/graphql/execution/{map_async_iterator.py => map_async_iterable.py} (97%) rename tests/execution/{test_map_async_iterator.py => test_map_async_iterable.py} (85%) diff --git a/docs/conf.py b/docs/conf.py index 2763fd54..3f937283 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -148,7 +148,7 @@ GraphQLOutputType Middleware asyncio.events.AbstractEventLoop -graphql.execution.map_async_iterator.MapAsyncIterator +graphql.execution.map_async_iterable.MapAsyncIterable graphql.execution.Middleware graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst index 958cfbe1..874126a6 100644 --- a/docs/modules/execution.rst +++ b/docs/modules/execution.rst @@ -26,7 +26,7 @@ Execution .. autofunction:: create_source_event_stream -.. autoclass:: MapAsyncIterator +.. autoclass:: MapAsyncIterable .. 
autoclass:: Middleware diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 9bdf7f3b..4880dfab 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -437,7 +437,7 @@ # Subscription subscribe, create_source_event_stream, - MapAsyncIterator, + MapAsyncIterable, # Middleware Middleware, MiddlewareManager, @@ -703,7 +703,7 @@ "MiddlewareManager", "subscribe", "create_source_event_stream", - "MapAsyncIterator", + "MapAsyncIterable", "validate", "ValidationContext", "ValidationRule", diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index a9cd45b6..951ec8f1 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -16,7 +16,7 @@ FormattedExecutionResult, Middleware, ) -from .map_async_iterator import MapAsyncIterator +from .map_async_iterable import MapAsyncIterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -30,7 +30,7 @@ "ExecutionContext", "ExecutionResult", "FormattedExecutionResult", - "MapAsyncIterator", + "MapAsyncIterable", "Middleware", "MiddlewareManager", "get_argument_values", diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index f6eb845e..dd06b239 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -59,7 +59,7 @@ is_object_type, ) from .collect_fields import collect_fields, collect_subfields -from .map_async_iterator import MapAsyncIterator +from .map_async_iterable import MapAsyncIterable from .middleware import MiddlewareManager from .values import get_argument_values, get_variable_values @@ -1371,7 +1371,7 @@ async def await_result() -> Any: result_or_stream = await awaitable_result_or_stream if isinstance(result_or_stream, ExecutionResult): return result_or_stream - return MapAsyncIterator(result_or_stream, map_source_to_response) + return MapAsyncIterable(result_or_stream, map_source_to_response) return 
await_result() @@ -1379,7 +1379,7 @@ async def await_result() -> Any: return result_or_stream # Map every source value to a ExecutionResult value as described above. - return MapAsyncIterator( + return MapAsyncIterable( cast(AsyncIterable[Any], result_or_stream), map_source_to_response ) diff --git a/src/graphql/execution/map_async_iterator.py b/src/graphql/execution/map_async_iterable.py similarity index 97% rename from src/graphql/execution/map_async_iterator.py rename to src/graphql/execution/map_async_iterable.py index 76ff7bc5..84bd3f4a 100644 --- a/src/graphql/execution/map_async_iterator.py +++ b/src/graphql/execution/map_async_iterable.py @@ -7,11 +7,11 @@ from typing import Any, AsyncIterable, Callable, Optional, Set, Type, Union, cast -__all__ = ["MapAsyncIterator"] +__all__ = ["MapAsyncIterable"] # noinspection PyAttributeOutsideInit -class MapAsyncIterator: +class MapAsyncIterable: """Map an AsyncIterable over a callback function. Given an AsyncIterable and a callback function, return an AsyncIterator which @@ -26,7 +26,7 @@ def __init__(self, iterable: AsyncIterable, callback: Callable) -> None: self.callback = callback self._close_event = Event() - def __aiter__(self) -> MapAsyncIterator: + def __aiter__(self) -> MapAsyncIterable: """Get the iterator object.""" return self diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index a82d7e0d..63f7e2ec 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,6 +1,6 @@ from pytest import mark -from graphql.execution import ExecutionContext, MapAsyncIterator, execute, subscribe +from graphql.execution import ExecutionContext, MapAsyncIterable, execute, subscribe from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -73,7 +73,7 @@ async def custom_foo(): root_value=Root(), subscribe_field_resolver=lambda root, _info: root.custom_foo(), ) - assert isinstance(subscription, 
MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ( {"foo": "FooValue"}, @@ -117,6 +117,6 @@ def resolve_foo(message, _info): context_value={}, execution_context_class=TestExecutionContext, ) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"foo": "bar"}, None) diff --git a/tests/execution/test_map_async_iterator.py b/tests/execution/test_map_async_iterable.py similarity index 85% rename from tests/execution/test_map_async_iterator.py rename to tests/execution/test_map_async_iterable.py index 0845b9bc..77c2ecb4 100644 --- a/tests/execution/test_map_async_iterator.py +++ b/tests/execution/test_map_async_iterable.py @@ -4,7 +4,7 @@ from pytest import mark, raises -from graphql.execution import MapAsyncIterator +from graphql.execution import MapAsyncIterable is_pypy = platform.python_implementation() == "PyPy" @@ -18,7 +18,7 @@ async def anext(iterator): return await iterator.__anext__() -def describe_map_async_iterator(): +def describe_map_async_iterable(): @mark.asyncio async def maps_over_async_generator(): async def source(): @@ -26,7 +26,7 @@ async def source(): yield 2 yield 3 - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -48,7 +48,7 @@ async def __anext__(self): except IndexError: raise StopAsyncIteration - doubles = MapAsyncIterator(Iterable(), lambda x: x + x) + doubles = MapAsyncIterable(Iterable(), lambda x: x + x) values = [value async for value in doubles] @@ -62,7 +62,7 @@ async def source(): yield 2 yield 3 - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) values = [value async for value in doubles] @@ -78,7 +78,7 @@ async def source(): async def double(x): return x + x - doubles = MapAsyncIterator(source(), 
double) + doubles = MapAsyncIterable(source(), double) values = [value async for value in doubles] @@ -91,7 +91,7 @@ async def source(): yield 2 yield 3 # pragma: no cover - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -119,7 +119,7 @@ async def __anext__(self): except IndexError: # pragma: no cover raise StopAsyncIteration - doubles = MapAsyncIterator(Iterable(), lambda x: x + x) + doubles = MapAsyncIterable(Iterable(), lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -144,7 +144,7 @@ async def source(): yield "Done" yield "Last" - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -171,7 +171,7 @@ async def __anext__(self): except IndexError: # pragma: no cover raise StopAsyncIteration - doubles = MapAsyncIterator(Iterable(), lambda x: x + x) + doubles = MapAsyncIterable(Iterable(), lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -188,15 +188,15 @@ async def __anext__(self): await anext(doubles) @mark.asyncio - async def allows_throwing_errors_with_values_through_async_iterators(): - class Iterator: + async def allows_throwing_errors_with_values_through_async_iterables(): + class Iterable: def __aiter__(self): return self async def __anext__(self): return 1 - one = MapAsyncIterator(Iterator(), lambda x: x) + one = MapAsyncIterable(Iterable(), lambda x: x) assert await anext(one) == 1 @@ -214,15 +214,15 @@ async def __anext__(self): await anext(one) @mark.asyncio - async def allows_throwing_errors_with_traceback_through_async_iterators(): - class Iterator: + async def allows_throwing_errors_with_traceback_through_async_iterables(): + class Iterable: def __aiter__(self): return self async def __anext__(self): return 1 - one = 
MapAsyncIterator(Iterator(), lambda x: x) + one = MapAsyncIterable(Iterable(), lambda x: x) assert await anext(one) == 1 @@ -249,7 +249,7 @@ async def source(): except Exception as e: yield e - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -268,7 +268,7 @@ async def source(): yield "Hello" raise RuntimeError("Goodbye") - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) assert await anext(doubles) == "HelloHello" @@ -282,7 +282,7 @@ async def does_not_normally_map_over_externally_thrown_errors(): async def source(): yield "Hello" - doubles = MapAsyncIterator(source(), lambda x: x + x) + doubles = MapAsyncIterable(source(), lambda x: x + x) assert await anext(doubles) == "HelloHello" @@ -292,7 +292,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" @mark.asyncio - async def can_use_simple_iterator_instead_of_generator(): + async def can_use_simple_iterable_instead_of_generator(): async def source(): yield 1 yield 2 @@ -314,15 +314,15 @@ async def __anext__(self): def double(x): return x + x - for iterator in source, Source: - doubles = MapAsyncIterator(iterator(), double) + for iterable in source, Source: + doubles = MapAsyncIterable(iterable(), double) await doubles.aclose() with raises(StopAsyncIteration): await anext(doubles) - doubles = MapAsyncIterator(iterator(), double) + doubles = MapAsyncIterable(iterable(), double) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -331,7 +331,7 @@ def double(x): with raises(StopAsyncIteration): await anext(doubles) - doubles = MapAsyncIterator(iterator(), double) + doubles = MapAsyncIterable(iterable(), double) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -358,7 +358,7 @@ def double(x): await doubles.aclose() - doubles = MapAsyncIterator(iterator(), double) + doubles = 
MapAsyncIterable(iterable(), double) assert await anext(doubles) == 2 assert await anext(doubles) == 4 @@ -383,7 +383,7 @@ async def source(): yield 3 # pragma: no cover singles = source() - doubles = MapAsyncIterator(singles, lambda x: x * 2) + doubles = MapAsyncIterable(singles, lambda x: x * 2) result = await anext(doubles) assert result == 2 @@ -403,10 +403,10 @@ async def source(): await anext(singles) @mark.asyncio - async def can_unset_closed_state_of_async_iterator(): + async def can_unset_closed_state_of_async_iterable(): items = [1, 2, 3] - class Iterator: + class Iterable: def __init__(self): self.is_closed = False @@ -424,37 +424,37 @@ async def __anext__(self): async def aclose(self): self.is_closed = True - iterator = Iterator() - doubles = MapAsyncIterator(iterator, lambda x: x + x) + iterable = Iterable() + doubles = MapAsyncIterable(iterable, lambda x: x + x) assert await anext(doubles) == 2 assert await anext(doubles) == 4 - assert not iterator.is_closed + assert not iterable.is_closed await doubles.aclose() - assert iterator.is_closed + assert iterable.is_closed with raises(StopAsyncIteration): - await anext(iterator) + await anext(iterable) with raises(StopAsyncIteration): await anext(doubles) assert doubles.is_closed - iterator.is_closed = False + iterable.is_closed = False doubles.is_closed = False assert not doubles.is_closed assert await anext(doubles) == 6 assert not doubles.is_closed - assert not iterator.is_closed + assert not iterable.is_closed with raises(StopAsyncIteration): - await anext(iterator) + await anext(iterable) with raises(StopAsyncIteration): await anext(doubles) assert not doubles.is_closed - assert not iterator.is_closed + assert not iterable.is_closed @mark.asyncio - async def can_cancel_async_iterator_while_waiting(): - class Iterator: + async def can_cancel_async_iterable_while_waiting(): + class Iterable: def __init__(self): self.is_closed = False self.value = 1 @@ -473,8 +473,8 @@ async def __anext__(self): async def 
aclose(self): self.is_closed = True - iterator = Iterator() - doubles = MapAsyncIterator(iterator, lambda x: x + x) # pragma: no cover exit + iterable = Iterable() + doubles = MapAsyncIterable(iterable, lambda x: x + x) # pragma: no cover exit cancelled = False async def iterator_task(): @@ -489,11 +489,11 @@ async def iterator_task(): await sleep(0.05) assert not cancelled assert not doubles.is_closed - assert iterator.value == 1 - assert not iterator.is_closed + assert iterable.value == 1 + assert not iterable.is_closed task.cancel() await sleep(0.05) assert cancelled - assert iterator.value == -1 + assert iterable.value == -1 assert doubles.is_closed - assert iterator.is_closed + assert iterable.is_closed diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 7ed931e6..73c489a5 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -5,7 +5,7 @@ from graphql.execution import ( ExecutionResult, - MapAsyncIterator, + MapAsyncIterable, create_source_event_stream, subscribe, ) @@ -178,11 +178,11 @@ async def accepts_positional_arguments(): """ ) - async def empty_async_iterator(_info): + async def empty_async_iterable(_info): for value in (): # type: ignore yield value # pragma: no cover - ai = subscribe(email_schema, document, {"importantEmail": empty_async_iterator}) + ai = subscribe(email_schema, document, {"importantEmail": empty_async_iterable}) with raises(StopAsyncIteration): await anext(ai) @@ -207,7 +207,7 @@ async def foo_generator(_info): subscription = subscribe( schema, parse("subscription { foo }"), {"foo": foo_generator} ) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"foo": "FooValue"}, None) @@ -227,7 +227,7 @@ async def foo_generator(_obj, _info): ) subscription = subscribe(schema, parse("subscription { foo }")) - assert isinstance(subscription, MapAsyncIterator) + assert 
isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"foo": "FooValue"}, None) @@ -255,7 +255,7 @@ async def subscribe_fn(obj, info): assert is_awaitable(awaitable) subscription = await awaitable - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"foo": "FooValue"}, None) @@ -285,7 +285,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover ) subscription = subscribe(schema, parse("subscription { foo bar }")) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ( {"foo": "FooValue", "bar": None}, @@ -465,10 +465,10 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) second_subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) payload1 = anext(subscription) payload2 = anext(second_subscription) @@ -499,7 +499,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) # Wait for the next subscription payload. 
payload = anext(subscription) @@ -577,7 +577,7 @@ async def produces_a_payload_per_subscription_event(): async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) payload = anext(subscription) @@ -633,7 +633,7 @@ async def produces_a_payload_when_there_are_multiple_events(): async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) payload = anext(subscription) @@ -683,7 +683,7 @@ async def should_not_trigger_when_subscription_is_already_done(): async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) payload = anext(subscription) @@ -724,7 +724,7 @@ async def should_not_trigger_when_subscription_is_thrown(): async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) payload = anext(subscription) @@ -804,7 +804,7 @@ def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -849,7 +849,7 @@ def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await 
anext(subscription) == ({"newMessage": "Hello"}, None) @@ -885,7 +885,7 @@ def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -915,6 +915,6 @@ async def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, MapAsyncIterable) assert await anext(subscription) == ({"newMessage": "Hello"}, None) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 74c0e56e..f361ac59 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -29,7 +29,7 @@ parse, subscribe, ) -from graphql.execution.map_async_iterator import MapAsyncIterator +from graphql.execution.map_async_iterable import MapAsyncIterable from graphql.pyutils import SimplePubSub, SimplePubSubIterator @@ -413,7 +413,7 @@ async def subscribe_to_user_mutations(context): subscription_one = subscribe( schema, parse(query), context_value=context, variable_values=variables ) - assert isinstance(subscription_one, MapAsyncIterator) + assert isinstance(subscription_one, MapAsyncIterable) query = """ subscription { @@ -425,7 +425,7 @@ async def subscribe_to_user_mutations(context): """ subscription_all = subscribe(schema, parse(query), context_value=context) - assert isinstance(subscription_all, MapAsyncIterator) + assert isinstance(subscription_all, MapAsyncIterable) received_one = [] received_all = [] From c33a49df224fdba7daaca32416094ae1291a8b45 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 8 Apr 2023 22:02:36 +0200 Subject: [PATCH 094/230] Minor change in test string But we do not use a Python counterpart to expectPromise. 
Replicates graphql/graphql-js@30d51c342c19f4e2269ab1e480c6caacadc61ec4 --- tests/execution/test_map_async_iterable.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index 77c2ecb4..6406f7dd 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -177,10 +177,11 @@ async def __anext__(self): assert await anext(doubles) == 4 # Throw error - with raises(RuntimeError, match="Ouch") as exc_info: - await doubles.athrow(RuntimeError("Ouch")) + message = "allows throwing errors when mapping async iterable" + with raises(RuntimeError) as exc_info: + await doubles.athrow(RuntimeError(message)) - assert str(exc_info.value) == "Ouch" + assert str(exc_info.value) == message with raises(StopAsyncIteration): await anext(doubles) From a9b95682d3d2cdaac483aa97ad705bf51ea06d0c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 10 Apr 2023 01:27:31 +0200 Subject: [PATCH 095/230] Update to latest version of pytest and fix tox.ini --- .github/workflows/test.yml | 2 +- poetry.lock | 32 ++++++++++++++------------------ pyproject.toml | 11 +++++++---- tox.ini | 10 +++++----- 4 files changed, 27 insertions(+), 28 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 90691aa1..ae9062bf 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -21,7 +21,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install tox tox-gh-actions + pip install "tox>=4.4,<5" "tox-gh-actions>=3.1,<4" - name: Run unit tests with tox run: tox diff --git a/poetry.lock b/poetry.lock index b0aaad0b..8809f11a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -558,14 +558,14 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", [[package]] name = "importlib-metadata" -version = "6.2.0" +version = "6.2.1" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.2.0-py3-none-any.whl", hash = "sha256:8388b74023a138c605fddd0d47cb81dd706232569f56c9aca7d9c7fdb54caeba"}, - {file = "importlib_metadata-6.2.0.tar.gz", hash = "sha256:9127aad2f49d7203e7112098c12b92e4fd1061ccd18548cdfdc49171a8c073cc"}, + {file = "importlib_metadata-6.2.1-py3-none-any.whl", hash = "sha256:f65e478a7c2177bd19517a3a15dac094d253446d8690c5f3e71e735a04312374"}, + {file = "importlib_metadata-6.2.1.tar.gz", hash = "sha256:5a66966b39ff1c14ef5b2d60c1d842b0141fefff0f4cc6365b4bc9446c652807"}, ] [package.dependencies] @@ -972,18 +972,17 @@ testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=6)", "pytest (>=7.2.1 [[package]] name = "pytest" -version = "7.2.2" +version = "7.3.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, - {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, + {file = "pytest-7.3.0-py3-none-any.whl", hash = "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201"}, + {file = "pytest-7.3.0.tar.gz", hash = "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d"}, ] [package.dependencies] -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -993,7 +992,7 @@ pluggy = ">=0.12,<2.0" tomli = 
{version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -1057,18 +1056,18 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-describe" -version = "2.0.1" +version = "2.1.0" description = "Describe-style plugin for pytest" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, - {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"}, + {file = "pytest-describe-2.1.0.tar.gz", hash = "sha256:0630c95ac4942ab8dcd8e766236f86436b4984896db0c059fc234fef66fe9732"}, + {file = "pytest_describe-2.1.0-py3-none-any.whl", hash = "sha256:3ea587839363a91ea24e35e442dae46b56bd91d670e63b755e002b0adfc7a7b2"}, ] [package.dependencies] -pytest = ">=4.0.0" +pytest = ">=4.6,<8" [[package]] name = "pytest-timeout" @@ -1548,13 +1547,11 @@ cachetools = ">=5.3" chardet = ">=5.1" colorama = ">=0.4.6" filelock = ">=3.10.7" -importlib-metadata = {version = ">=6.1", markers = "python_version < \"3.8\""} packaging = ">=23" platformdirs = ">=3.2" pluggy = ">=1" pyproject-api = ">=1.5.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} virtualenv = ">=20.21" [package.extras] @@ -1662,7 +1659,6 @@ files = [ [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""} platformdirs = ">=2.4,<4" [package.extras] @@ -1688,4 
+1684,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "dde11b1b0c0576196aa4313eb677c0006d301b1c6cf50e59ab458387f3b92240" +content-hash = "a5cc331810d7e3ec67b6e819430615c332861ed9b5663688780059d4651fec08" diff --git a/pyproject.toml b/pyproject.toml index 71bfa735..abbd7f33 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,13 +49,16 @@ typing-extensions = [ optional = true [tool.poetry.group.test.dependencies] -pytest = "^7.2" +pytest = "^7.3" pytest-asyncio = ">=0.21,<1" pytest-benchmark = "^4.0" pytest-cov = "^4.0" -pytest-describe = "^2.0" +pytest-describe = "^2.1" pytest-timeout = "^2.1" -tox = ">=3.0" +tox = [ + { version = ">=4.4,<5", python = ">=3.8" }, + { version = ">=3.28,<4", python = "<3.8" } +] [tool.poetry.group.lint] optional = true @@ -146,7 +149,7 @@ module = [ disallow_untyped_defs = false [tool.pytest.ini_options] -minversion = "7.2" +minversion = "7.3" # Only run benchmarks as tests. # To actually run the benchmarks, use --benchmark-enable on the command line. # To run the slow tests (fuzzing), add --run-slow on the command line. 
diff --git a/tox.ini b/tox.ini index a6b2cf5a..3e5d2e03 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ isolated_build = true [gh-actions] python = - 3: py39 + 3: py311 3.7: py37 3.8: py38 3.9: py39 @@ -38,7 +38,7 @@ commands = basepython = python3.11 deps = mypy==1.2.0 - pytest>=7.2,<8 + pytest>=7.3,<8 commands = mypy src tests @@ -52,15 +52,15 @@ commands = [testenv] deps = - pytest>=7.2,<8 + pytest>=7.3,<8 pytest-asyncio>=0.21,<1 pytest-benchmark>=4,<5 pytest-cov>=4,<5 - pytest-describe>=2,<3 + pytest-describe>=2.1,<3 pytest-timeout>=2.1,<3 py37,py38,py39,pypy39: typing-extensions>=4.5,<5 commands = # to also run the time-consuming tests: tox -e py310 -- --run-slow # to run the benchmarks: tox -e py310 -- -k benchmarks --benchmark-enable - py37,py38.py39,py311,pypy39: pytest tests {posargs} + py37,py38,py39,py311,pypy39: pytest tests {posargs} py310: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 62749e5a397946ef6e8186e759360aad3cb7715b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 21 May 2023 19:51:16 +0200 Subject: [PATCH 096/230] Reference implementation of defer and stream spec Replicates graphql/graphql-js@1f2c8436a423e225d101a665c436c828c8b8f6dd --- docs/conf.py | 8 + docs/modules/execution.rst | 31 + docs/modules/type.rst | 3 + poetry.lock | 310 ++-- pyproject.toml | 2 +- src/graphql/__init__.py | 4 + src/graphql/execution/__init__.py | 32 + src/graphql/execution/collect_fields.py | 152 +- src/graphql/execution/execute.py | 1517 ++++++++++++++-- .../execution/flatten_async_iterable.py | 36 + src/graphql/graphql.py | 2 + src/graphql/language/visitor.py | 4 +- src/graphql/pyutils/__init__.py | 2 + src/graphql/pyutils/async_reduce.py | 40 + src/graphql/pyutils/simple_pub_sub.py | 4 +- src/graphql/type/__init__.py | 4 + src/graphql/type/directives.py | 41 +- src/graphql/validation/__init__.py | 12 + .../rules/defer_stream_directive_label.py | 62 + 
.../defer_stream_directive_on_root_field.py | 68 + .../rules/overlapping_fields_can_be_merged.py | 42 +- .../rules/single_field_subscriptions.py | 2 +- .../rules/stream_directive_on_list_field.py | 57 + src/graphql/validation/specified_rules.py | 14 +- tests/execution/test_customize.py | 8 +- tests/execution/test_defer.py | 926 ++++++++++ .../execution/test_flatten_async_iterable.py | 131 ++ tests/execution/test_mutations.py | 148 +- tests/execution/test_stream.py | 1573 +++++++++++++++++ tests/execution/test_subscribe.py | 329 +++- tests/execution/test_sync.py | 34 + tests/language/test_visitor.py | 6 +- tests/pyutils/test_async_reduce.py | 64 + tests/test_user_registry.py | 9 +- .../test_assert_equal_awaitables_or_values.py | 1 + tests/validation/harness.py | 2 +- .../test_defer_stream_directive_label.py | 189 ++ ...st_defer_stream_directive_on_root_field.py | 284 +++ .../test_overlapping_fields_can_be_merged.py | 110 ++ .../test_stream_directive_on_list_field.py | 83 + tox.ini | 2 +- 41 files changed, 5951 insertions(+), 397 deletions(-) create mode 100644 src/graphql/execution/flatten_async_iterable.py create mode 100644 src/graphql/pyutils/async_reduce.py create mode 100644 src/graphql/validation/rules/defer_stream_directive_label.py create mode 100644 src/graphql/validation/rules/defer_stream_directive_on_root_field.py create mode 100644 src/graphql/validation/rules/stream_directive_on_list_field.py create mode 100644 tests/execution/test_defer.py create mode 100644 tests/execution/test_flatten_async_iterable.py create mode 100644 tests/execution/test_stream.py create mode 100644 tests/pyutils/test_async_reduce.py create mode 100644 tests/validation/test_defer_stream_directive_label.py create mode 100644 tests/validation/test_defer_stream_directive_on_root_field.py create mode 100644 tests/validation/test_stream_directive_on_list_field.py diff --git a/docs/conf.py b/docs/conf.py index 3f937283..28ac1c71 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -138,8 
+138,10 @@ traceback types.TracebackType TypeMap +AsyncPayloadRecord AwaitableOrValue EnterLeaveVisitor +ExperimentalExecuteIncrementallyResults FormattedSourceLocation GraphQLAbstractType GraphQLErrorExtensions @@ -148,8 +150,14 @@ GraphQLOutputType Middleware asyncio.events.AbstractEventLoop +graphql.execution.collect_fields.FieldsAndPatches graphql.execution.map_async_iterable.MapAsyncIterable graphql.execution.Middleware +graphql.execution.execute.DeferredFragmentRecord +graphql.execution.execute.ExperimentalExecuteMultipleResults +graphql.execution.execute.ExperimentalExecuteSingleResult +graphql.execution.execute.StreamArguments +graphql.execution.execute.StreamRecord graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor graphql.type.schema.InterfaceImplementations diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst index 874126a6..82147930 100644 --- a/docs/modules/execution.rst +++ b/docs/modules/execution.rst @@ -9,6 +9,8 @@ Execution .. autofunction:: execute +.. autofunction:: experimental_execute_incrementally + .. autofunction:: execute_sync .. autofunction:: default_field_resolver @@ -22,8 +24,37 @@ Execution .. autoclass:: FormattedExecutionResult :no-inherited-members: +.. autoclass:: ExperimentalExecuteIncrementallyResults + +.. autoclass:: InitialIncrementalExecutionResult + +.. autoclass:: FormattedInitialIncrementalExecutionResult + :no-inherited-members: + +.. autoclass:: SubsequentIncrementalExecutionResult + +.. autoclass:: FormattedSubsequentIncrementalExecutionResult + :no-inherited-members: + +.. autoclass:: IncrementalDeferResult + +.. autoclass:: FormattedIncrementalDeferResult + :no-inherited-members: + +.. autoclass:: IncrementalStreamResult + +.. autoclass:: FormattedIncrementalStreamResult + :no-inherited-members: + +.. autoclass:: IncrementalResult + +.. autoclass:: FormattedIncrementalResult + :no-inherited-members: + .. autofunction:: subscribe +.. 
autofunction:: experimental_subscribe_incrementally + .. autofunction:: create_source_event_stream .. autoclass:: MapAsyncIterable diff --git a/docs/modules/type.rst b/docs/modules/type.rst index 393cb362..d3c3b4b8 100644 --- a/docs/modules/type.rst +++ b/docs/modules/type.rst @@ -122,7 +122,10 @@ Definitions .. autoclass:: GraphQLDirective .. autoclass:: GraphQLIncludeDirective .. autoclass:: GraphQLSkipDirective +.. autoclass:: GraphQLDeferDirective +.. autoclass:: GraphQLStreamDirective .. autoclass:: GraphQLDeprecatedDirective +.. autoclass:: GraphQLSpecifiedByDirective .. data:: specified_directives diff --git a/poetry.lock b/poetry.lock index 8809f11a..c1fb6f40 100644 --- a/poetry.lock +++ b/poetry.lock @@ -26,22 +26,25 @@ files = [ [[package]] name = "attrs" -version = "22.2.0" +version = "23.1.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, - {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + [package.extras] -cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] -tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", 
"pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "babel" @@ -159,14 +162,14 @@ files = [ [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, ] [[package]] @@ -296,63 +299,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.3" +version = "7.2.5" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, - {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, - {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, - {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, - {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, - {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, - {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, - {file = 
"coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, - {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, - {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, - {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, - {file = 
"coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, - {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, - {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, - {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, - {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, + {file = 
"coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, + {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, + {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, + {file = 
"coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, + {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, + {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, + {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, + {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, + {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, + {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, + {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, + {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, + {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, + {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, + {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, ] [package.dependencies] @@ -414,19 +417,19 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.11.0" +version = "3.12.0" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.11.0-py3-none-any.whl", hash = "sha256:f08a52314748335c6460fc8fe40cd5638b85001225db78c2aa01c8c0db83b318"}, - {file = "filelock-3.11.0.tar.gz", hash = "sha256:3618c0da67adcc0506b015fd11ef7faf1b493f0b40d87728e19986b536890c37"}, + {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, + {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -558,14 +561,14 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", [[package]] name = "importlib-metadata" -version = "6.2.1" +version = "6.6.0" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.2.1-py3-none-any.whl", hash = "sha256:f65e478a7c2177bd19517a3a15dac094d253446d8690c5f3e71e735a04312374"}, - {file = "importlib_metadata-6.2.1.tar.gz", hash = "sha256:5a66966b39ff1c14ef5b2d60c1d842b0141fefff0f4cc6365b4bc9446c652807"}, + {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, + {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, ] 
[package.dependencies] @@ -755,38 +758,38 @@ files = [ [[package]] name = "mypy" -version = "1.2.0" +version = "1.3.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"}, - {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"}, - {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"}, - {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"}, - {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"}, - {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"}, - {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"}, - {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"}, - {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"}, - {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"}, - {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"}, - {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"}, - 
{file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"}, - {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"}, - {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"}, - {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"}, - {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"}, - {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"}, - {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"}, - {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"}, - {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"}, - {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"}, - {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"}, - {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"}, - {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"}, - {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"}, + {file = 
"mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, + {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, + {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, + {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, + {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, + {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, + {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, + {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, + {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, + {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, + {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, + {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, + {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, + {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, + {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, + {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, + {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, + {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, + {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, + {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, + {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, + {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, + {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, ] [package.dependencies] @@ -815,14 +818,14 @@ files = [ [[package]] name = "packaging" -version = "23.0" +version = "23.1" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file 
= "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] @@ -851,22 +854,22 @@ files = [ [[package]] name = "platformdirs" -version = "3.2.0" +version = "3.5.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"}, - {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"}, + {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, + {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, ] [package.dependencies] typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -937,14 +940,14 @@ files = [ [[package]] name = "pygments" -version = "2.14.0" 
+version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, - {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, + {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, + {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, ] [package.extras] @@ -972,14 +975,14 @@ testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=6)", "pytest (>=7.2.1 [[package]] name = "pytest" -version = "7.3.0" +version = "7.3.1" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.0-py3-none-any.whl", hash = "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201"}, - {file = "pytest-7.3.0.tar.gz", hash = "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d"}, + {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, + {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, ] [package.dependencies] @@ -1148,21 +1151,21 @@ files = [ [[package]] name = "requests" -version = "2.28.2" +version = "2.30.0" description = "Python HTTP for Humans." 
category = "dev" optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, + {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1170,14 +1173,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.3.3" +version = "13.3.5" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" category = "dev" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.3.3-py3-none-any.whl", hash = "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333"}, - {file = "rich-13.3.3.tar.gz", hash = "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15"}, + {file = "rich-13.3.5-py3-none-any.whl", hash = "sha256:69cdf53799e63f38b95b9bf9c875f8c90e78dd62b2f00c13a911c7a3b9fa4704"}, + {file = "rich-13.3.5.tar.gz", hash = "sha256:2d11b9b8dd03868f09b4fffadc84a6a8cda574e40dc90821bd845720ebb8e89c"}, ] [package.dependencies] @@ -1190,19 +1193,19 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "67.6.1" +version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.6.1-py3-none-any.whl", hash = 
"sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, - {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, + {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, + {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1279,21 +1282,21 @@ test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinx" -version 
= "6.1.3" +version = "6.2.1" description = "Python documentation generator" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2"}, - {file = "sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc"}, + {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"}, + {file = "sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18,<0.20" +docutils = ">=0.18.1,<0.20" imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" @@ -1311,7 +1314,7 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] -test = ["cython", "html5lib", "pytest (>=4.6)"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-rtd-theme" @@ -1532,22 +1535,22 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.4.11" +version = "4.5.1" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tox-4.4.11-py3-none-any.whl", hash = "sha256:6fa4dbd933d0e335b5392c81e9cd467630119b3669705dbad47814a93b6c9586"}, - {file = "tox-4.4.11.tar.gz", hash = "sha256:cd88e41aef9c71f0ba02b6d7939f102760b192b63458fbe04dbbaed82f7bf5f5"}, + {file = "tox-4.5.1-py3-none-any.whl", hash = 
"sha256:d25a2e6cb261adc489604fafd76cd689efeadfa79709965e965668d6d3f63046"}, + {file = "tox-4.5.1.tar.gz", hash = "sha256:5a2eac5fb816779dfdf5cb00fecbc27eb0524e4626626bb1de84747b24cacc56"}, ] [package.dependencies] cachetools = ">=5.3" chardet = ">=5.1" colorama = ">=0.4.6" -filelock = ">=3.10.7" -packaging = ">=23" +filelock = ">=3.11" +packaging = ">=23.1" platformdirs = ">=3.2" pluggy = ">=1" pyproject-api = ">=1.5.1" @@ -1555,8 +1558,8 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} virtualenv = ">=20.21" [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)", "sphinx-copybutton (>=0.5.1)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "devpi-process (>=0.3)", "diff-cover (>=7.5)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.13)", "psutil (>=5.9.4)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.2.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.40)"] +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "devpi-process (>=0.3)", "diff-cover (>=7.5)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.14)", "psutil (>=5.9.4)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.2.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.40)"] [[package]] name = "typed-ast" @@ -1606,20 +1609,21 @@ files = [ [[package]] name = "urllib3" -version = "1.26.15" +version = "2.0.2" description = "HTTP library with thread-safe connection 
pooling, file post, and more." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, + {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" @@ -1646,24 +1650,24 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", [[package]] name = "virtualenv" -version = "20.21.0" +version = "20.23.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, - {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, + {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, + {file = "virtualenv-20.23.0.tar.gz", hash = 
"sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.11,<4" +platformdirs = ">=3.2,<4" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] [[package]] name = "zipp" @@ -1684,4 +1688,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "a5cc331810d7e3ec67b6e819430615c332861ed9b5663688780059d4651fec08" +content-hash = "bbf381c53af408205d5513ac3af5a6f6ca4b1f08ab4c7c6863d93262fcdab2a6" diff --git a/pyproject.toml b/pyproject.toml index abbd7f33..c8b6ba18 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ isort = [ { version = "^5.12", python = ">=3.8" }, { version = "^5.11", python = "<3.8" } ] -mypy = "1.2.0" +mypy = "1.3.0" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 4880dfab..cb946aba 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ 
-253,6 +253,8 @@ specified_directives, GraphQLIncludeDirective, GraphQLSkipDirective, + GraphQLDeferDirective, + GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, # "Enum" of Type Kinds @@ -485,6 +487,8 @@ "specified_directives", "GraphQLIncludeDirective", "GraphQLSkipDirective", + "GraphQLDeferDirective", + "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", "TypeKind", diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 951ec8f1..6487c33d 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -5,15 +5,31 @@ """ from .execute import ( + ASYNC_DELAY, create_source_event_stream, execute, + experimental_execute_incrementally, execute_sync, default_field_resolver, default_type_resolver, subscribe, + experimental_subscribe_incrementally, ExecutionContext, ExecutionResult, + ExperimentalExecuteIncrementallyResults, + ExperimentalExecuteMultipleResults, + ExperimentalExecuteSingleResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + IncrementalDeferResult, + IncrementalStreamResult, + IncrementalResult, FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, + FormattedIncrementalDeferResult, + FormattedIncrementalStreamResult, + FormattedIncrementalResult, Middleware, ) from .map_async_iterable import MapAsyncIterable @@ -21,15 +37,31 @@ from .values import get_argument_values, get_directive_values, get_variable_values __all__ = [ + "ASYNC_DELAY", "create_source_event_stream", "execute", + "experimental_execute_incrementally", "execute_sync", "default_field_resolver", "default_type_resolver", "subscribe", + "experimental_subscribe_incrementally", "ExecutionContext", "ExecutionResult", + "ExperimentalExecuteIncrementallyResults", + "ExperimentalExecuteMultipleResults", + "ExperimentalExecuteSingleResult", + 
"InitialIncrementalExecutionResult", + "SubsequentIncrementalExecutionResult", + "IncrementalDeferResult", + "IncrementalStreamResult", + "IncrementalResult", "FormattedExecutionResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "FormattedIncrementalDeferResult", + "FormattedIncrementalStreamResult", + "FormattedIncrementalResult", "MapAsyncIterable", "Middleware", "MiddlewareManager", diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 04eefe21..8330b634 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -1,5 +1,5 @@ from collections import defaultdict -from typing import Any, Dict, List, Set, Union +from typing import Any, Dict, List, NamedTuple, Optional, Set, Union from ..language import ( FieldNode, @@ -9,6 +9,7 @@ SelectionSetNode, ) from ..type import ( + GraphQLDeferDirective, GraphQLIncludeDirective, GraphQLObjectType, GraphQLSchema, @@ -19,7 +20,21 @@ from .values import get_directive_values -__all__ = ["collect_fields", "collect_subfields"] +__all__ = ["collect_fields", "collect_subfields", "FieldsAndPatches"] + + +class PatchFields(NamedTuple): + """Optionally labelled set of fields to be used as a patch.""" + + label: Optional[str] + fields: Dict[str, List[FieldNode]] + + +class FieldsAndPatches(NamedTuple): + """Tuple of collected fields and patches to be applied.""" + + fields: Dict[str, List[FieldNode]] + patches: List[PatchFields] def collect_fields( @@ -28,7 +43,7 @@ def collect_fields( variable_values: Dict[str, Any], runtime_type: GraphQLObjectType, selection_set: SelectionSetNode, -) -> Dict[str, List[FieldNode]]: +) -> FieldsAndPatches: """Collect fields. Given a selection_set, collects all the fields and returns them. @@ -40,10 +55,18 @@ def collect_fields( For internal use only. 
""" fields: Dict[str, List[FieldNode]] = defaultdict(list) + patches: List[PatchFields] = [] collect_fields_impl( - schema, fragments, variable_values, runtime_type, selection_set, fields, set() + schema, + fragments, + variable_values, + runtime_type, + selection_set, + fields, + patches, + set(), ) - return fields + return FieldsAndPatches(fields, patches) def collect_subfields( @@ -52,7 +75,7 @@ def collect_subfields( variable_values: Dict[str, Any], return_type: GraphQLObjectType, field_nodes: List[FieldNode], -) -> Dict[str, List[FieldNode]]: +) -> FieldsAndPatches: """Collect subfields. Given a list of field nodes, collects all the subfields of the passed in fields, @@ -66,6 +89,10 @@ def collect_subfields( """ sub_field_nodes: Dict[str, List[FieldNode]] = defaultdict(list) visited_fragment_names: Set[str] = set() + + sub_patches: List[PatchFields] = [] + sub_fields_and_patches = FieldsAndPatches(sub_field_nodes, sub_patches) + for node in field_nodes: if node.selection_set: collect_fields_impl( @@ -75,9 +102,10 @@ def collect_subfields( return_type, node.selection_set, sub_field_nodes, + sub_patches, visited_fragment_names, ) - return sub_field_nodes + return sub_fields_and_patches def collect_fields_impl( @@ -87,9 +115,12 @@ def collect_fields_impl( runtime_type: GraphQLObjectType, selection_set: SelectionSetNode, fields: Dict[str, List[FieldNode]], + patches: List[PatchFields], visited_fragment_names: Set[str], ) -> None: """Collect fields (internal implementation).""" + patch_fields: Dict[str, List[FieldNode]] + for selection in selection_set.selections: if isinstance(selection, FieldNode): if not should_include_node(variable_values, selection): @@ -100,36 +131,98 @@ def collect_fields_impl( variable_values, selection ) or not does_fragment_condition_match(schema, selection, runtime_type): continue - collect_fields_impl( - schema, - fragments, - variable_values, - runtime_type, - selection.selection_set, - fields, - visited_fragment_names, - ) + + defer = 
get_defer_values(variable_values, selection) + if defer: + patch_fields = defaultdict(list) + collect_fields_impl( + schema, + fragments, + variable_values, + runtime_type, + selection.selection_set, + patch_fields, + patches, + visited_fragment_names, + ) + patches.append(PatchFields(defer.label, patch_fields)) + else: + collect_fields_impl( + schema, + fragments, + variable_values, + runtime_type, + selection.selection_set, + fields, + patches, + visited_fragment_names, + ) elif isinstance(selection, FragmentSpreadNode): # pragma: no cover else frag_name = selection.name.value - if frag_name in visited_fragment_names or not should_include_node( - variable_values, selection - ): + + if not should_include_node(variable_values, selection): continue - visited_fragment_names.add(frag_name) + + defer = get_defer_values(variable_values, selection) + if frag_name in visited_fragment_names and not defer: + continue + fragment = fragments.get(frag_name) if not fragment or not does_fragment_condition_match( schema, fragment, runtime_type ): continue - collect_fields_impl( - schema, - fragments, - variable_values, - runtime_type, - fragment.selection_set, - fields, - visited_fragment_names, - ) + + if not defer: + visited_fragment_names.add(frag_name) + + if defer: + patch_fields = defaultdict(list) + collect_fields_impl( + schema, + fragments, + variable_values, + runtime_type, + fragment.selection_set, + patch_fields, + patches, + visited_fragment_names, + ) + patches.append(PatchFields(defer.label, patch_fields)) + else: + collect_fields_impl( + schema, + fragments, + variable_values, + runtime_type, + fragment.selection_set, + fields, + patches, + visited_fragment_names, + ) + + +class DeferValues(NamedTuple): + """Values of an active defer directive.""" + + label: Optional[str] + + +def get_defer_values( + variable_values: Dict[str, Any], node: Union[FragmentSpreadNode, InlineFragmentNode] +) -> Optional[DeferValues]: + """Get values of defer directive if active. 
+ + Returns an object containing the `@defer` arguments if a field should be + deferred based on the experimental flag, defer directive present and + not disabled by the "if" argument. + """ + defer = get_directive_values(GraphQLDeferDirective, node, variable_values) + + if not defer or defer.get("if") is False: + return None + + return DeferValues(defer.get("label")) def should_include_node( @@ -165,6 +258,7 @@ def does_fragment_condition_match( if conditional_type is type_: return True if is_abstract_type(conditional_type): + # noinspection PyTypeChecker return schema.is_sub_type(conditional_type, type_) return False diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index dd06b239..dd69658f 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1,18 +1,22 @@ from __future__ import annotations # Python < 3.10 -from asyncio import ensure_future, gather +from asyncio import Event, as_completed, ensure_future, gather, shield, sleep, wait_for from collections.abc import Mapping from inspect import isawaitable from typing import ( Any, + AsyncGenerator, AsyncIterable, AsyncIterator, Awaitable, Callable, Dict, + Generator, Iterable, List, + NamedTuple, Optional, + Sequence, Tuple, Type, Union, @@ -28,6 +32,11 @@ from typing import TypeAlias, TypeGuard except ImportError: # Python < 3.10 from typing_extensions import TypeAlias, TypeGuard +try: # only needed for Python < 3.11 + # noinspection PyCompatibility + from asyncio.exceptions import TimeoutError +except ImportError: # Python < 3.7 + from concurrent.futures import TimeoutError # type: ignore from ..error import GraphQLError, GraphQLFormattedError, located_error from ..language import ( @@ -37,7 +46,7 @@ OperationDefinitionNode, OperationType, ) -from ..pyutils import AwaitableOrValue, Path, Undefined, inspect +from ..pyutils import AwaitableOrValue, Path, Undefined, async_reduce, inspect from ..pyutils import is_awaitable as default_is_awaitable from 
..pyutils import is_iterable from ..type import ( @@ -50,6 +59,7 @@ GraphQLOutputType, GraphQLResolveInfo, GraphQLSchema, + GraphQLStreamDirective, GraphQLTypeResolver, assert_valid_schema, is_abstract_type, @@ -58,10 +68,14 @@ is_non_null_type, is_object_type, ) -from .collect_fields import collect_fields, collect_subfields +from .collect_fields import FieldsAndPatches, collect_fields, collect_subfields +from .flatten_async_iterable import flatten_async_iterable from .map_async_iterable import MapAsyncIterable from .middleware import MiddlewareManager -from .values import get_argument_values, get_variable_values +from .values import get_argument_values, get_directive_values, get_variable_values + + +ASYNC_DELAY = 1 / 512 # wait time in seconds for deferring execution try: # pragma: no cover @@ -74,16 +88,35 @@ async def anext(iterator: AsyncIterator) -> Any: __all__ = [ + "ASYNC_DELAY", "create_source_event_stream", "default_field_resolver", "default_type_resolver", "execute", "execute_sync", + "experimental_execute_incrementally", + "experimental_subscribe_incrementally", "subscribe", + "AsyncPayloadRecord", + "DeferredFragmentRecord", + "StreamRecord", "ExecutionResult", "ExecutionContext", + "ExperimentalExecuteIncrementallyResults", + "ExperimentalExecuteMultipleResults", + "ExperimentalExecuteSingleResult", "FormattedExecutionResult", + "FormattedIncrementalDeferResult", + "FormattedIncrementalResult", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "IncrementalDeferResult", + "IncrementalResult", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", "Middleware", + "SubsequentIncrementalExecutionResult", ] @@ -109,8 +142,8 @@ async def anext(iterator: AsyncIterator) -> Any: class FormattedExecutionResult(TypedDict, total=False): """Formatted execution result""" - errors: List[GraphQLFormattedError] data: Optional[Dict[str, Any]] + errors: 
List[GraphQLFormattedError] extensions: Dict[str, Any] @@ -178,6 +211,411 @@ def __ne__(self, other: Any) -> bool: return not self == other +class FormattedIncrementalDeferResult(TypedDict, total=False): + """Formatted incremental deferred execution result""" + + data: Optional[Dict[str, Any]] + errors: List[GraphQLFormattedError] + path: List[Union[str, int]] + label: str + extensions: Dict[str, Any] + + +class IncrementalDeferResult: + """Incremental deferred execution result""" + + data: Optional[Dict[str, Any]] + errors: Optional[List[GraphQLError]] + path: Optional[List[Union[str, int]]] + label: Optional[str] + extensions: Optional[Dict[str, Any]] + + __slots__ = "data", "errors", "path", "label", "extensions" + + def __init__( + self, + data: Optional[Dict[str, Any]] = None, + errors: Optional[List[GraphQLError]] = None, + path: Optional[List[Union[str, int]]] = None, + label: Optional[str] = None, + extensions: Optional[Dict[str, Any]] = None, + ): + self.data = data + self.errors = errors + self.path = path + self.label = label + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: List[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.path: + args.append(f"path={self.path!r}") + if self.label: + args.append(f"label={self.label!r}") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedIncrementalDeferResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedIncrementalDeferResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.path is not None: + formatted["path"] = self.path + if self.label is not None: + formatted["label"] = self.label + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: Any) -> 
bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and other.get("errors") == self.errors + and ("path" not in other or other["path"] == self.path) + and ("label" not in other or other["label"] == self.label) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and (self.data, self.errors, self.path, self.label, self.extensions)[ + :size + ] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.path == self.path + and other.label == self.label + and other.extensions == self.extensions + ) + + def __ne__(self, other: Any) -> bool: + return not self == other + + +class FormattedIncrementalStreamResult(TypedDict, total=False): + """Formatted incremental stream execution result""" + + items: Optional[List[Any]] + errors: List[GraphQLFormattedError] + path: List[Union[str, int]] + label: str + extensions: Dict[str, Any] + + +class IncrementalStreamResult: + """Incremental streamed execution result""" + + items: Optional[List[Any]] + errors: Optional[List[GraphQLError]] + path: Optional[List[Union[str, int]]] + label: Optional[str] + extensions: Optional[Dict[str, Any]] + + __slots__ = "items", "errors", "path", "label", "extensions" + + def __init__( + self, + items: Optional[List[Any]] = None, + errors: Optional[List[GraphQLError]] = None, + path: Optional[List[Union[str, int]]] = None, + label: Optional[str] = None, + extensions: Optional[Dict[str, Any]] = None, + ): + self.items = items + self.errors = errors + self.path = path + self.label = label + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: List[str] = [f"items={self.items!r}, errors={self.errors!r}"] + if self.path: + args.append(f"path={self.path!r}") + if self.label: + args.append(f"label={self.label!r}") + if self.extensions: + 
args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedIncrementalStreamResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedIncrementalStreamResult = {"items": self.items} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.path is not None: + formatted["path"] = self.path + if self.label is not None: + formatted["label"] = self.label + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: Any) -> bool: + if isinstance(other, dict): + return ( + other.get("items") == self.items + and other.get("errors") == self.errors + and ("path" not in other or other["path"] == self.path) + and ("label" not in other or other["label"] == self.label) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and (self.items, self.errors, self.path, self.label, self.extensions)[ + :size + ] + == other + ) + return ( + isinstance(other, self.__class__) + and other.items == self.items + and other.errors == self.errors + and other.path == self.path + and other.label == self.label + and other.extensions == self.extensions + ) + + def __ne__(self, other: Any) -> bool: + return not self == other + + +FormattedIncrementalResult = Union[ + FormattedIncrementalDeferResult, FormattedIncrementalStreamResult +] + +IncrementalResult = Union[IncrementalDeferResult, IncrementalStreamResult] + + +class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): + """Formatted initial incremental execution result""" + + data: Optional[Dict[str, Any]] + errors: List[GraphQLFormattedError] + hasNext: bool + incremental: List[FormattedIncrementalResult] + extensions: Dict[str, Any] + + +class InitialIncrementalExecutionResult: + """Initial 
incremental execution result. + + - ``has_next`` is True if a future payload is expected. + - ``incremental`` is a list of the results from defer/stream directives. + """ + + data: Optional[Dict[str, Any]] + errors: Optional[List[GraphQLError]] + incremental: Optional[Sequence[IncrementalResult]] + has_next: bool + extensions: Optional[Dict[str, Any]] + + __slots__ = "data", "errors", "has_next", "incremental", "extensions" + + def __init__( + self, + data: Optional[Dict[str, Any]] = None, + errors: Optional[List[GraphQLError]] = None, + incremental: Optional[Sequence[IncrementalResult]] = None, + has_next: bool = False, + extensions: Optional[Dict[str, Any]] = None, + ): + self.data = data + self.errors = errors + self.incremental = incremental + self.has_next = has_next + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: List[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.has_next: + args.append("has_next") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedInitialIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.incremental: + formatted["incremental"] = [result.formatted for result in self.incremental] + formatted["hasNext"] = self.has_next + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: Any) -> bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and other.get("errors") == self.errors + and ( + "incremental" not in other + or other["incremental"] == self.incremental + ) + and 
("hasNext" not in other or other["hasNext"] == self.has_next) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and ( + self.data, + self.errors, + self.incremental, + self.has_next, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.incremental == self.incremental + and other.has_next == self.has_next + and other.extensions == self.extensions + ) + + def __ne__(self, other: Any) -> bool: + return not self == other + + +class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): + """Formatted subsequent incremental execution result""" + + incremental: List[FormattedIncrementalResult] + hasNext: bool + extensions: Dict[str, Any] + + +class SubsequentIncrementalExecutionResult: + """Subsequent incremental execution result. + + - ``has_next`` is True if a future payload is expected. + - ``incremental`` is a list of the results from defer/stream directives. 
+ """ + + __slots__ = "has_next", "incremental", "extensions" + + incremental: Optional[Sequence[IncrementalResult]] + has_next: bool + extensions: Optional[Dict[str, Any]] + + def __init__( + self, + incremental: Optional[Sequence[IncrementalResult]] = None, + has_next: bool = False, + extensions: Optional[Dict[str, Any]] = None, + ) -> None: + self.incremental = incremental + self.has_next = has_next + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: List[str] = [] + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.has_next: + args.append("has_next") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedSubsequentIncrementalExecutionResult = {} + if self.incremental: + formatted["incremental"] = [result.formatted for result in self.incremental] + formatted["hasNext"] = self.has_next + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: Any) -> bool: + if isinstance(other, dict): + return ( + ("incremental" not in other or other["incremental"] == self.incremental) + and ("hasNext" in other and other["hasNext"] == self.has_next) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 4 + and ( + self.incremental, + self.has_next, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.incremental == self.incremental + and other.has_next == self.has_next + and other.extensions == self.extensions + ) + + def __ne__(self, other: Any) -> bool: + return not self == other + + +class StreamArguments(NamedTuple): + """Arguments of the 
stream directive""" + + initial_count: int + label: Optional[str] + + +class ExperimentalExecuteSingleResult(NamedTuple): + """Execution result when retrieved at once.""" + + single_result: ExecutionResult + + +class ExperimentalExecuteMultipleResults(NamedTuple): + """Execution results when retrieved incrementally.""" + + initial_result: InitialIncrementalExecutionResult + subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] + + +ExperimentalExecuteIncrementallyResults = Union[ + ExperimentalExecuteSingleResult, ExperimentalExecuteMultipleResults +] + + Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] @@ -198,6 +636,7 @@ class ExecutionContext: type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver errors: List[GraphQLError] + subsequent_payloads: Dict[AsyncPayloadRecord, None] # used as ordered set middleware_manager: Optional[MiddlewareManager] is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( @@ -215,6 +654,7 @@ def __init__( field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, + subsequent_payloads: Dict[AsyncPayloadRecord, None], errors: List[GraphQLError], middleware_manager: Optional[MiddlewareManager], is_awaitable: Optional[Callable[[Any], bool]], @@ -228,11 +668,12 @@ def __init__( self.field_resolver = field_resolver self.type_resolver = type_resolver self.subscribe_field_resolver = subscribe_field_resolver + self.subsequent_payloads = subsequent_payloads self.errors = errors self.middleware_manager = middleware_manager if is_awaitable: self.is_awaitable = is_awaitable - self._subfields_cache: Dict[Tuple, Dict[str, List[FieldNode]]] = {} + self._subfields_cache: Dict[Tuple, FieldsAndPatches] = {} @classmethod def build( @@ -317,6 +758,7 @@ def build( field_resolver or default_field_resolver, type_resolver or default_type_resolver, subscribe_field_resolver or default_field_resolver, + {}, [], 
middleware_manager, is_awaitable, @@ -352,12 +794,13 @@ def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: self.field_resolver, self.type_resolver, self.subscribe_field_resolver, + {}, [], self.middleware_manager, self.is_awaitable, ) - def execute_operation(self) -> AwaitableOrValue[Any]: + def execute_operation(self) -> AwaitableOrValue[Dict[str, Any]]: """Execute an operation. Implements the "Executing operations" section of the spec. @@ -372,7 +815,7 @@ def execute_operation(self) -> AwaitableOrValue[Any]: operation, ) - root_fields = collect_fields( + root_fields, patches = collect_fields( schema, self.fragments, self.variable_values, @@ -380,11 +823,23 @@ def execute_operation(self) -> AwaitableOrValue[Any]: operation.selection_set, ) - return ( + root_value = self.root_value + # noinspection PyTypeChecker + result = ( self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, self.root_value, None, root_fields) + )( + root_type, root_value, None, root_fields + ) # type: ignore + + for patch in patches: + label, patch_fields = patch + self.execute_deferred_fragment( + root_type, root_value, patch_fields, label, None + ) + + return result def execute_fields_serially( self, @@ -398,47 +853,33 @@ def execute_fields_serially( Implements the "Executing selection sets" section of the spec for fields that must be executed serially. 
""" - results: AwaitableOrValue[Dict[str, Any]] = {} is_awaitable = self.is_awaitable - for response_name, field_nodes in fields.items(): + + def reducer( + results: Dict[str, Any], field_item: Tuple[str, List[FieldNode]] + ) -> AwaitableOrValue[Dict[str, Any]]: + response_name, field_nodes = field_item field_path = Path(path, response_name, parent_type.name) result = self.execute_field( parent_type, source_value, field_nodes, field_path ) if result is Undefined: - continue - if is_awaitable(results): - # noinspection PyShadowingNames - async def await_and_set_result( - results: Awaitable[Dict[str, Any]], - response_name: str, - result: AwaitableOrValue[Any], - ) -> Dict[str, Any]: - awaited_results = await results - awaited_results[response_name] = ( - await result if is_awaitable(result) else result - ) - return awaited_results - - results = await_and_set_result( - cast(Awaitable, results), response_name, result - ) - elif is_awaitable(result): + return results + if is_awaitable(result): # noinspection PyShadowingNames async def set_result( - results: Dict[str, Any], response_name: str, - result: Awaitable, + awaitable_result: Awaitable, ) -> Dict[str, Any]: - results[response_name] = await result + results[response_name] = await awaitable_result return results - results = set_result( - cast(Dict[str, Any], results), response_name, result - ) - else: - cast(Dict[str, Any], results)[response_name] = result - return results + return set_result(response_name, result) + results[response_name] = result + return results + + # noinspection PyTypeChecker + return async_reduce(reducer, fields.items(), {}) def execute_fields( self, @@ -446,6 +887,7 @@ def execute_fields( source_value: Any, path: Optional[Path], fields: Dict[str, List[FieldNode]], + async_payload_record: Optional[AsyncPayloadRecord] = None, ) -> AwaitableOrValue[Dict[str, Any]]: """Execute the given fields concurrently. 
@@ -459,7 +901,7 @@ def execute_fields( for response_name, field_nodes in fields.items(): field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_nodes, field_path + parent_type, source_value, field_nodes, field_path, async_payload_record ) if result is not Undefined: results[response_name] = result @@ -490,39 +932,13 @@ async def get_results() -> Dict[str, Any]: return get_results() - def build_resolve_info( - self, - field_def: GraphQLField, - field_nodes: List[FieldNode], - parent_type: GraphQLObjectType, - path: Path, - ) -> GraphQLResolveInfo: - """Build the GraphQLResolveInfo object. - - For internal use only.""" - # The resolve function's first argument is a collection of information about - # the current execution state. - return GraphQLResolveInfo( - field_nodes[0].name.value, - field_nodes, - field_def.type, - parent_type, - path, - self.schema, - self.fragments, - self.root_value, - self.operation, - self.variable_values, - self.context_value, - self.is_awaitable, - ) - def execute_field( self, parent_type: GraphQLObjectType, source: Any, field_nodes: List[FieldNode], path: Path, + async_payload_record: Optional[AsyncPayloadRecord] = None, ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. @@ -532,6 +948,7 @@ def execute_field( calling its resolve function, then calls complete_value to await coroutine objects, serialize scalars, or execute the sub-selection-set for objects. 
""" + errors = async_payload_record.errors if async_payload_record else self.errors field_name = field_nodes[0].name.value field_def = self.schema.get_field(parent_type, field_name) if not field_def: @@ -561,20 +978,25 @@ def execute_field( async def await_result() -> Any: try: completed = self.complete_value( - return_type, field_nodes, info, path, await result + return_type, + field_nodes, + info, + path, + await result, + async_payload_record, ) if self.is_awaitable(completed): return await completed return completed except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) - self.handle_field_error(error, return_type) + handle_field_error(error, return_type, errors) return None return await_result() completed = self.complete_value( - return_type, field_nodes, info, path, result + return_type, field_nodes, info, path, result, async_payload_record ) if self.is_awaitable(completed): # noinspection PyShadowingNames @@ -583,7 +1005,7 @@ async def await_completed() -> Any: return await completed except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) - self.handle_field_error(error, return_type) + handle_field_error(error, return_type, errors) return None return await_completed() @@ -591,22 +1013,35 @@ async def await_completed() -> Any: return completed except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) - self.handle_field_error(error, return_type) + handle_field_error(error, return_type, errors) return None - def handle_field_error( + def build_resolve_info( self, - error: GraphQLError, - return_type: GraphQLOutputType, - ) -> None: - # If the field type is non-nullable, then it is resolved without any protection - # from errors, however it still properly locates the error. - if is_non_null_type(return_type): - raise error - # Otherwise, error protection is applied, logging the error and resolving a - # null value for this field if one is encountered. 
- self.errors.append(error) - return None + field_def: GraphQLField, + field_nodes: List[FieldNode], + parent_type: GraphQLObjectType, + path: Path, + ) -> GraphQLResolveInfo: + """Build the GraphQLResolveInfo object. + + For internal use only.""" + # The resolve function's first argument is a collection of information about + # the current execution state. + return GraphQLResolveInfo( + field_nodes[0].name.value, + field_nodes, + field_def.type, + parent_type, + path, + self.schema, + self.fragments, + self.root_value, + self.operation, + self.variable_values, + self.context_value, + self.is_awaitable, + ) def complete_value( self, @@ -615,6 +1050,7 @@ def complete_value( info: GraphQLResolveInfo, path: Path, result: Any, + async_payload_record: Optional[AsyncPayloadRecord], ) -> AwaitableOrValue[Any]: """Complete a value. @@ -651,6 +1087,7 @@ def complete_value( info, path, result, + async_payload_record, ) if completed is None: raise TypeError( @@ -666,7 +1103,7 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - return_type, field_nodes, info, path, result + return_type, field_nodes, info, path, result, async_payload_record ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, @@ -678,13 +1115,13 @@ def complete_value( # Object type and complete for that type. if is_abstract_type(return_type): return self.complete_abstract_value( - return_type, field_nodes, info, path, result + return_type, field_nodes, info, path, result, async_payload_record ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - return_type, field_nodes, info, path, result + return_type, field_nodes, info, path, result, async_payload_record ) # Not reachable. All possible output types have been considered. @@ -693,6 +1130,35 @@ def complete_value( f" '{inspect(return_type)}'." 
) + def get_stream_values( + self, field_nodes: List[FieldNode], path: Path + ) -> Optional[StreamArguments]: + """Get stream values. + + Returns an object containing the `@stream` arguments if a field should be + streamed based on the experimental flag, stream directive present and + not disabled by the "if" argument. + """ + # do not stream inner lists of multidimensional lists + if isinstance(path.key, int): + return None + + # validation only allows equivalent streams on multiple fields, so it is + # safe to only check the first field_node for the stream directive + stream = get_directive_values( + GraphQLStreamDirective, field_nodes[0], self.variable_values + ) + + if not stream or stream.get("if") is False: + return None + + initial_count = stream.get("initialCount") + if initial_count is None or initial_count < 0: + raise ValueError("initialCount must be a positive integer") + + label = stream.get("label") + return StreamArguments(initial_count=initial_count, label=label) + async def complete_async_iterator_value( self, item_type: GraphQLOutputType, @@ -700,12 +1166,15 @@ async def complete_async_iterator_value( info: GraphQLResolveInfo, path: Path, iterator: AsyncIterator[Any], + async_payload_record: Optional[AsyncPayloadRecord], ) -> List[Any]: """Complete an async iterator. Complete a async iterator value by completing the result and calling recursively until all the results are completed. 
""" + errors = async_payload_record.errors if async_payload_record else self.errors + stream = self.get_stream_values(field_nodes, path) is_awaitable = self.is_awaitable awaitable_indices: List[int] = [] append_awaitable = awaitable_indices.append @@ -713,6 +1182,31 @@ async def complete_async_iterator_value( append_result = completed_results.append index = 0 while True: + if ( + stream + and isinstance(stream.initial_count, int) + and index >= stream.initial_count + ): + try: + await wait_for( + shield( + self.execute_stream_iterator( + index, + iterator, + field_nodes, + info, + item_type, + path, + stream.label, + async_payload_record, + ) + ), + timeout=ASYNC_DELAY, + ) + except TimeoutError: + pass + break + field_path = path.add_key(index, None) try: try: @@ -721,7 +1215,12 @@ async def complete_async_iterator_value( break try: completed_item = self.complete_value( - item_type, field_nodes, info, field_path, value + item_type, + field_nodes, + info, + field_path, + value, + async_payload_record, ) if is_awaitable(completed_item): append_awaitable(index) @@ -729,11 +1228,11 @@ async def complete_async_iterator_value( except Exception as raw_error: append_result(None) error = located_error(raw_error, field_nodes, field_path.as_list()) - self.handle_field_error(error, item_type) + handle_field_error(error, item_type, errors) except Exception as raw_error: append_result(None) error = located_error(raw_error, field_nodes, field_path.as_list()) - self.handle_field_error(error, item_type) + handle_field_error(error, item_type, errors) break index += 1 @@ -761,18 +1260,20 @@ def complete_list_value( info: GraphQLResolveInfo, path: Path, result: Union[AsyncIterable[Any], Iterable[Any]], + async_payload_record: Optional[AsyncPayloadRecord], ) -> AwaitableOrValue[List[Any]]: """Complete a list value. Complete a list value by completing each item in the list with the inner type. 
""" item_type = return_type.of_type + errors = async_payload_record.errors if async_payload_record else self.errors if isinstance(result, AsyncIterable): iterator = result.__aiter__() return self.complete_async_iterator_value( - item_type, field_nodes, info, path, iterator + item_type, field_nodes, info, path, iterator, async_payload_record ) if not is_iterable(result): @@ -781,12 +1282,15 @@ def complete_list_value( f" '{info.parent_type.name}.{info.field_name}'." ) + stream = self.get_stream_values(field_nodes, path) + # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine # object. is_awaitable = self.is_awaitable awaitable_indices: List[int] = [] append_awaitable = awaitable_indices.append + previous_async_payload_record = async_payload_record completed_results: List[Any] = [] append_result = completed_results.append for index, item in enumerate(result): @@ -794,12 +1298,33 @@ def complete_list_value( # it is not ever accessed by resolver functions. 
item_path = path.add_key(index, None) completed_item: AwaitableOrValue[Any] + + if ( + stream + and isinstance(stream.initial_count, int) + and index >= stream.initial_count + ): + previous_async_payload_record = self.execute_stream_field( + item_path, + item, + field_nodes, + info, + item_type, + stream.label, + previous_async_payload_record, + ) + continue if is_awaitable(item): # noinspection PyShadowingNames async def await_completed(item: Any, item_path: Path) -> Any: try: completed = self.complete_value( - item_type, field_nodes, info, item_path, await item + item_type, + field_nodes, + info, + item_path, + await item, + async_payload_record, ) if is_awaitable(completed): return await completed @@ -808,14 +1333,19 @@ async def await_completed(item: Any, item_path: Path) -> Any: error = located_error( raw_error, field_nodes, item_path.as_list() ) - self.handle_field_error(error, item_type) # noqa: B023 + handle_field_error(error, item_type, errors) return None completed_item = await_completed(item, item_path) else: try: completed_item = self.complete_value( - item_type, field_nodes, info, item_path, item + item_type, + field_nodes, + info, + item_path, + item, + async_payload_record, ) if is_awaitable(completed_item): # noinspection PyShadowingNames @@ -826,13 +1356,13 @@ async def await_completed(item: Any, item_path: Path) -> Any: error = located_error( raw_error, field_nodes, item_path.as_list() ) - self.handle_field_error(error, item_type) # noqa: B023 + handle_field_error(error, item_type, errors) return None completed_item = await_completed(completed_item, item_path) except Exception as raw_error: error = located_error(raw_error, field_nodes, item_path.as_list()) - self.handle_field_error(error, item_type) + handle_field_error(error, item_type, errors) completed_item = None if is_awaitable(completed_item): @@ -882,6 +1412,7 @@ def complete_abstract_value( info: GraphQLResolveInfo, path: Path, result: Any, + async_payload_record: 
Optional[AsyncPayloadRecord], ) -> AwaitableOrValue[Any]: """Complete an abstract value. @@ -907,6 +1438,7 @@ async def await_complete_object_value() -> Any: info, path, result, + async_payload_record, ) if self.is_awaitable(value): return await value # type: ignore @@ -923,6 +1455,7 @@ async def await_complete_object_value() -> Any: info, path, result, + async_payload_record, ) def ensure_valid_runtime_type( @@ -982,6 +1515,7 @@ def ensure_valid_runtime_type( field_nodes, ) + # noinspection PyTypeChecker return runtime_type def complete_object_value( @@ -991,11 +1525,9 @@ def complete_object_value( info: GraphQLResolveInfo, path: Path, result: Any, + async_payload_record: Optional[AsyncPayloadRecord], ) -> AwaitableOrValue[Dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" - # Collect sub-fields to execute to complete this value. - sub_field_nodes = self.collect_subfields(return_type, field_nodes) - # If there is an `is_type_of()` predicate function, call it with the current # result. If `is_type_of()` returns False, then raise an error rather than # continuing execution. 
@@ -1009,20 +1541,50 @@ async def execute_subfields_async() -> Dict[str, Any]: raise invalid_return_type_error( return_type, result, field_nodes ) - return self.execute_fields( - return_type, result, path, sub_field_nodes + return self.collect_and_execute_subfields( + return_type, field_nodes, path, result, async_payload_record ) # type: ignore - return execute_subfields_async() + return execute_subfields_async() + + if not is_type_of: + raise invalid_return_type_error(return_type, result, field_nodes) + + return self.collect_and_execute_subfields( + return_type, field_nodes, path, result, async_payload_record + ) + + def collect_and_execute_subfields( + self, + return_type: GraphQLObjectType, + field_nodes: List[FieldNode], + path: Path, + result: Any, + async_payload_record: Optional[AsyncPayloadRecord], + ) -> AwaitableOrValue[Dict[str, Any]]: + # Collect sub-fields to execute to complete this value. + sub_field_nodes, sub_patches = self.collect_subfields(return_type, field_nodes) + + sub_fields = self.execute_fields( + return_type, result, path, sub_field_nodes, async_payload_record + ) - if not is_type_of: - raise invalid_return_type_error(return_type, result, field_nodes) + for sub_patch in sub_patches: + label, sub_patch_field_nodes = sub_patch + self.execute_deferred_fragment( + return_type, + result, + sub_patch_field_nodes, + label, + path, + async_payload_record, + ) - return self.execute_fields(return_type, result, path, sub_field_nodes) + return sub_fields def collect_subfields( self, return_type: GraphQLObjectType, field_nodes: List[FieldNode] - ) -> Dict[str, List[FieldNode]]: + ) -> FieldsAndPatches: """Collect subfields. 
A cached collection of relevant subfields with regard to the return type is @@ -1043,17 +1605,319 @@ def collect_subfields( if len(field_nodes) == 1 # optimize most frequent case else tuple((return_type, *map(id, field_nodes))) ) - sub_field_nodes = cache.get(key) - if sub_field_nodes is None: - sub_field_nodes = collect_subfields( + sub_fields_and_patches = cache.get(key) + if sub_fields_and_patches is None: + sub_fields_and_patches = collect_subfields( self.schema, self.fragments, self.variable_values, return_type, field_nodes, ) - cache[key] = sub_field_nodes - return sub_field_nodes + cache[key] = sub_fields_and_patches + return sub_fields_and_patches + + def map_source_to_response( + self, result_or_stream: Union[ExecutionResult, AsyncIterable[Any]] + ) -> AwaitableOrValue[ + Union[ + AsyncGenerator[ + Union[ + ExecutionResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + ], + None, + ], + ExecutionResult, + ] + ]: + """Map source result to response. + + For each payload yielded from a subscription, + map it over the normal GraphQL :func:`~graphql.execution.execute` function, + with ``payload`` as the ``root_value``. + This implements the "MapSourceToResponseEvent" algorithm + described in the GraphQL specification. + The :func:`~graphql.execution.execute` function provides + the "ExecuteSubscriptionEvent" algorithm, + as it is nearly identical to the "ExecuteQuery" algorithm, + for which :func:`~graphql.execution.execute` is also used. 
+ """ + + if not isinstance(result_or_stream, AsyncIterable): + return result_or_stream # pragma: no cover + + async def callback(payload: Any) -> AsyncGenerator: + result = execute_impl(self.build_per_event_execution_context(payload)) + return ensure_async_iterable( + await result if isawaitable(result) else result # type: ignore + ) + + return flatten_async_iterable(MapAsyncIterable(result_or_stream, callback)) + + def execute_deferred_fragment( + self, + parent_type: GraphQLObjectType, + source_value: Any, + fields: Dict[str, List[FieldNode]], + label: Optional[str] = None, + path: Optional[Path] = None, + parent_context: Optional[AsyncPayloadRecord] = None, + ) -> None: + async_payload_record = DeferredFragmentRecord(label, path, parent_context, self) + try: + awaitable_or_data = self.execute_fields( + parent_type, source_value, path, fields, async_payload_record + ) + + if self.is_awaitable(awaitable_or_data): + + async def await_data( + awaitable: Awaitable[Dict[str, Any]] + ) -> Optional[Dict[str, Any]]: + # noinspection PyShadowingNames + + try: + return await awaitable + except GraphQLError as error: + async_payload_record.errors.append(error) + return None + + awaitable_or_data = await_data(awaitable_or_data) # type: ignore + except GraphQLError as error: + async_payload_record.errors.append(error) + awaitable_or_data = None + + async_payload_record.add_data(awaitable_or_data) + + def execute_stream_field( + self, + path: Path, + item: AwaitableOrValue[Any], + field_nodes: List[FieldNode], + info: GraphQLResolveInfo, + item_type: GraphQLOutputType, + label: Optional[str] = None, + parent_context: Optional[AsyncPayloadRecord] = None, + ) -> AsyncPayloadRecord: + async_payload_record = StreamRecord(label, path, None, parent_context, self) + completed_item: Any + completed_items: Any + try: + try: + if self.is_awaitable(item): + + async def await_completed_item() -> Any: + completed = self.complete_value( + item_type, + field_nodes, + info, + path, + await 
item, + async_payload_record, + ) + return ( + await completed + if self.is_awaitable(completed) + else completed + ) + + completed_item = await_completed_item() + + else: + completed_item = self.complete_value( + item_type, field_nodes, info, path, item, async_payload_record + ) + + if self.is_awaitable(completed_item): + + async def await_completed_item() -> Any: + # noinspection PyShadowingNames + try: + return await completed_item + except Exception as raw_error: + # noinspection PyShadowingNames + error = located_error( + raw_error, field_nodes, path.as_list() + ) + handle_field_error( + error, item_type, async_payload_record.errors + ) + return None + + complete_item = await_completed_item() + + else: + complete_item = completed_item + except Exception as raw_error: + error = located_error(raw_error, field_nodes, path.as_list()) + handle_field_error(error, item_type, async_payload_record.errors) + complete_item = None # pragma: no cover + + except GraphQLError as error: + async_payload_record.errors.append(error) + async_payload_record.add_items(None) + return async_payload_record + + if self.is_awaitable(complete_item): + + async def await_completed_items() -> Optional[List[Any]]: + # noinspection PyShadowingNames + try: + return [await complete_item] # type: ignore + except GraphQLError as error: + async_payload_record.errors.append(error) + return None + + completed_items = await_completed_items() + else: + completed_items = [complete_item] + + async_payload_record.add_items(completed_items) + return async_payload_record + + async def execute_stream_iterator_item( + self, + iterator: AsyncIterator[Any], + field_nodes: List[FieldNode], + info: GraphQLResolveInfo, + item_type: GraphQLOutputType, + async_payload_record: StreamRecord, + field_path: Path, + ) -> Any: + try: + item = await anext(iterator) + completed_item = self.complete_value( + item_type, field_nodes, info, field_path, item, async_payload_record + ) + + return ( + await completed_item + if 
self.is_awaitable(completed_item) + else completed_item + ) + + except StopAsyncIteration as raw_error: + async_payload_record.set_ist_completed_iterator() + raise StopAsyncIteration from raw_error + + except Exception as raw_error: + error = located_error(raw_error, field_nodes, field_path.as_list()) + handle_field_error(error, item_type, async_payload_record.errors) + + async def execute_stream_iterator( + self, + initial_index: int, + iterator: AsyncIterator[Any], + field_nodes: List[FieldNode], + info: GraphQLResolveInfo, + item_type: GraphQLOutputType, + path: Optional[Path], + label: Optional[str], + parent_context: Optional[AsyncPayloadRecord], + ) -> None: + index = initial_index + previous_async_payload_record = parent_context + + while True: + field_path = Path(path, index, None) + async_payload_record = StreamRecord( + label, field_path, iterator, previous_async_payload_record, self + ) + + awaitable_data = self.execute_stream_iterator_item( + iterator, field_nodes, info, item_type, async_payload_record, field_path + ) + + # noinspection PyShadowingNames + async def items( + data: Awaitable[Any], async_payload_record: StreamRecord + ) -> AwaitableOrValue[Optional[List[Any]]]: + try: + return [await data] + except GraphQLError as error: + async_payload_record.errors.append(error) + return None + + try: + async_payload_record.add_items( + await items(awaitable_data, async_payload_record) + ) + except StopAsyncIteration: + if async_payload_record.errors: + async_payload_record.add_items([None]) # pragma: no cover + else: + del self.subsequent_payloads[async_payload_record] + break + + previous_async_payload_record = async_payload_record + index += 1 + + def get_completed_incremental_results(self) -> List[IncrementalResult]: + incremental_results: List[IncrementalResult] = [] + append_result = incremental_results.append + subsequent_payloads = list(self.subsequent_payloads) + for async_payload_record in subsequent_payloads: + incremental_result: 
IncrementalResult + if not async_payload_record.completed.is_set(): + continue + del self.subsequent_payloads[async_payload_record] + if isinstance(async_payload_record, StreamRecord): + items = async_payload_record.items + if async_payload_record.is_completed_iterator: + # async iterable resolver finished but there may be pending payload + continue # pragma: no cover + incremental_result = IncrementalStreamResult( + items, + async_payload_record.errors + if async_payload_record.errors + else None, + async_payload_record.path, + async_payload_record.label, + ) + else: + data = async_payload_record.data + incremental_result = IncrementalDeferResult( + data, + async_payload_record.errors + if async_payload_record.errors + else None, + async_payload_record.path, + async_payload_record.label, + ) + + append_result(incremental_result) + + return incremental_results + + async def yield_subsequent_payloads( + self, + ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + payloads = self.subsequent_payloads + has_next = bool(payloads) + + while has_next: + for awaitable in as_completed(payloads): + await awaitable + + incremental = self.get_completed_incremental_results() + + has_next = bool(payloads) + + if incremental or not has_next: + yield SubsequentIncrementalExecutionResult( + incremental=incremental or None, has_next=has_next + ) + + if not has_next: + break + + +UNEXPECTED_MULTIPLE_PAYLOADS = ( + "Executing this GraphQL operation would unexpectedly produce multiple payloads" + " (due to @defer or @stream directive)" +) def execute( @@ -1079,6 +1943,66 @@ def execute( If the arguments to this function do not result in a legal execution context, a GraphQLError will be thrown immediately explaining the invalid input. + + This function does not support incremental delivery (`@defer` and `@stream`). + If an operation which would defer or stream data is executed with this + function, it will throw or resolve to an object containing an error instead. 
+ Use `experimental_execute_incrementally` if you want to support incremental + delivery. + """ + result = experimental_execute_incrementally( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + field_resolver, + type_resolver, + subscribe_field_resolver, + middleware, + execution_context_class, + is_awaitable, + ) + if isinstance(result, ExperimentalExecuteSingleResult): + return result.single_result + if isinstance(result, ExperimentalExecuteMultipleResults): + raise GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS) + + async def await_result() -> Any: + awaited_result = await result # type: ignore + if isinstance(awaited_result, ExperimentalExecuteSingleResult): + return awaited_result.single_result + return ExecutionResult( + None, errors=[GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)] + ) + + return await_result() + + +def experimental_execute_incrementally( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any = None, + context_value: Any = None, + variable_values: Optional[Dict[str, Any]] = None, + operation_name: Optional[str] = None, + field_resolver: Optional[GraphQLFieldResolver] = None, + type_resolver: Optional[GraphQLTypeResolver] = None, + subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, + middleware: Optional[Middleware] = None, + execution_context_class: Optional[Type[ExecutionContext]] = None, + is_awaitable: Optional[Callable[[Any], bool]] = None, +) -> AwaitableOrValue[ExperimentalExecuteIncrementallyResults]: + """Execute GraphQL operation incrementally (internal implementation). + + Implements the "Executing requests" section of the GraphQL specification, + including `@defer` and `@stream` as proposed in + https://github.com/graphql/graphql-spec/pull/742 + + This function returns an awaitable of an ExperimentalExecuteIncrementallyResults + object. This object either contains a single ExecutionResult as + `single_result`, or an `initial_result` and a stream of `subsequent_results`. 
""" if execution_context_class is None: execution_context_class = ExecutionContext @@ -1101,12 +2025,16 @@ def execute( # Return early errors if execution context failed. if isinstance(context, list): - return ExecutionResult(data=None, errors=context) + return ExperimentalExecuteSingleResult( + single_result=ExecutionResult(None, errors=context) + ) return execute_impl(context) -def execute_impl(context: ExecutionContext) -> AwaitableOrValue[ExecutionResult]: +def execute_impl( + context: ExecutionContext, +) -> AwaitableOrValue[ExperimentalExecuteIncrementallyResults]: """Execute GraphQL operation (internal implementation).""" # Return a possible coroutine object that will eventually yield the data described # by the "Response" section of the GraphQL specification. @@ -1128,17 +2056,43 @@ def execute_impl(context: ExecutionContext) -> AwaitableOrValue[ExecutionResult] # noinspection PyShadowingNames async def await_result() -> Any: try: - return build_response(await result, errors) + initial_result = build_response( + await result, errors # type: ignore + ) + if context.subsequent_payloads: + return ExperimentalExecuteMultipleResults( + initial_result=InitialIncrementalExecutionResult( + initial_result.data, + initial_result.errors, + has_next=True, + ), + subsequent_results=context.yield_subsequent_payloads(), + ) + return ExperimentalExecuteSingleResult(single_result=initial_result) except GraphQLError as error: errors.append(error) - return build_response(None, errors) + return ExperimentalExecuteSingleResult( + single_result=build_response(None, errors) + ) return await_result() + + initial_result = build_response(result, errors) # type: ignore + if context.subsequent_payloads: + return ExperimentalExecuteMultipleResults( + initial_result=InitialIncrementalExecutionResult( + initial_result.data, + initial_result.errors, + has_next=True, + ), + subsequent_results=context.yield_subsequent_payloads(), + ) + return 
ExperimentalExecuteSingleResult(single_result=initial_result) except GraphQLError as error: errors.append(error) - return build_response(None, errors) - else: - return build_response(result, errors) # type: ignore + return ExperimentalExecuteSingleResult( + single_result=build_response(None, errors) + ) def assume_not_awaitable(_value: Any) -> bool: @@ -1174,7 +2128,7 @@ def execute_sync( else (None if check_sync else assume_not_awaitable) ) - result = execute( + result = experimental_execute_incrementally( schema, document, root_value, @@ -1190,11 +2144,26 @@ def execute_sync( ) # Assert that the execution was synchronous. - if isawaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + if isawaitable(result) or isinstance(result, ExperimentalExecuteMultipleResults): + if isawaitable(result): + ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() raise RuntimeError("GraphQL execution failed to complete synchronously.") - return cast(ExecutionResult, result) + return cast(ExperimentalExecuteSingleResult, result).single_result + + +def handle_field_error( + error: GraphQLError, return_type: GraphQLOutputType, errors: List[GraphQLError] +) -> None: + """Handle error properly according to the field type.""" + # If the field type is non-nullable, then it is resolved without any protection + # from errors, however it still properly locates the error. + if is_non_null_type(return_type): + raise error + # Otherwise, error protection is applied, logging the error and resolving a + # null value for this field if one is encountered. + errors.append(error) + return None def invalid_return_type_error( @@ -1324,6 +2293,111 @@ def subscribe( If the operation succeeded, the coroutine will yield an AsyncIterator, which yields a stream of ExecutionResults representing the response stream. + + This function does not support incremental delivery (`@defer` and `@stream`). 
+ If an operation which would defer or stream data is executed with this function, + each :class:`InitialIncrementalExecutionResult` and + :class:`SubsequentIncrementalExecutionResult` + in the result stream will be replaced with an :class:`ExecutionResult` + with a single error stating that defer/stream is not supported. + Use :func:`experimental_subscribe_incrementally` if you want to support + incremental delivery. + """ + result = experimental_subscribe_incrementally( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + field_resolver, + type_resolver, + subscribe_field_resolver, + execution_context_class, + ) + + if isinstance(result, ExecutionResult): + return result + if isinstance(result, AsyncIterable): + return MapAsyncIterable(result, ensure_single_execution_result) + + async def await_result() -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: + result_or_iterable = await result # type: ignore + if isinstance(result_or_iterable, AsyncIterable): + return MapAsyncIterable(result_or_iterable, ensure_single_execution_result) + return result_or_iterable + + return await_result() + + +def ensure_single_execution_result( + result: Union[ + ExecutionResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + ] +) -> ExecutionResult: + """Ensure that the given result does not use incremental delivery.""" + if not isinstance(result, ExecutionResult): + return ExecutionResult( + None, errors=[GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)] + ) + return result + + +def experimental_subscribe_incrementally( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any = None, + context_value: Any = None, + variable_values: Optional[Dict[str, Any]] = None, + operation_name: Optional[str] = None, + field_resolver: Optional[GraphQLFieldResolver] = None, + type_resolver: Optional[GraphQLTypeResolver] = None, + subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, + 
execution_context_class: Optional[Type[ExecutionContext]] = None, +) -> AwaitableOrValue[ + Union[ + AsyncGenerator[ + Union[ + ExecutionResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + ], + None, + ], + ExecutionResult, + ] +]: + """Create a GraphQL subscription. + + Implements the "Subscribe" algorithm described in the GraphQL spec. + + Returns a coroutine object which yields either an AsyncIterator (if successful) or + an ExecutionResult (client error). The coroutine will raise an exception if a server + error occurs. + + If the client-provided arguments to this function do not result in a compliant + subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no + data will be returned. + + If the source stream could not be created due to faulty subscription resolver logic + or underlying systems, the coroutine object will yield a single ExecutionResult + containing ``errors`` and no ``data``. + + If the operation succeeded, the coroutine will yield an AsyncIterator, which yields + a stream of ExecutionResults representing the response stream. + + Each result may be an ExecutionResult with no ``has_next`` attribute (if executing + the event did not use `@defer` or `@stream`), or an + :class:`InitialIncrementalExecutionResult` or + :class:`SubsequentIncrementalExecutionResult` + (if executing the event used `@defer` or `@stream`). In the case of + incremental execution results, each event produces a single + :class:`InitialIncrementalExecutionResult` followed by one or more + :class:`SubsequentIncrementalExecutionResult`; all but the last have + ``has_next == true``, and the last has ``has_next == False``. + There is no interleaving between results generated from the same original event. """ if execution_context_class is None: execution_context_class = ExecutionContext @@ -1344,44 +2418,44 @@ def subscribe( # Return early errors if execution context failed. 
if isinstance(context, list): - return ExecutionResult(data=None, errors=context) + return ExecutionResult(None, errors=context) result_or_stream = create_source_event_stream_impl(context) - build_context = context.build_per_event_execution_context - - async def map_source_to_response(payload: Any) -> ExecutionResult: - """Map source to response. - - For each payload yielded from a subscription, map it over the normal GraphQL - :func:`~graphql.execute` function, with ``payload`` as the ``root_value``. - This implements the "MapSourceToResponseEvent" algorithm described in the - GraphQL specification. The :func:`~graphql.execute` function provides the - "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the - "ExecuteQuery" algorithm, for which :func:`~graphql.execute` is also used. - """ - result = execute_impl(build_context(payload)) - return await result if isawaitable(result) else result - - if execution_context_class.is_awaitable(result_or_stream): - awaitable_result_or_stream = cast(Awaitable, result_or_stream) - + if context.is_awaitable(result_or_stream): # noinspection PyShadowingNames async def await_result() -> Any: - result_or_stream = await awaitable_result_or_stream - if isinstance(result_or_stream, ExecutionResult): - return result_or_stream - return MapAsyncIterable(result_or_stream, map_source_to_response) + awaited_result_or_stream = await result_or_stream # type: ignore + if isinstance(awaited_result_or_stream, ExecutionResult): + return awaited_result_or_stream + return context.map_source_to_response( # type: ignore + awaited_result_or_stream + ) return await_result() if isinstance(result_or_stream, ExecutionResult): return result_or_stream - # Map every source value to a ExecutionResult value as described above. 
- return MapAsyncIterable( - cast(AsyncIterable[Any], result_or_stream), map_source_to_response - ) + return context.map_source_to_response(result_or_stream) # type: ignore + + +async def ensure_async_iterable( + some_execution_result: ExperimentalExecuteIncrementallyResults, +) -> AsyncGenerator[ + Union[ + ExecutionResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + ], + None, +]: + if isinstance(some_execution_result, ExperimentalExecuteSingleResult): + yield some_execution_result.single_result + else: + yield some_execution_result.initial_result + async for result in some_execution_result.subsequent_results: + yield result def create_source_event_stream( @@ -1435,7 +2509,7 @@ def create_source_event_stream( # Return early errors if execution context failed. if isinstance(context, list): - return ExecutionResult(data=None, errors=context) + return ExecutionResult(None, errors=context) return create_source_event_stream_impl(context) @@ -1447,7 +2521,7 @@ def create_source_event_stream_impl( try: event_stream = execute_subscription(context) except GraphQLError as error: - return ExecutionResult(data=None, errors=[error]) + return ExecutionResult(None, errors=[error]) if context.is_awaitable(event_stream): awaitable_event_stream = cast(Awaitable, event_stream) @@ -1457,7 +2531,7 @@ async def await_event_stream() -> Union[AsyncIterable[Any], ExecutionResult]: try: return await awaitable_event_stream except GraphQLError as error: - return ExecutionResult(data=None, errors=[error]) + return ExecutionResult(None, errors=[error]) return await_event_stream() @@ -1482,7 +2556,7 @@ def execute_subscription( context.variable_values, root_type, context.operation.selection_set, - ) + ).fields first_root_field = next(iter(root_fields.items())) response_name, field_nodes = first_root_field field_name = field_nodes[0].name.value @@ -1537,3 +2611,140 @@ def assert_event_stream(result: Any) -> AsyncIterable: ) return result + + +class 
DeferredFragmentRecord: + """A record collecting data marked with the defer directive""" + + errors: List[GraphQLError] + label: Optional[str] + path: List[Union[str, int]] + data: Optional[Dict[str, Any]] + parent_context: Optional[AsyncPayloadRecord] + completed: Event + _context: ExecutionContext + _data: AwaitableOrValue[Optional[Dict[str, Any]]] + _data_added: Event + + def __init__( + self, + label: Optional[str], + path: Optional[Path], + parent_context: Optional[AsyncPayloadRecord], + context: ExecutionContext, + ) -> None: + self.label = label + self.path = path.as_list() if path else [] + self.parent_context = parent_context + self.errors = [] + self._context = context + context.subsequent_payloads[self] = None + self.data = self._data = None + self.completed = Event() + self._data_added = Event() + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: List[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + if self.parent_context: + args.append("parent_context") + if self.data is not None: + args.append("data") + return f"{name}({', '.join(args)})" + + def __await__(self) -> Generator[Any, None, Optional[Dict[str, Any]]]: + return self.wait().__await__() + + async def wait(self) -> Optional[Dict[str, Any]]: + if self.parent_context: + await self.parent_context.completed.wait() + _data = self._data + data = ( + await _data if self._context.is_awaitable(_data) else _data # type: ignore + ) + self.data = data + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.completed.set() + return data + + def add_data(self, data: AwaitableOrValue[Optional[Dict[str, Any]]]) -> None: + self._data = data + self._data_added.set() + + +class StreamRecord: + """A record collecting items marked with the stream directive""" + + errors: List[GraphQLError] + label: Optional[str] + path: List[Union[str, int]] + items: Optional[List[str]] + parent_context: Optional[AsyncPayloadRecord] + iterator: 
Optional[AsyncIterator[Any]] + is_completed_iterator: bool + completed: Event + _context: ExecutionContext + _items: AwaitableOrValue[Optional[List[Any]]] + _items_added: Event + + def __init__( + self, + label: Optional[str], + path: Optional[Path], + iterator: Optional[AsyncIterator[Any]], + parent_context: Optional[AsyncPayloadRecord], + context: ExecutionContext, + ) -> None: + self.label = label + self.path = path.as_list() if path else [] + self.parent_context = parent_context + self.iterator = iterator + self.errors = [] + self._context = context + context.subsequent_payloads[self] = None + self.items = self._items = None + self.completed = Event() + self._items_added = Event() + self.is_completed_iterator = False + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: List[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + if self.parent_context: + args.append("parent_context") + if self.items is not None: + args.append("items") + return f"{name}({', '.join(args)})" + + def __await__(self) -> Generator[Any, None, Optional[List[str]]]: + return self.wait().__await__() + + async def wait(self) -> Optional[List[str]]: + await self._items_added.wait() + if self.parent_context: + await self.parent_context.completed.wait() + _items = self._items + items = ( + await _items # type: ignore + if self._context.is_awaitable(_items) + else _items + ) + self.items = items + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.completed.set() + return items + + def add_items(self, items: AwaitableOrValue[Optional[List[Any]]]) -> None: + self._items = items + self._items_added.set() + + def set_ist_completed_iterator(self) -> None: + self.is_completed_iterator = True + self._items_added.set() + + +AsyncPayloadRecord = Union[DeferredFragmentRecord, StreamRecord] diff --git a/src/graphql/execution/flatten_async_iterable.py b/src/graphql/execution/flatten_async_iterable.py new file mode 100644 index 
00000000..7c0e0721 --- /dev/null +++ b/src/graphql/execution/flatten_async_iterable.py @@ -0,0 +1,36 @@ +from typing import AsyncGenerator, AsyncIterable, TypeVar, Union + + +try: + from contextlib import aclosing +except ImportError: # python < 3.10 + from contextlib import asynccontextmanager + + @asynccontextmanager # type: ignore + async def aclosing(thing): + try: + yield thing + finally: + await thing.aclose() + + +T = TypeVar("T") + +AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] + +__all__ = ["flatten_async_iterable"] + + +async def flatten_async_iterable( + iterable: AsyncIterableOrGenerator[AsyncIterableOrGenerator[T]], +) -> AsyncGenerator[T, None]: + """Flatten async iterables. + + Given an AsyncIterable of AsyncIterables, flatten all yielded results into a + single AsyncIterable. + """ + async with aclosing(iterable) as sub_iterators: # type: ignore + async for sub_iterator in sub_iterators: + async with aclosing(sub_iterator) as items: # type: ignore + async for item in items: + yield item diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index a0265ec3..3ec84062 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -39,6 +39,8 @@ async def graphql( to separate the validation and execution phases to a static time tooling step, and a server runtime step. + This function does not support incremental delivery (`@defer` and `@stream`). 
+ Accepts the following arguments: :arg schema: diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 406684f0..e6d4768d 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -49,7 +49,7 @@ class VisitorActionEnum(Enum): VisitorAction: TypeAlias = Optional[VisitorActionEnum] -# Note that in GraphQL.js these are defined differently: +# Note that in GraphQL.js these are defined *differently*: # BREAK = {}, SKIP = false, REMOVE = null, IDLE = undefined BREAK = VisitorActionEnum.BREAK @@ -103,7 +103,7 @@ def leave(self, node, key, parent, path, ancestors): You can also define node kind specific methods by suffixing them with an underscore followed by the kind of the node to be visited. For instance, to visit ``field`` - nodes, you would defined the methods ``enter_field()`` and/or ``leave_field()``, + nodes, you would define the methods ``enter_field()`` and/or ``leave_field()``, with the same signature as above. If no kind specific method has been defined for a given node, the generic method is called. """ diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index fff78de4..e1aefd6a 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -8,6 +8,7 @@ These functions are not part of the module interface and are subject to change. 
""" +from .async_reduce import async_reduce from .convert_case import camel_to_snake, snake_to_camel from .cached_property import cached_property from .description import ( @@ -34,6 +35,7 @@ from .undefined import Undefined, UndefinedType __all__ = [ + "async_reduce", "camel_to_snake", "snake_to_camel", "cached_property", diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py new file mode 100644 index 00000000..9755db80 --- /dev/null +++ b/src/graphql/pyutils/async_reduce.py @@ -0,0 +1,40 @@ +from typing import Any, Awaitable, Callable, Collection, TypeVar, cast + +from .awaitable_or_value import AwaitableOrValue +from .is_awaitable import is_awaitable as default_is_awaitable + + +__all__ = ["async_reduce"] + +T = TypeVar("T") +U = TypeVar("U") + + +def async_reduce( + callback: Callable[[U, T], AwaitableOrValue[U]], + values: Collection[T], + initial_value: AwaitableOrValue[U], + is_awaitable: Callable[[Any], bool] = default_is_awaitable, +) -> AwaitableOrValue[U]: + """Reduce the given potentially awaitable values using a callback function. + + Similar to functools.reduce(), however the reducing callback may return + an awaitable, in which case reduction will continue after each promise resolves. + + If the callback does not return an awaitable, then this function will also not + return an awaitable. 
+ """ + accumulator: AwaitableOrValue[U] = initial_value + for value in values: + if is_awaitable(accumulator): + + async def async_callback( + current_accumulator: Awaitable[U], current_value: T + ) -> U: + result = callback(await current_accumulator, current_value) + return await cast(Awaitable, result) if is_awaitable(result) else result + + accumulator = async_callback(cast(Awaitable[U], accumulator), value) + else: + accumulator = callback(cast(U, accumulator), value) + return accumulator diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 5a884c6e..52aab4b7 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,6 +1,6 @@ from __future__ import annotations # Python < 3.10 -from asyncio import Future, Queue, ensure_future, get_running_loop, sleep +from asyncio import Future, Queue, create_task, get_running_loop, sleep from inspect import isawaitable from typing import Any, AsyncIterator, Callable, Optional, Set @@ -26,7 +26,7 @@ def emit(self, event: Any) -> bool: for subscriber in self.subscribers: result = subscriber(event) if isawaitable(result): - ensure_future(result) + create_task(result) # type: ignore return bool(self.subscribers) def get_subscriber( diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index f6af8b7e..4db6516d 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -133,6 +133,8 @@ specified_directives, GraphQLIncludeDirective, GraphQLSkipDirective, + GraphQLDeferDirective, + GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, # Keyword Args @@ -280,6 +282,8 @@ "GraphQLDirective", "GraphQLIncludeDirective", "GraphQLSkipDirective", + "GraphQLDeferDirective", + "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", "GraphQLDirectiveKwargs", diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index c3555615..324c9dff 
100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -6,7 +6,7 @@ from ..pyutils import inspect from .assert_name import assert_name from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull -from .scalars import GraphQLBoolean, GraphQLString +from .scalars import GraphQLBoolean, GraphQLInt, GraphQLString try: @@ -23,10 +23,12 @@ "assert_directive", "is_specified_directive", "specified_directives", + "GraphQLDeferDirective", "GraphQLDirective", "GraphQLDirectiveKwargs", "GraphQLIncludeDirective", "GraphQLSkipDirective", + "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", "DirectiveLocation", @@ -177,6 +179,43 @@ def assert_directive(directive: Any) -> GraphQLDirective: " when the `if` argument is true.", ) +# Used to conditionally defer fragments: +GraphQLDeferDirective = GraphQLDirective( + name="defer", + description="Directs the executor to defer this fragment" + " when the `if` argument is true or undefined.", + locations=[DirectiveLocation.FRAGMENT_SPREAD, DirectiveLocation.INLINE_FRAGMENT], + args={ + "if": GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description="Deferred when true or undefined.", + default_value=True, + ), + "label": GraphQLArgument(GraphQLString, description="Unique name"), + }, +) + +# Used to conditionally stream list fields: +GraphQLStreamDirective = GraphQLDirective( + name="stream", + description="Directs the executor to stream plural fields" + " when the `if` argument is true or undefined.", + locations=[DirectiveLocation.FIELD], + args={ + "if": GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description="Stream when true or undefined.", + default_value=True, + ), + "label": GraphQLArgument(GraphQLString, description="Unique name"), + "initialCount": GraphQLArgument( + GraphQLInt, + description="Number of items to return immediately", + default_value=0, + ), + }, +) + # Constant string used for default reason for a deprecation: 
DEFAULT_DEPRECATION_REASON = "No longer supported" diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 313073a5..270eed06 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -17,6 +17,12 @@ # All validation rules in the GraphQL Specification. from .specified_rules import specified_rules +# Spec Section: "Defer And Stream Directive Labels Are Unique" +from .rules.defer_stream_directive_label import DeferStreamDirectiveLabel + +# Spec Section: "Defer And Stream Directives Are Used On Valid Root Field" +from .rules.defer_stream_directive_on_root_field import DeferStreamDirectiveOnRootField + # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule @@ -68,6 +74,9 @@ # Spec Section: "Subscriptions with Single Root Field" from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule +# Spec Section: "Stream Directives Are Used On List Fields" +from .rules.stream_directive_on_list_field import StreamDirectiveOnListField + # Spec Section: "Argument Uniqueness" from .rules.unique_argument_names import UniqueArgumentNamesRule @@ -118,6 +127,8 @@ "ValidationContext", "ValidationRule", "specified_rules", + "DeferStreamDirectiveLabel", + "DeferStreamDirectiveOnRootField", "ExecutableDefinitionsRule", "FieldsOnCorrectTypeRule", "FragmentsOnCompositeTypesRule", @@ -135,6 +146,7 @@ "ProvidedRequiredArgumentsRule", "ScalarLeafsRule", "SingleFieldSubscriptionsRule", + "StreamDirectiveOnListField", "UniqueArgumentNamesRule", "UniqueDirectivesPerLocationRule", "UniqueFragmentNamesRule", diff --git a/src/graphql/validation/rules/defer_stream_directive_label.py b/src/graphql/validation/rules/defer_stream_directive_label.py new file mode 100644 index 00000000..9703ff82 --- /dev/null +++ b/src/graphql/validation/rules/defer_stream_directive_label.py @@ -0,0 +1,62 @@ +from typing import Any, Dict, List + +from ...error import GraphQLError +from 
...language import DirectiveNode, Node, StringValueNode +from ...type import GraphQLDeferDirective, GraphQLStreamDirective +from . import ASTValidationRule, ValidationContext + + +__all__ = ["DeferStreamDirectiveLabel"] + + +class DeferStreamDirectiveLabel(ASTValidationRule): + """Defer and stream directive labels are unique + + A GraphQL document is only valid if defer and stream directives' label argument + is static and unique. + """ + + def __init__(self, context: ValidationContext): + super().__init__(context) + self.known_labels: Dict[str, Node] = {} + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + _ancestors: List[Node], + ) -> None: + if node.name.value not in ( + GraphQLDeferDirective.name, + GraphQLStreamDirective.name, + ): + return + try: + label_argument = next( + arg for arg in node.arguments if arg.name.value == "label" + ) + except StopIteration: + return + label_value = label_argument.value + if not isinstance(label_value, StringValueNode): + self.report_error( + GraphQLError( + f"{node.name.value.capitalize()} directive label argument" + " must be a static string.", + node, + ), + ) + return + label_name = label_value.value + known_labels = self.known_labels + if label_name in known_labels: + self.report_error( + GraphQLError( + "Defer/Stream directive label argument must be unique.", + [known_labels[label_name], node], + ), + ) + return + known_labels[label_name] = node diff --git a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py new file mode 100644 index 00000000..707ee9f3 --- /dev/null +++ b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py @@ -0,0 +1,68 @@ +from typing import Any, List, cast + +from ...error import GraphQLError +from ...language import DirectiveNode, Node +from ...type import GraphQLDeferDirective, GraphQLStreamDirective +from . 
import ASTValidationRule, ValidationContext + + +__all__ = ["DeferStreamDirectiveOnRootField"] + + +class DeferStreamDirectiveOnRootField(ASTValidationRule): + """Defer and stream directives are used on valid root field + + A GraphQL document is only valid if defer directives are not used on root + mutation or subscription types. + """ + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + _ancestors: List[Node], + ) -> None: + context = cast(ValidationContext, self.context) + parent_type = context.get_parent_type() + if not parent_type: + return + schema = context.schema + mutation_type = schema.mutation_type + subscription_type = schema.subscription_type + + if node.name.value == GraphQLDeferDirective.name: + if mutation_type and parent_type is mutation_type: + self.report_error( + GraphQLError( + "Defer directive cannot be used on root" + f" mutation type '{parent_type.name}'.", + node, + ) + ) + if subscription_type and parent_type is subscription_type: + self.report_error( + GraphQLError( + "Defer directive cannot be used on root" + f" subscription type '{parent_type.name}'.", + node, + ) + ) + if node.name.value == GraphQLStreamDirective.name: + if mutation_type and parent_type is mutation_type: + self.report_error( + GraphQLError( + "Stream directive cannot be used on root" + f" mutation type '{parent_type.name}'.", + node, + ) + ) + if subscription_type and parent_type is subscription_type: + self.report_error( + GraphQLError( + "Stream directive cannot be used on root" + f" subscription type '{parent_type.name}'.", + node, + ) + ) diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index d853c669..11ab44fa 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -1,8 +1,9 @@ from itertools import chain -from typing import Any, 
Dict, List, Optional, Tuple, Union, cast +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast from ...error import GraphQLError from ...language import ( + DirectiveNode, FieldNode, FragmentDefinitionNode, FragmentSpreadNode, @@ -120,7 +121,7 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N # A) Each selection set represented in the document first compares "within" its # collected set of fields, finding any conflicts between every pair of # overlapping fields. -# Note: This is the#only time* that a the fields "within" a set are compared +# Note: This is the *only time* that the fields "within" a set are compared # to each other. After this only fields "between" sets are compared. # # B) Also, if any fragment is referenced in a selection set, then a @@ -132,7 +133,7 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N # # D) When comparing "between" a set of fields and a referenced fragment, first # a comparison is made between each field in the original set of fields and -# each field in the the referenced set of fields. +# each field in the referenced set of fields. 
# # E) Also, if any fragment is referenced in the referenced selection set, # then a comparison is made "between" the original set of fields and the @@ -559,6 +560,15 @@ def find_conflict( if stringify_arguments(node1) != stringify_arguments(node2): return (response_name, "they have differing arguments"), [node1], [node2] + directives1 = node1.directives + directives2 = node2.directives + if not same_streams(directives1, directives2): + return ( + (response_name, "they have differing stream directives"), + [node1], + [node2], + ) + if type1 and type2 and do_types_conflict(type1, type2): return ( (response_name, f"they return conflicting types '{type1}' and '{type2}'"), @@ -587,7 +597,7 @@ def find_conflict( return None # no conflict -def stringify_arguments(field_node: FieldNode) -> str: +def stringify_arguments(field_node: Union[FieldNode, DirectiveNode]) -> str: input_object_with_args = ObjectValueNode( fields=tuple( ObjectFieldNode(name=arg_node.name, value=arg_node.value) @@ -597,6 +607,30 @@ def stringify_arguments(field_node: FieldNode) -> str: return print_ast(sort_value_node(input_object_with_args)) +def get_stream_directive( + directives: Sequence[DirectiveNode], +) -> Optional[DirectiveNode]: + for directive in directives: + if directive.name.value == "stream": + return directive + return None + + +def same_streams( + directives1: Sequence[DirectiveNode], directives2: Sequence[DirectiveNode] +) -> bool: + stream1 = get_stream_directive(directives1) + stream2 = get_stream_directive(directives2) + if not stream1 and not stream2: + # both fields do not have streams + return True + if stream1 and stream2: + # check if both fields have equivalent streams + return stringify_arguments(stream1) == stringify_arguments(stream2) + # fields have a mix of stream and no stream + return False + + def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> bool: """Check whether two types conflict diff --git 
a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index bf9541c3..968cda48 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -45,7 +45,7 @@ def enter_operation_definition( variable_values, subscription_type, node.selection_set, - ) + ).fields if len(fields) > 1: field_selection_lists = list(fields.values()) extra_field_selection_lists = field_selection_lists[1:] diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py new file mode 100644 index 00000000..41ba6066 --- /dev/null +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py @@ -0,0 +1,57 @@ +from typing import Any, List, cast + +from ...error import GraphQLError +from ...language import DirectiveNode, Node +from ...type import GraphQLStreamDirective, is_list_type, is_wrapping_type +from . import ASTValidationRule, ValidationContext + + +__all__ = ["StreamDirectiveOnListField"] + + +class StreamDirectiveOnListField(ASTValidationRule): + """Stream directive on list field + + A GraphQL document is only valid if stream directives are used on list fields. 
+ """ + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + _ancestors: List[Node], + ) -> None: + context = cast(ValidationContext, self.context) + field_def = context.get_field_def() + parent_type = context.get_parent_type() + if ( + field_def + and parent_type + and node.name.value == GraphQLStreamDirective.name + and not ( + is_list_type(field_def.type) + or ( + is_wrapping_type(field_def.type) + and is_list_type(field_def.type.of_type) + ) + ) + ): + try: + field_name = next( + name + for name, field in parent_type.fields.items() # type: ignore + if field is field_def + ) + except StopIteration: # pragma: no cover + field_name = "" + else: + field_name = f" '{field_name}'" + self.report_error( + GraphQLError( + "Stream directive cannot be used on non-list" + f" field{field_name} on type '{parent_type.name}'.", + node, + ) + ) diff --git a/src/graphql/validation/specified_rules.py b/src/graphql/validation/specified_rules.py index 8df6977f..e831c25f 100644 --- a/src/graphql/validation/specified_rules.py +++ b/src/graphql/validation/specified_rules.py @@ -2,6 +2,12 @@ from .rules import ASTValidationRule +# Spec Section: "Defer And Stream Directive Labels Are Unique" +from .rules.defer_stream_directive_label import DeferStreamDirectiveLabel + +# Spec Section: "Defer And Stream Directives Are Used On Valid Root Field" +from .rules.defer_stream_directive_on_root_field import DeferStreamDirectiveOnRootField + # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule @@ -62,9 +68,12 @@ # Spec Section: "Subscriptions with Single Root Field" from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule -from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule + +# Spec Section: "Stream Directives Are Used On List Fields" +from .rules.stream_directive_on_list_field import StreamDirectiveOnListField # Spec Section: "Argument 
Uniqueness" +from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule from .rules.unique_argument_names import UniqueArgumentNamesRule from .rules.unique_directive_names import UniqueDirectiveNamesRule @@ -125,6 +134,9 @@ NoUnusedVariablesRule, KnownDirectivesRule, UniqueDirectivesPerLocationRule, + DeferStreamDirectiveOnRootField, + DeferStreamDirectiveLabel, + StreamDirectiveOnListField, KnownArgumentNamesRule, UniqueArgumentNamesRule, ValuesOfCorrectTypeRule, diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 63f7e2ec..3dbc6d00 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -42,8 +42,12 @@ def uses_a_custom_execution_context_class(): ) class TestExecutionContext(ExecutionContext): - def execute_field(self, parent_type, source, field_nodes, path): - result = super().execute_field(parent_type, source, field_nodes, path) + def execute_field( + self, parent_type, source, field_nodes, path, async_payload_record=None + ): + result = super().execute_field( + parent_type, source, field_nodes, path, async_payload_record + ) return result * 2 # type: ignore assert execute(schema, query, execution_context_class=TestExecutionContext) == ( diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py new file mode 100644 index 00000000..58e2ab7b --- /dev/null +++ b/tests/execution/test_defer.py @@ -0,0 +1,926 @@ +from asyncio import sleep +from typing import Any, Dict, List, NamedTuple + +from pytest import mark, raises + +from graphql.error import GraphQLError +from graphql.execution import ( + ExecutionContext, + ExperimentalExecuteMultipleResults, + ExperimentalExecuteSingleResult, + IncrementalDeferResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + execute, + experimental_execute_incrementally, +) +from graphql.execution.execute import DeferredFragmentRecord +from graphql.language import DocumentNode, parse +from 
graphql.pyutils import Path +from graphql.type import ( + GraphQLField, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +) + + +friend_type = GraphQLObjectType( + "Friend", {"id": GraphQLField(GraphQLID), "name": GraphQLField(GraphQLString)} +) + + +class Friend(NamedTuple): + name: str + id: int + + +friends = [Friend("Han", 2), Friend("Leia", 3), Friend("C-3PO", 4)] + + +async def resolve_slow(_obj, _info) -> str: + """Simulate a slow async resolver returning a value.""" + await sleep(0) + return "slow" + + +async def resolve_bad(_obj, _info) -> str: + """Simulate a bad async resolver raising an error.""" + raise RuntimeError("bad") + + +def resolve_null_sync(_obj, _info) -> None: + """Simulate a resolver returning a null value synchronously.""" + return None + + +async def resolve_null_async(_obj, _info) -> None: + """Simulate a resolver returning a null value asynchronously.""" + return None + + +hero_type = GraphQLObjectType( + "Hero", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "slowField": GraphQLField(GraphQLString, resolve=resolve_slow), + "errorField": GraphQLField(GraphQLString, resolve=resolve_bad), + "nonNullErrorField": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=resolve_null_sync + ), + "asyncNonNullErrorField": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=resolve_null_async + ), + "friends": GraphQLField( + GraphQLList(friend_type), resolve=lambda _obj, _info: friends + ), + }, +) + +hero = Friend("Luke", 1) + +query = GraphQLObjectType( + "Query", {"hero": GraphQLField(hero_type, resolve=lambda _obj, _info: hero)} +) + +schema = GraphQLSchema(query) + + +async def complete(document: DocumentNode, root_value: Any = None) -> Any: + result = experimental_execute_incrementally(schema, document, root_value) + + if isinstance(result, ExperimentalExecuteMultipleResults): + results: List[Any] = [result.initial_result.formatted] + async for patch in 
result.subsequent_results: + results.append(patch.formatted) + return results + + assert isinstance(result, ExperimentalExecuteSingleResult) + return result.single_result.formatted + + +def modified_args(args: Dict[str, Any], **modifications: Any) -> Dict[str, Any]: + return {**args, **modifications} + + +def describe_execute_defer_directive(): + def can_format_and_print_incremental_defer_result(): + result = IncrementalDeferResult() + assert result.formatted == {"data": None} + assert str(result) == "IncrementalDeferResult(data=None, errors=None)" + + result = IncrementalDeferResult( + data={"hello": "world"}, + errors=[GraphQLError("msg")], + path=["foo", 1], + label="bar", + extensions={"baz": 2}, + ) + assert result.formatted == { + "data": {"hello": "world"}, + "errors": [{"message": "msg"}], + "extensions": {"baz": 2}, + "label": "bar", + "path": ["foo", 1], + } + assert ( + str(result) == "IncrementalDeferResult(data={'hello': 'world'}," + " errors=[GraphQLError('msg')], path=['foo', 1], label='bar'," + " extensions={'baz': 2})" + ) + + # noinspection PyTypeChecker + def can_compare_incremental_defer_result(): + args: Dict[str, Any] = { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "path": ["foo", 1], + "label": "bar", + "extensions": {"baz": 2}, + } + result = IncrementalDeferResult(**args) + assert result == IncrementalDeferResult(**args) + assert result != IncrementalDeferResult( + **modified_args(args, data={"hello": "foo"}) + ) + assert result != IncrementalDeferResult(**modified_args(args, errors=[])) + assert result != IncrementalDeferResult(**modified_args(args, path=["foo", 2])) + assert result != IncrementalDeferResult(**modified_args(args, label="baz")) + assert result != IncrementalDeferResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert 
result != tuple(args.values())[:1] + assert result != ({"hello": "world"}, []) + assert result == args + assert result == dict(list(args.items())[:2]) + assert result == dict(list(args.items())[:3]) + assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) + assert result != {**args, "label": "baz"} + + def can_format_and_print_initial_incremental_execution_result(): + result = InitialIncrementalExecutionResult() + assert result.formatted == {"data": None, "hasNext": False} + assert ( + str(result) == "InitialIncrementalExecutionResult(data=None, errors=None)" + ) + + result = InitialIncrementalExecutionResult(has_next=True) + assert result.formatted == {"data": None, "hasNext": True} + assert ( + str(result) + == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" + ) + + incremental = [IncrementalDeferResult(label="foo")] + result = InitialIncrementalExecutionResult( + data={"hello": "world"}, + errors=[GraphQLError("msg")], + incremental=incremental, + has_next=True, + extensions={"baz": 2}, + ) + assert result.formatted == { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "incremental": [{"data": None, "label": "foo"}], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert ( + str(result) == "InitialIncrementalExecutionResult(" + "data={'hello': 'world'}, errors=[GraphQLError('msg')], incremental[1]," + " has_next, extensions={'baz': 2})" + ) + + def can_compare_initial_incremental_execution_result(): + incremental = [IncrementalDeferResult(label="foo")] + args: Dict[str, Any] = { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "incremental": incremental, + "has_next": True, + "extensions": {"baz": 2}, + } + result = InitialIncrementalExecutionResult(**args) + assert result == InitialIncrementalExecutionResult(**args) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, data={"hello": "foo"}) + ) + assert result != InitialIncrementalExecutionResult( + 
**modified_args(args, errors=[]) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, incremental=[]) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, has_next=False) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != ({"hello": "foo"}, []) + + assert result == { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "incremental": incremental, + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result == { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "incremental": incremental, + "hasNext": True, + } + assert result != { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "incremental": incremental, + "hasNext": False, + "extensions": {"baz": 2}, + } + + def can_format_and_print_subsequent_incremental_execution_result(): + result = SubsequentIncrementalExecutionResult() + assert result.formatted == {"hasNext": False} + assert str(result) == "SubsequentIncrementalExecutionResult()" + + result = SubsequentIncrementalExecutionResult(has_next=True) + assert result.formatted == {"hasNext": True} + assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" + + incremental = [IncrementalDeferResult(label="foo")] + result = SubsequentIncrementalExecutionResult( + incremental=incremental, + has_next=True, + extensions={"baz": 2}, + ) + assert result.formatted == { + "incremental": [{"data": None, "label": "foo"}], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert ( + str(result) == "SubsequentIncrementalExecutionResult(incremental[1]," + " has_next, extensions={'baz': 2})" + ) + + def can_compare_subsequent_incremental_execution_result(): + 
incremental = [IncrementalDeferResult(label="foo")] + args: Dict[str, Any] = { + "incremental": incremental, + "has_next": True, + "extensions": {"baz": 2}, + } + result = SubsequentIncrementalExecutionResult(**args) + assert result == SubsequentIncrementalExecutionResult(**args) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, incremental=[]) + ) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, has_next=False) + ) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != (incremental, False) + assert result == { + "incremental": incremental, + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result == {"incremental": incremental, "hasNext": True} + assert result != { + "incremental": incremental, + "hasNext": False, + "extensions": {"baz": 2}, + } + + def can_print_deferred_fragment_record(): + context = ExecutionContext.build(schema, parse("{ hero { id } }")) + assert isinstance(context, ExecutionContext) + record = DeferredFragmentRecord(None, None, None, context) + assert str(record) == "DeferredFragmentRecord(path=[])" + record = DeferredFragmentRecord( + "foo", Path(None, "bar", "Bar"), record, context + ) + assert ( + str(record) == "DeferredFragmentRecord(" + "path=['bar'], label='foo', parent_context)" + ) + record.data = {"hello": "world"} + assert ( + str(record) == "DeferredFragmentRecord(" + "path=['bar'], label='foo', parent_context, data)" + ) + + @mark.asyncio + async def can_defer_fragments_containing_scalar_types(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + id + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { 
+ "incremental": [ + {"data": {"id": "1", "name": "Luke"}, "path": ["hero"]} + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_disable_defer_using_if_argument(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer(if: false) + } + } + fragment NameFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == { + "data": { + "hero": { + "id": "1", + "name": "Luke", + }, + }, + } + + @mark.asyncio + async def does_not_disable_defer_with_null_if_argument(): + document = parse( + """ + query HeroNameQuery($shouldDefer: Boolean) { + hero { + id + ...NameFragment @defer(if: $shouldDefer) + } + } + fragment NameFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "hasNext": False, + }, + ] + + @mark.asyncio + async def throws_an_error_for_defer_directive_with_non_string_label(): + document = parse( + """ + query Deferred { + ... 
@defer(label: 42) { hero { id } } + } + """ + ) + result = await complete(document) + + assert result == { + "data": None, + "errors": [ + { + "locations": [{"column": 33, "line": 3}], + "message": "Argument 'label' has invalid value 42.", + } + ], + } + + @mark.asyncio + async def can_defer_fragments_on_the_top_level_query_field(): + document = parse( + """ + query HeroNameQuery { + ...QueryFragment @defer(label: "DeferQuery") + } + fragment QueryFragment on Query { + hero { + id + } + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {}, "hasNext": True}, + { + "incremental": [ + {"data": {"hero": {"id": "1"}}, "path": [], "label": "DeferQuery"} + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_defer_fragments_with_errors_on_the_top_level_query_field(): + document = parse( + """ + query HeroNameQuery { + ...QueryFragment @defer(label: "DeferQuery") + } + fragment QueryFragment on Query { + hero { + errorField + } + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {}, "hasNext": True}, + { + "incremental": [ + { + "data": {"hero": {"errorField": None}}, + "errors": [ + { + "message": "bad", + "locations": [{"column": 17, "line": 7}], + "path": ["hero", "errorField"], + } + ], + "path": [], + "label": "DeferQuery", + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_defer_a_fragment_within_an_already_deferred_fragment(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...TopFragment @defer(label: "DeferTop") + } + } + fragment TopFragment on Hero { + name + ...NestedFragment @defer(label: "DeferNested") + } + fragment NestedFragment on Hero { + friends { + name + } + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + }, + "path": ["hero"], + "label": 
"DeferNested", + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + "label": "DeferTop", + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...TopFragment @defer(label: "DeferTop") + ...TopFragment + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1", "name": "Luke"}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"name": "Luke"}, + "path": ["hero"], + "label": "DeferTop", + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...TopFragment + ...TopFragment @defer(label: "DeferTop") + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1", "name": "Luke"}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"name": "Luke"}, + "path": ["hero"], + "label": "DeferTop", + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_defer_an_inline_fragment(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ... 
on Hero @defer(label: "InlineDeferred") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"name": "Luke"}, + "path": ["hero"], + "label": "InlineDeferred", + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_errors_thrown_in_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + errorField + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"errorField": None}, + "path": ["hero"], + "errors": [ + { + "message": "bad", + "locations": [{"line": 9, "column": 15}], + "path": ["hero", "errorField"], + } + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_non_nullable_errors_thrown_in_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullErrorField + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": None, + "path": ["hero"], + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Hero.nonNullErrorField.", + "locations": [{"line": 9, "column": 15}], + "path": ["hero", "nonNullErrorField"], + } + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + nonNullErrorField + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + id + } + """ + ) + result = await complete(document) + + assert result == { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null for non-nullable 
field" + " Hero.nonNullErrorField.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullErrorField"], + } + ], + } + + @mark.asyncio + async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + asyncNonNullErrorField + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": None, + "path": ["hero"], + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Hero.asyncNonNullErrorField.", + "locations": [{"line": 9, "column": 15}], + "path": ["hero", "asyncNonNullErrorField"], + } + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def returns_payloads_in_correct_order(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + slowField + friends { + ...NestedFragment @defer + } + } + fragment NestedFragment on Friend { + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"slowField": "slow", "friends": [{}, {}, {}]}, + "path": ["hero"], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"name": "Han"}, + "path": ["hero", "friends", 0], + }, + { + "data": {"name": "Leia"}, + "path": ["hero", "friends", 1], + }, + { + "data": {"name": "C-3PO"}, + "path": ["hero", "friends", 2], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def returns_payloads_from_synchronous_data_in_correct_order(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer + } + } + fragment NestedFragment on Friend { + name + } + """ + ) + result 
= await complete(document) + + assert result == [ + {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"name": "Luke", "friends": [{}, {}, {}]}, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"name": "Han"}, + "path": ["hero", "friends", 0], + }, + { + "data": {"name": "Leia"}, + "path": ["hero", "friends", 1], + }, + { + "data": {"name": "C-3PO"}, + "path": ["hero", "friends", 2], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): + document = parse( + """ + query Deferred { + ... @defer { hero { id } } + } + """ + ) + + with raises(GraphQLError) as exc_info: + await execute(schema, document, {}) # type: ignore + + assert str(exc_info.value) == ( + "Executing this GraphQL operation would unexpectedly produce" + " multiple payloads (due to @defer or @stream directive)" + ) + + @mark.asyncio + async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync(): + document = parse( + """ + query Deferred { + hero { slowField } + ... 
@defer { hero { id } } + } + """ + ) + + result = await execute(schema, document, {}) # type: ignore + + assert result == ( + None, + [ + { + "message": "Executing this GraphQL operation would unexpectedly" + " produce multiple payloads (due to @defer or @stream directive)" + } + ], + ) diff --git a/tests/execution/test_flatten_async_iterable.py b/tests/execution/test_flatten_async_iterable.py new file mode 100644 index 00000000..de9c5499 --- /dev/null +++ b/tests/execution/test_flatten_async_iterable.py @@ -0,0 +1,131 @@ +from typing import AsyncGenerator + +from pytest import mark, raises + +from graphql.execution.flatten_async_iterable import flatten_async_iterable + + +try: # pragma: no cover + anext +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + +def describe_flatten_async_iterable(): + @mark.asyncio + async def flattens_nested_async_generators(): + async def source(): + async def nested1() -> AsyncGenerator[float, None]: + yield 1.1 + yield 1.2 + + async def nested2() -> AsyncGenerator[float, None]: + yield 2.1 + yield 2.2 + + yield nested1() + yield nested2() + + doubles = flatten_async_iterable(source()) + + result = [x async for x in doubles] + + assert result == [1.1, 1.2, 2.1, 2.2] + + @mark.asyncio + async def allows_returning_early_from_a_nested_async_generator(): + async def source(): + async def nested1() -> AsyncGenerator[float, None]: + yield 1.1 + yield 1.2 + + async def nested2() -> AsyncGenerator[float, None]: + yield 2.1 + # Not reachable, early return + yield 2.2 # pragma: no cover + + # Not reachable, early return + async def nested3() -> AsyncGenerator[float, None]: + yield 3.1 # pragma: no cover + yield 3.2 # pragma: no cover + + yield nested1() + yield nested2() + yield nested3() # pragma: no cover + + doubles = flatten_async_iterable(source()) + + assert await anext(doubles) 
== 1.1 + assert await anext(doubles) == 1.2 + assert await anext(doubles) == 2.1 + + # early return + try: + await doubles.aclose() + except RuntimeError: # Python < 3.8 + pass + + # subsequent anext calls + with raises(StopAsyncIteration): + assert await anext(doubles) + with raises(StopAsyncIteration): + assert await anext(doubles) + + @mark.asyncio + async def allows_throwing_errors_from_a_nested_async_generator(): + async def source(): + async def nested1() -> AsyncGenerator[float, None]: + yield 1.1 + yield 1.2 + + async def nested2() -> AsyncGenerator[float, None]: + yield 2.1 + # Not reachable, early return + yield 2.2 # pragma: no cover + + # Not reachable, early return + async def nested3() -> AsyncGenerator[float, None]: + yield 3.1 # pragma: no cover + yield 3.2 # pragma: no cover + + yield nested1() + yield nested2() + yield nested3() # pragma: no cover + + doubles = flatten_async_iterable(source()) + + assert await anext(doubles) == 1.1 + assert await anext(doubles) == 1.2 + assert await anext(doubles) == 2.1 + + # throw error + with raises(RuntimeError, match="ouch"): + await doubles.athrow(RuntimeError, "ouch") + + @mark.asyncio + async def completely_yields_sub_iterables_even_when_anext_called_in_parallel(): + async def source(): + async def nested1() -> AsyncGenerator[float, None]: + yield 1.1 + yield 1.2 + + async def nested2() -> AsyncGenerator[float, None]: + yield 2.1 + yield 2.2 + + yield nested1() + yield nested2() + + doubles = flatten_async_iterable(source()) + + anext1 = anext(doubles) + anext2 = anext(doubles) + assert await anext1 == 1.1 + assert await anext2 == 1.2 + assert await anext(doubles) == 2.1 + assert await anext(doubles) == 2.2 + with raises(StopAsyncIteration): + assert await anext(doubles) diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 3aa8d1c2..2a39d57b 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -1,9 +1,14 @@ -import asyncio -from 
typing import Awaitable +from asyncio import sleep +from typing import Any, Awaitable, List from pytest import mark -from graphql.execution import execute, execute_sync +from graphql.execution import ( + ExperimentalExecuteMultipleResults, + execute, + execute_sync, + experimental_execute_incrementally, +) from graphql.language import parse from graphql.type import ( GraphQLArgument, @@ -33,20 +38,31 @@ def immediately_change_the_number(self, newNumber: int) -> NumberHolder: self.numberHolder.theNumber = newNumber return self.numberHolder - async def promise_to_change_the_number(self, new_number: int) -> NumberHolder: - await asyncio.sleep(0) - return self.immediately_change_the_number(new_number) + async def promise_to_change_the_number(self, newNumber: int) -> NumberHolder: + await sleep(0) + return self.immediately_change_the_number(newNumber) def fail_to_change_the_number(self, newNumber: int): raise RuntimeError(f"Cannot change the number to {newNumber}") async def promise_and_fail_to_change_the_number(self, newNumber: int): - await asyncio.sleep(0) + await sleep(0) self.fail_to_change_the_number(newNumber) +async def promise_to_get_the_number(holder: NumberHolder, _info) -> int: + await sleep(0) + return holder.theNumber + + numberHolderType = GraphQLObjectType( - "NumberHolder", {"theNumber": GraphQLField(GraphQLInt)} + "NumberHolder", + { + "theNumber": GraphQLField(GraphQLInt), + "promiseToGetTheNumber": GraphQLField( + GraphQLInt, resolve=promise_to_get_the_number + ), + }, ) # noinspection PyPep8Naming @@ -193,3 +209,119 @@ async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): }, ], ) + + @mark.asyncio + async def mutation_fields_with_defer_do_not_block_next_mutation(): + document = parse( + """ + mutation M { + first: promiseToChangeTheNumber(newNumber: 1) { + ...DeferFragment @defer(label: "defer-label") + }, + second: immediatelyChangeTheNumber(newNumber: 2) { + theNumber + } + } + fragment DeferFragment on NumberHolder { + 
promiseToGetTheNumber + } + """ + ) + + root_value = Root(6) + mutation_result = await experimental_execute_incrementally( # type: ignore + schema, document, root_value + ) + + patches: List[Any] = [] + assert isinstance(mutation_result, ExperimentalExecuteMultipleResults) + patches.append(mutation_result.initial_result.formatted) + async for patch in mutation_result.subsequent_results: + patches.append(patch.formatted) + + assert patches == [ + {"data": {"first": {}, "second": {"theNumber": 2}}, "hasNext": True}, + { + "incremental": [ + { + "label": "defer-label", + "path": ["first"], + "data": { + "promiseToGetTheNumber": 2, + }, + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def mutation_inside_of_a_fragment(): + document = parse( + """ + mutation M { + ...MutationFragment + second: immediatelyChangeTheNumber(newNumber: 2) { + theNumber + } + } + fragment MutationFragment on Mutation { + first: promiseToChangeTheNumber(newNumber: 1) { + theNumber + }, + } + """ + ) + + root_value = Root(6) + mutation_result = await execute(schema, document, root_value) # type: ignore + + assert mutation_result == ( + {"first": {"theNumber": 1}, "second": {"theNumber": 2}}, + None, + ) + + @mark.asyncio + async def mutation_with_defer_is_not_executed_serially(): + document = parse( + """ + mutation M { + ...MutationFragment @defer(label: "defer-label") + second: immediatelyChangeTheNumber(newNumber: 2) { + theNumber + } + } + fragment MutationFragment on Mutation { + first: promiseToChangeTheNumber(newNumber: 1) { + theNumber + }, + } + """ + ) + + root_value = Root(6) + mutation_result = experimental_execute_incrementally( + schema, document, root_value + ) + + patches: List[Any] = [] + assert isinstance(mutation_result, ExperimentalExecuteMultipleResults) + patches.append(mutation_result.initial_result.formatted) + async for patch in mutation_result.subsequent_results: + patches.append(patch.formatted) + + assert patches == [ + {"data": {"second": 
{"theNumber": 2}}, "hasNext": True}, + { + "incremental": [ + { + "label": "defer-label", + "path": [], + "data": { + "first": {"theNumber": 1}, + }, + }, + ], + "hasNext": False, + }, + ] diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py new file mode 100644 index 00000000..402a4a70 --- /dev/null +++ b/tests/execution/test_stream.py @@ -0,0 +1,1573 @@ +from asyncio import Event, Lock, gather, sleep +from typing import Any, Awaitable, Dict, List, NamedTuple + +from pytest import mark, raises + +from graphql.error import GraphQLError +from graphql.execution import ( + ExecutionContext, + ExperimentalExecuteMultipleResults, + ExperimentalExecuteSingleResult, + IncrementalStreamResult, + experimental_execute_incrementally, +) +from graphql.execution.execute import StreamRecord +from graphql.language import DocumentNode, parse +from graphql.pyutils import Path +from graphql.type import ( + GraphQLField, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +) + + +try: # pragma: no cover + anext +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + +friend_type = GraphQLObjectType( + "Friend", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + + +class Friend(NamedTuple): + name: str + id: int + + +friends = [Friend("Luke", 1), Friend("Han", 2), Friend("Leia", 3)] + +query = GraphQLObjectType( + "Query", + { + "scalarList": GraphQLField(GraphQLList(GraphQLString)), + "scalarListList": GraphQLField(GraphQLList(GraphQLList(GraphQLString))), + "friendList": GraphQLField(GraphQLList(friend_type)), + "nonNullFriendList": GraphQLField(GraphQLList(GraphQLNonNull(friend_type))), + "nestedObject": GraphQLField( + GraphQLObjectType( + "NestedObject", + { 
+ "scalarField": GraphQLField(GraphQLString), + "nestedFriendList": GraphQLField(GraphQLList(friend_type)), + }, + ) + ), + }, +) + +schema = GraphQLSchema(query) + + +async def complete(document: DocumentNode, root_value: Any = None) -> Any: + result = experimental_execute_incrementally(schema, document, root_value) + if isinstance(result, Awaitable): + result = await result + + if isinstance(result, ExperimentalExecuteMultipleResults): + results: List[Any] = [result.initial_result.formatted] + async for patch in result.subsequent_results: + results.append(patch.formatted) + return results + + assert isinstance(result, ExperimentalExecuteSingleResult) + return result.single_result.formatted + + +async def complete_async( + document: DocumentNode, num_calls: int, root_value: Any = None +) -> Any: + result = experimental_execute_incrementally(schema, document, root_value) + assert isinstance(result, Awaitable) + result = await result + assert isinstance(result, ExperimentalExecuteMultipleResults) + + class IteratorResult: + """Iterator result with formatted output.""" + + def __init__(self, value=None): + self.value = value + + @property + def formatted(self): + if self.value is None: + return {"done": True, "value": None} + return {"done": False, "value": self.value.formatted} + + lock = Lock() + iterator = result.subsequent_results + + async def locked_next(): + """Get next value with lock for concurrent access.""" + async with lock: + try: + next_value = await anext(iterator) + except StopAsyncIteration: + return None + return next_value + + next_results = [locked_next() for _i in range(num_calls)] + + results = [result.initial_result] + results.extend(await gather(*next_results)) + + return [IteratorResult(result).formatted for result in results] + + +def modified_args(args: Dict[str, Any], **modifications: Any) -> Dict[str, Any]: + return {**args, **modifications} + + +def describe_execute_stream_directive(): + def 
can_format_and_print_incremental_stream_result(): + result = IncrementalStreamResult() + assert result.formatted == {"items": None} + assert str(result) == "IncrementalStreamResult(items=None, errors=None)" + + result = IncrementalStreamResult( + items=["hello", "world"], + errors=[GraphQLError("msg")], + path=["foo", 1], + label="bar", + extensions={"baz": 2}, + ) + assert result.formatted == { + "items": ["hello", "world"], + "errors": [{"message": "msg"}], + "extensions": {"baz": 2}, + "label": "bar", + "path": ["foo", 1], + } + assert ( + str(result) == "IncrementalStreamResult(items=['hello', 'world']," + " errors=[GraphQLError('msg')], path=['foo', 1], label='bar'," + " extensions={'baz': 2})" + ) + + def can_print_stream_record(): + context = ExecutionContext.build(schema, parse("{ hero { id } }")) + assert isinstance(context, ExecutionContext) + record = StreamRecord(None, None, None, None, context) + assert str(record) == "StreamRecord(path=[])" + record = StreamRecord("foo", Path(None, "bar", "Bar"), None, record, context) + assert ( + str(record) == "StreamRecord(" "path=['bar'], label='foo', parent_context)" + ) + record.items = ["hello", "world"] + assert ( + str(record) == "StreamRecord(" + "path=['bar'], label='foo', parent_context, items)" + ) + + # noinspection PyTypeChecker + def can_compare_incremental_stream_result(): + args: Dict[str, Any] = { + "items": ["hello", "world"], + "errors": [GraphQLError("msg")], + "path": ["foo", 1], + "label": "bar", + "extensions": {"baz": 2}, + } + result = IncrementalStreamResult(**args) + assert result == IncrementalStreamResult(**args) + assert result != IncrementalStreamResult( + **modified_args(args, items=["hello", "foo"]) + ) + assert result != IncrementalStreamResult(**modified_args(args, errors=[])) + assert result != IncrementalStreamResult(**modified_args(args, path=["foo", 2])) + assert result != IncrementalStreamResult(**modified_args(args, label="baz")) + assert result != IncrementalStreamResult( + 
**modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != (["hello", "world"], []) + assert result == args + assert result == dict(list(args.items())[:2]) + assert result == dict(list(args.items())[:3]) + assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) + assert result != {**args, "label": "baz"} + + @mark.asyncio + async def can_stream_a_list_field(): + document = parse("{ scalarList @stream(initialCount: 1) }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": { + "scalarList": ["apple"], + }, + "hasNext": True, + }, + { + "incremental": [{"items": ["banana"], "path": ["scalarList", 1]}], + "hasNext": True, + }, + { + "incremental": [{"items": ["coconut"], "path": ["scalarList", 2]}], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_use_default_value_of_initial_count(): + document = parse("{ scalarList @stream }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": { + "scalarList": [], + }, + "hasNext": True, + }, + { + "incremental": [{"items": ["apple"], "path": ["scalarList", 0]}], + "hasNext": True, + }, + { + "incremental": [{"items": ["banana"], "path": ["scalarList", 1]}], + "hasNext": True, + }, + { + "incremental": [{"items": ["coconut"], "path": ["scalarList", 2]}], + "hasNext": False, + }, + ] + + @mark.asyncio + async def negative_values_of_initial_count_throw_field_errors(): + document = parse("{ scalarList @stream(initialCount: -2) }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == { + "data": { + "scalarList": None, + }, + "errors": [ + { + "message": "initialCount must be a positive 
integer", + "locations": [{"line": 1, "column": 3}], + "path": ["scalarList"], + } + ], + } + + @mark.asyncio + async def non_integer_values_of_initial_count_throw_field_errors(): + document = parse("{ scalarList @stream(initialCount: 1.5) }") + result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) + assert result == { + "data": { + "scalarList": None, + }, + "errors": [ + { + "message": "Argument 'initialCount' has invalid value 1.5.", + "locations": [{"line": 1, "column": 36}], + "path": ["scalarList"], + } + ], + } + + @mark.asyncio + async def returns_label_from_stream_directive(): + document = parse( + '{ scalarList @stream(initialCount: 1, label: "scalar-stream") }' + ) + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": { + "scalarList": ["apple"], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": ["banana"], + "path": ["scalarList", 1], + "label": "scalar-stream", + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": ["coconut"], + "path": ["scalarList", 2], + "label": "scalar-stream", + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def throws_an_error_for_stream_directive_with_non_string_label(): + document = parse("{ scalarList @stream(initialCount: 1, label: 42) }") + result = await complete(document, {"scalarList": ["some apples"]}) + assert result == { + "data": {"scalarList": None}, + "errors": [ + { + "locations": [ + { + "line": 1, + "column": 46, + } + ], + "message": "Argument 'label' has invalid value 42.", + "path": ["scalarList"], + } + ], + } + + @mark.asyncio + async def can_disable_stream_using_if_argument(): + document = parse("{ scalarList @stream(initialCount: 0, if: false) }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == { + "data": { + "scalarList": ["apple", "banana", "coconut"], + }, + } + + @mark.asyncio + async def 
does_not_disable_stream_with_null_if_argument(): + document = parse( + "query ($shouldStream: Boolean)" + " { scalarList @stream(initialCount: 2, if: $shouldStream) }" + ) + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": { + "scalarList": ["apple", "banana"], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": ["coconut"], + "path": ["scalarList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_stream_multi_dimensional_lists(): + document = parse("{ scalarListList @stream(initialCount: 1) }") + result = await complete( + document, + { + "scalarListList": lambda _info: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ] + }, + ) + assert result == [ + { + "data": { + "scalarListList": [["apple", "apple", "apple"]], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [["banana", "banana", "banana"]], + "path": ["scalarListList", 1], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [["coconut", "coconut", "coconut"]], + "path": ["scalarListList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_stream_a_field_that_returns_a_list_of_awaitables(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def await_friend(f): + await sleep(0) + return f + + result = await complete( + document, + {"friendList": lambda _info: [await_friend(f) for f in friends]}, + ) + assert result == [ + { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_stream_in_correct_order_with_list_of_awaitables(): + document = parse( + """ + query { + friendList 
@stream(initialCount: 0) { + name + id + } + } + """ + ) + + async def await_friend(f): + await sleep(0) + return f + + result = await complete( + document, + {"friendList": lambda _info: [await_friend(f) for f in friends]}, + ) + assert result == [ + { + "data": {"friendList": []}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Luke", "id": "1"}], + "path": ["friendList", 0], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Han", "id": "2"}], + "path": ["friendList", 1], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_error_in_list_of_awaitables_before_initial_count_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def await_friend(f, i): + await sleep(0) + if i == 1: + raise RuntimeError("bad") + return f + + result = await complete( + document, + { + "friendList": lambda _info: [ + await_friend(f, i) for i, f in enumerate(friends) + ] + }, + ) + assert result == [ + { + "data": {"friendList": [{"name": "Luke", "id": "1"}, None]}, + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList", 1], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_error_in_list_of_awaitables_after_initial_count_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + name + id + } + } + """ + ) + + async def await_friend(f, i): + await sleep(0) + if i == 1: + raise RuntimeError("bad") + return f + + result = await complete( + document, + { + "friendList": lambda _info: [ + await_friend(f, i) for i, f in enumerate(friends) + ] + }, + ) + assert result == [ + { + 
"data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["friendList", 1], + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList", 1], + } + ], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_stream_a_field_that_returns_an_async_iterable(): + document = parse( + """ + query { + friendList @stream { + name + id + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + await sleep(0) + yield friends[i] + + result = await complete(document, {"friendList": friend_list}) + assert result == [ + { + "data": {"friendList": []}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Luke", "id": "1"}], + "path": ["friendList", 0], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Han", "id": "2"}], + "path": ["friendList", 1], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + await sleep(0) + yield friends[i] + + result = await complete(document, {"friendList": friend_list}) + assert result == [ + { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ] + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def 
negative_initial_count_throw_error_on_field_returning_async_iterable(): + document = parse( + """ + query { + friendList @stream(initialCount: -2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + yield {} # pragma: no cover + + result = await complete(document, {"friendList": friend_list}) + assert result == { + "errors": [ + { + "message": "initialCount must be a positive integer", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList"], + } + ], + "data": {"friendList": None}, + } + + @mark.asyncio + async def can_handle_concurrent_calls_to_next_without_waiting(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + await sleep(0) + yield friends[i] + + result = await complete_async(document, 3, {"friendList": friend_list}) + assert result == [ + { + "done": False, + "value": { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ] + }, + "hasNext": True, + }, + }, + { + "done": False, + "value": { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + }, + {"done": True, "value": None}, + {"done": True, "value": None}, + ] + + @mark.asyncio + async def handles_error_in_async_iterable_before_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + await sleep(0) + yield friends[0] + await sleep(0) + raise RuntimeError("bad") + + result = await complete(document, {"friendList": friend_list}) + assert result == { + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList", 1], + } + ], + "data": {"friendList": [{"name": "Luke", "id": "1"}, None]}, + } + + @mark.asyncio + async def handles_error_in_async_iterable_after_initial_count_is_reached(): + document 
= parse( + """ + query { + friendList @stream(initialCount: 1) { + name + id + } + } + """ + ) + + async def friend_list(_info): + await sleep(0) + yield friends[0] + await sleep(0) + raise RuntimeError("bad") + + result = await complete(document, {"friendList": friend_list}) + assert result == [ + { + "data": { + "friendList": [{"name": "Luke", "id": "1"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["friendList", 1], + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList", 1], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + """ + ) + + result = await complete( + document, {"nonNullFriendList": lambda _info: [friends[0], None]} + ) + assert result == [ + { + "data": { + "nonNullFriendList": [{"name": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": None, + "path": ["nonNullFriendList", 1], + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Query.nonNullFriendList.", + "locations": [{"line": 3, "column": 15}], + "path": ["nonNullFriendList", 1], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_null_for_non_null_async_items_after_initial_count_is_reached(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + await sleep(0) + yield None if i & 1 else friends[i >> 1] + + result = await complete(document, {"nonNullFriendList": friend_list}) + assert result == [ + { + "data": { + "nonNullFriendList": [{"name": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": None, + "path": ["nonNullFriendList", 1], + "errors": [ + { + "message": "Cannot return null 
for non-nullable field" + " Query.nonNullFriendList.", + "locations": [{"line": 3, "column": 15}], + "path": ["nonNullFriendList", 1], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Han"}], + "path": ["nonNullFriendList", 2], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_error_thrown_in_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + scalarList @stream(initialCount: 1) + } + """ + ) + + async def scalar_list(_info): + await sleep(0) + yield friends[0].name + await sleep(0) + yield {} + + result = await complete(document, {"scalarList": scalar_list}) + assert result == [ + { + "data": { + "scalarList": ["Luke"], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["scalarList", 1], + "errors": [ + { + "message": "String cannot represent value: {}", + "locations": [{"line": 3, "column": 15}], + "path": ["scalarList", 1], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_async_error_in_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + await sleep(0) + return {"nonNullName": throw() if i < 0 else friends[i].name} + + def get_friends(_info): + return [get_friend(0), get_friend(-1), get_friend(1)] + + result = await complete( + document, + { + "nonNullFriendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": None, + "path": ["nonNullFriendList", 1], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": 
[{"nonNullName": "Han"}], + "path": ["nonNullFriendList", 2], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_async_error_after_initial_count_reached_from_async_iterable(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + await sleep(0) + return {"nonNullName": throw() if i < 0 else friends[i].name} + + async def get_friends(_info): + yield await get_friend(0) + yield await get_friend(-1) + yield await get_friend(1) + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "friendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["friendList", 1], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"nonNullName": "Han"}], + "path": ["friendList", 2], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + id + name + } + } + """ + ) + + async def get_friend_name(i): + await sleep(0) + return friends[i].name + + async def get_friend(i): + await sleep(0) + if i < 2: + return friends[i] + return {"id": friends[2].id, "name": get_friend_name(i)} + + async def get_friends(_info): + for i in range(3): + yield await get_friend(i) + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "friendList": [{"id": "1", "name": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["friendList", 1], + } + ], + "hasNext": True, + }, + { + 
"incremental": [ + { + "items": [{"id": "3", "name": "Leia"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): + resolve_slow_field = Event() + + async def slow_field(_info): + await resolve_slow_field.wait() + return "slow" + + document = parse( + """ + query { + nestedObject { + ... DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + """ + ) + + async def get_friends(_info): + for i in range(2): + await sleep(0) + yield friends[i] + + execute_result = experimental_execute_incrementally( + schema, + document, + { + "nestedObject": { + "scalarField": slow_field, + "nestedFriendList": get_friends, + } + }, + ) + + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + + resolve_slow_field.set() + result2 = await anext(iterator) + assert result2.formatted == { + "incremental": [ + { + "data": {"scalarField": "slow", "nestedFriendList": []}, + "path": ["nestedObject"], + }, + ], + "hasNext": True, + } + result3 = await anext(iterator) + assert result3.formatted == { + "incremental": [ + { + "items": [{"name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + } + result4 = await anext(iterator) + assert result4.formatted == { + "incremental": [ + { + "items": [{"name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + ], + "hasNext": False, + } + + with raises(StopAsyncIteration): + await anext(iterator) + + @mark.asyncio + async def can_defer_fields_that_are_resolved_after_async_iterable_is_complete(): + resolve_slow_field = Event() + resolve_iterable = Event() + + async def slow_field(_info): + await resolve_slow_field.wait() 
+ return "Han" + + document = parse( + """ + query { + friendList @stream(initialCount: 1, label:"stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + """ + ) + + async def get_friends(_info): + await sleep(0) + yield friends[0] + await sleep(0) + yield {"id": friends[1].id, "name": slow_field} + await resolve_iterable.wait() + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, + document, + { + "friendList": get_friends, + }, + ) + + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + + resolve_iterable.set() + result2 = await anext(iterator) + assert result2.formatted == { + "incremental": [ + { + "data": {"name": "Luke"}, + "path": ["friendList", 0], + "label": "DeferName", + }, + { + "items": [{"id": "2"}], + "path": ["friendList", 1], + "label": "stream-label", + }, + ], + "hasNext": True, + } + + resolve_slow_field.set() + result3 = await anext(iterator) + assert result3.formatted == { + "incremental": [ + { + "data": {"name": "Han"}, + "path": ["friendList", 1], + "label": "DeferName", + }, + ], + "hasNext": False, + } + + with raises(StopAsyncIteration): + await anext(iterator) + + @mark.asyncio + async def can_defer_fields_that_are_resolved_before_async_iterable_is_complete(): + resolve_slow_field = Event() + resolve_iterable = Event() + + async def slow_field(_info): + await resolve_slow_field.wait() + return "Han" + + document = parse( + """ + query { + friendList @stream(initialCount: 1, label:"stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + """ + ) + + async def get_friends(_info): + await sleep(0) + yield friends[0] + await sleep(0) + 
yield {"id": friends[1].id, "name": slow_field} + await sleep(0) + await resolve_iterable.wait() + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, + document, + { + "friendList": get_friends, + }, + ) + + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + + resolve_slow_field.set() + result2 = await anext(iterator) + assert result2.formatted == { + "incremental": [ + { + "data": {"name": "Luke"}, + "path": ["friendList", 0], + "label": "DeferName", + }, + { + "items": [{"id": "2"}], + "path": ["friendList", 1], + "label": "stream-label", + }, + ], + "hasNext": True, + } + + result3 = await anext(iterator) + assert result3.formatted == { + "incremental": [ + { + "data": {"name": "Han"}, + "path": ["friendList", 1], + "label": "DeferName", + }, + ], + "hasNext": True, + } + + resolve_iterable.set() + result4 = await anext(iterator) + assert result4.formatted == { + "hasNext": False, + } + + with raises(StopAsyncIteration): + await anext(iterator) + + @mark.asyncio + async def finishes_async_iterable_when_returned_generator_is_closed(): + returned = False + + async def iterable(_info): + nonlocal returned + for i in range(3): + await sleep(0) + yield friends[i] + returned = True + + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + id + ... 
@defer { + name + } + } + } + """ + ) + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, document, {"friendList": iterable} + ) + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + + await iterator.aclose() + with raises(StopAsyncIteration): + await anext(iterator) + + assert returned + + @mark.asyncio + async def finishes_async_iterable_when_underlying_iterator_has_no_close_method(): + class Iterable: + def __init__(self): + self.index = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + await sleep(0) + index = self.index + self.index = index + 1 + try: + return friends[index] + except IndexError: + raise StopAsyncIteration + + iterable = Iterable() + + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + name + id + } + } + """ + ) + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, document, {"friendList": iterable} + ) + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == { + "data": {"friendList": [{"id": "1", "name": "Luke"}]}, + "hasNext": True, + } + + await iterator.aclose() + with raises(StopAsyncIteration): + await anext(iterator) + + assert iterable.index == 4 + + @mark.asyncio + async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): + returned = False + + async def iterable(_info): + nonlocal returned + for i in range(3): + await sleep(0) + yield friends[i] + returned = True + + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + ... 
@defer { + name + } + id + } + } + """ + ) + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, document, {"friendList": iterable} + ) + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + + with raises(RuntimeError, match="bad"): + await iterator.athrow(RuntimeError("bad")) + + with raises(StopAsyncIteration): + await anext(iterator) + + assert returned diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 73c489a5..d10edce6 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,12 +1,22 @@ import asyncio -from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + List, + Optional, + TypeVar, + Union, +) from pytest import mark, raises from graphql.execution import ( ExecutionResult, - MapAsyncIterable, create_source_event_stream, + experimental_subscribe_incrementally, subscribe, ) from graphql.language import DocumentNode, parse @@ -18,6 +28,7 @@ GraphQLInt, GraphQLList, GraphQLObjectType, + GraphQLResolveInfo, GraphQLSchema, GraphQLString, ) @@ -51,11 +62,17 @@ async def anext(iterator): }, ) + +async def async_subject(email: Email, _info: GraphQLResolveInfo) -> str: + return email["subject"] + + EmailType = GraphQLObjectType( "Email", { "from": GraphQLField(GraphQLString), "subject": GraphQLField(GraphQLString), + "asyncSubject": GraphQLField(GraphQLString, resolve=async_subject), "message": GraphQLField(GraphQLString), "unread": GraphQLField(GraphQLBoolean), }, @@ -98,18 +115,29 @@ async def anext(iterator): ) -def create_subscription(pubsub: SimplePubSub): +def create_subscription( + pubsub: SimplePubSub, + variable_values: Optional[Dict[str, Any]] = None, + 
original_subscribe: bool = False, +) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: document = parse( """ - subscription ($priority: Int = 0) { + subscription ($priority: Int = 0, + $shouldDefer: Boolean = false + $asyncResolver: Boolean = false) { importantEmail(priority: $priority) { email { from subject + ... @include(if: $asyncResolver) { + asyncSubject + } } - inbox { - unread - total + ... @defer(if: $shouldDefer) { + inbox { + unread + total + } } } } @@ -135,7 +163,11 @@ def transform(new_email): "importantEmail": pubsub.get_subscriber(transform), } - return subscribe(email_schema, document, data) + return ( + subscribe if original_subscribe else experimental_subscribe_incrementally + )( # type: ignore + email_schema, document, data, variable_values=variable_values + ) DummyQueryType = GraphQLObjectType("Query", {"dummy": GraphQLField(GraphQLString)}) @@ -207,11 +239,11 @@ async def foo_generator(_info): subscription = subscribe( schema, parse("subscription { foo }"), {"foo": foo_generator} ) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) - await subscription.aclose() + await subscription.aclose() # type: ignore @mark.asyncio async def accepts_type_definition_with_sync_subscribe_function(): @@ -227,11 +259,11 @@ async def foo_generator(_obj, _info): ) subscription = subscribe(schema, parse("subscription { foo }")) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) - await subscription.aclose() + await subscription.aclose() # type: ignore @mark.asyncio async def accepts_type_definition_with_async_subscribe_function(): @@ -255,11 +287,11 @@ async def subscribe_fn(obj, info): assert is_awaitable(awaitable) subscription = await awaitable - assert isinstance(subscription, MapAsyncIterable) + assert 
isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) - await subscription.aclose() + await subscription.aclose() # type: ignore @mark.asyncio async def should_only_resolve_the_first_field_of_invalid_multi_field(): @@ -285,7 +317,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover ) subscription = subscribe(schema, parse("subscription { foo bar }")) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ( {"foo": "FooValue", "bar": None}, @@ -294,7 +326,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover assert did_resolve == {"foo": True, "bar": False} - await subscription.aclose() + await subscription.aclose() # type: ignore @mark.asyncio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): @@ -465,10 +497,10 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) second_subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) payload1 = anext(subscription) payload2 = anext(second_subscription) @@ -495,11 +527,50 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): assert await payload1 == (expected_payload, None) assert await payload2 == (expected_payload, None) + @mark.asyncio + async def produces_a_payload_when_queried_fields_are_async(): + pubsub = SimplePubSub() + subscription = create_subscription(pubsub, {"asyncResolver": True}) + assert isinstance(subscription, AsyncIterator) + + assert ( + pubsub.emit( + { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "message": "Tests are good", + "unread": True, + } + ) + is True + ) + + assert await anext(subscription) == ( + { + 
"importantEmail": { + "email": { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "asyncSubject": "Alright", + }, + "inbox": {"unread": 1, "total": 2}, + } + }, + None, + ) + + try: + await subscription.aclose() # type: ignore + except RuntimeError: # Python < 3.8 + pass + with raises(StopAsyncIteration): + await anext(subscription) + @mark.asyncio async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) # Wait for the next subscription payload. payload = anext(subscription) @@ -554,7 +625,10 @@ async def produces_a_payload_per_subscription_event(): # The client decides to disconnect. # noinspection PyUnresolvedReferences - await subscription.aclose() + try: + await subscription.aclose() # type: ignore + except RuntimeError: # Python < 3.8 + pass # Which may result in disconnecting upstream services as well. assert ( @@ -573,11 +647,203 @@ async def produces_a_payload_per_subscription_event(): with raises(StopAsyncIteration): assert await anext(subscription) + @mark.asyncio + async def produces_additional_payloads_for_subscriptions_with_defer(): + pubsub = SimplePubSub() + subscription = create_subscription(pubsub, {"shouldDefer": True}) + assert isinstance(subscription, AsyncIterator) + + # Wait for the next subscription payload. + payload = anext(subscription) + + # A new email arrives! + assert ( + pubsub.emit( + { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "message": "Tests are good", + "unread": True, + } + ) + is True + ) + + # The previously waited on payload now has a value. 
+ result = await payload + assert result.formatted == { + "data": { + "importantEmail": { + "email": { + "from": "yuzhi@graphql.org", + "subject": "Alright", + }, + }, + }, + "hasNext": True, + } + + # Wait for the next payload from @defer + result = await anext(subscription) + assert result.formatted == { + "incremental": [ + { + "data": {"inbox": {"total": 2, "unread": 1}}, + "path": ["importantEmail"], + } + ], + "hasNext": False, + } + + # Another new email arrives, + # after all incrementally delivered payloads are received. + assert ( + pubsub.emit( + { + "from": "hyo@graphql.org", + "subject": "Tools", + "message": "I <3 making things", + "unread": True, + } + ) + is True + ) + + # The next waited on payload will have a value. + result = await anext(subscription) + assert result.formatted == { + "data": { + "importantEmail": { + "email": { + "from": "hyo@graphql.org", + "subject": "Tools", + }, + }, + }, + "hasNext": True, + } + + # Another new email arrives, + # before the incrementally delivered payloads from the last email was received. + assert ( + pubsub.emit( + { + "from": "adam@graphql.org", + "subject": "Important", + "message": "Read me please", + "unread": True, + } + ) + is True + ) + + # Deferred payload from previous event is received. + result = await anext(subscription) + assert result.formatted == { + "incremental": [ + { + "data": {"inbox": {"total": 3, "unread": 2}}, + "path": ["importantEmail"], + } + ], + "hasNext": False, + } + + # Next payload from last event + result = await anext(subscription) + assert result.formatted == { + "data": { + "importantEmail": { + "email": { + "from": "adam@graphql.org", + "subject": "Important", + }, + }, + }, + "hasNext": True, + } + + # The client disconnects before the deferred payload is consumed. + try: + await subscription.aclose() # type: ignore + except RuntimeError: # Python < 3.8 + pass + + # Awaiting a subscription after closing it results in completed results. 
+ with raises(StopAsyncIteration): + assert await anext(subscription) + + @mark.asyncio + async def original_subscribe_function_returns_errors_with_defer(): + pubsub = SimplePubSub() + subscription = create_subscription(pubsub, {"shouldDefer": True}, True) + assert isinstance(subscription, AsyncIterator) + + # Wait for the next subscription payload. + payload = anext(subscription) + + # A new email arrives! + assert ( + pubsub.emit( + { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "message": "Tests are good", + "unread": True, + } + ) + is True + ) + + error_payload = ( + None, + [ + { + "message": "Executing this GraphQL operation would unexpectedly" + " produce multiple payloads" + " (due to @defer or @stream directive)", + } + ], + ) + + # The previously waited on payload now has a value. + assert await payload == error_payload + + # Wait for the next payload from @defer + assert await anext(subscription) == error_payload + + # Another new email arrives, + # after all incrementally delivered payloads are received. + assert ( + pubsub.emit( + { + "from": "hyo@graphql.org", + "subject": "Tools", + "message": "I <3 making things", + "unread": True, + } + ) + is True + ) + + # The next waited on payload will have a value. + assert await anext(subscription) == error_payload + + # The next waited on payload will have a value. + assert await anext(subscription) == error_payload + + # The client disconnects before the deferred payload is consumed. + await subscription.aclose() # type: ignore + + # Awaiting a subscription after closing it results in completed results. 
+ with raises(StopAsyncIteration): + assert await anext(subscription) + @mark.asyncio async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -633,7 +899,7 @@ async def produces_a_payload_when_there_are_multiple_events(): async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -661,7 +927,10 @@ async def should_not_trigger_when_subscription_is_already_done(): ) payload = anext(subscription) - await subscription.aclose() + try: + await subscription.aclose() # type: ignore + except RuntimeError: # Python < 3.8 + pass # A new email arrives! assert ( @@ -683,7 +952,7 @@ async def should_not_trigger_when_subscription_is_already_done(): async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -714,7 +983,7 @@ async def should_not_trigger_when_subscription_is_thrown(): # Throw error with raises(RuntimeError) as exc_info: - await subscription.athrow(RuntimeError("ouch")) + await subscription.athrow(RuntimeError("ouch")) # type: ignore assert str(exc_info.value) == "ouch" with raises(StopAsyncIteration): @@ -724,7 +993,7 @@ async def should_not_trigger_when_subscription_is_thrown(): async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -804,7 +1073,7 @@ def 
resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -849,7 +1118,7 @@ def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -885,7 +1154,7 @@ def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -915,6 +1184,6 @@ async def resolve_message(message, _info): document = parse("subscription { newMessage }") subscription = subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterable) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index d5604310..d397129c 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -112,6 +112,40 @@ async def throws_if_encountering_async_operation_without_check_sync(): del result collect() + @mark.asyncio + @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def throws_if_encountering_async_iterable_execution_with_check_sync(): + doc = """ + query Example { + ...deferFrag @defer(label: "deferLabel") + } + fragment deferFrag on Query { + syncField + } + """ + with raises(RuntimeError) as exc_info: + execute_sync( + schema, document=parse(doc), root_value="rootValue", check_sync=True + ) + msg = str(exc_info.value) + 
assert msg == "GraphQL execution failed to complete synchronously." + + @mark.asyncio + @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def throws_if_encountering_async_iterable_execution_without_check_sync(): + doc = """ + query Example { + ...deferFrag @defer(label: "deferLabel") + } + fragment deferFrag on Query { + syncField + } + """ + with raises(RuntimeError) as exc_info: + execute_sync(schema, document=parse(doc), root_value="rootValue") + msg = str(exc_info.value) + assert msg == "GraphQL execution failed to complete synchronously." + def describe_graphql_sync(): def reports_errors_raised_during_schema_validation(): bad_schema = GraphQLSchema() diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index b44736fd..06baa5af 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -1,6 +1,6 @@ from copy import copy from functools import partial -from typing import List, Optional, cast +from typing import Any, List, Optional, cast from pytest import mark, raises @@ -733,9 +733,9 @@ def leave(*args): # noinspection PyShadowingNames def visits_kitchen_sink(kitchen_sink_query): # noqa: F811 ast = parse(kitchen_sink_query, experimental_client_controlled_nullability=True) - visited: List = [] + visited: List[Any] = [] record = visited.append - arg_stack: List = [] + arg_stack: List[Any] = [] push = arg_stack.append pop = arg_stack.pop diff --git a/tests/pyutils/test_async_reduce.py b/tests/pyutils/test_async_reduce.py new file mode 100644 index 00000000..2e9144e6 --- /dev/null +++ b/tests/pyutils/test_async_reduce.py @@ -0,0 +1,64 @@ +from functools import reduce +from inspect import isawaitable + +from pytest import mark + +from graphql.pyutils import async_reduce + + +def describe_async_reduce(): + def works_like_reduce_for_lists_of_ints(): + initial_value = -15 + + def callback(accumulator, current_value): + return accumulator + current_value + + values = range(7, 13) + result = 
async_reduce(callback, values, initial_value) + assert result == 42 + assert result == reduce(callback, values, initial_value) + + @mark.asyncio + async def works_with_sync_values_and_sync_initial_value(): + def callback(accumulator, current_value): + return accumulator + "-" + current_value + + values = ["bar", "baz"] + result = async_reduce(callback, values, "foo") + assert not isawaitable(result) + assert result == "foo-bar-baz" + + @mark.asyncio + async def works_with_async_initial_value(): + async def async_initial_value(): + return "foo" + + def callback(accumulator, current_value): + return accumulator + "-" + current_value + + values = ["bar", "baz"] + result = async_reduce(callback, values, async_initial_value()) + assert isawaitable(result) + assert await result == "foo-bar-baz" + + @mark.asyncio + async def works_with_async_callback(): + async def async_callback(accumulator, current_value): + return accumulator + "-" + current_value + + values = ["bar", "baz"] + result = async_reduce(async_callback, values, "foo") + assert isawaitable(result) + assert await result == "foo-bar-baz" + + @mark.asyncio + async def works_with_async_callback_and_async_initial_value(): + async def async_initial_value(): + return 1 / 8 + + async def async_callback(accumulator, current_value): + return accumulator * current_value + + result = async_reduce(async_callback, range(6, 9), async_initial_value()) + assert isawaitable(result) + assert await result == 42 diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index f361ac59..2d2e876c 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -8,7 +8,7 @@ from collections import defaultdict from enum import Enum from inspect import isawaitable -from typing import Any, Dict, List, NamedTuple, Optional +from typing import Any, AsyncIterable, Dict, List, NamedTuple, Optional from pytest import fixture, mark @@ -29,7 +29,6 @@ parse, subscribe, ) -from graphql.execution.map_async_iterable 
import MapAsyncIterable from graphql.pyutils import SimplePubSub, SimplePubSubIterator @@ -413,7 +412,7 @@ async def subscribe_to_user_mutations(context): subscription_one = subscribe( schema, parse(query), context_value=context, variable_values=variables ) - assert isinstance(subscription_one, MapAsyncIterable) + assert isinstance(subscription_one, AsyncIterable) query = """ subscription { @@ -425,13 +424,13 @@ async def subscribe_to_user_mutations(context): """ subscription_all = subscribe(schema, parse(query), context_value=context) - assert isinstance(subscription_all, MapAsyncIterable) + assert isinstance(subscription_all, AsyncIterable) received_one = [] received_all = [] async def mutate_users(): - await sleep(0) # make sure subscribers are running + await sleep(2 / 512) # make sure subscribers are running await graphql( schema, """ diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py index 63c389b7..7718e7a3 100644 --- a/tests/utils/test_assert_equal_awaitables_or_values.py +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -38,6 +38,7 @@ async def test_value(value): ) @mark.asyncio + @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_when_given_mixture_of_equal_values_and_awaitables(): async def test_value(): return {"test": "test"} diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 01bb7641..68ec7c8f 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -62,7 +62,7 @@ type Human { name(surname: Boolean): String pets: [Pet] - relatives: [Human] + relatives: [Human]! 
} enum FurColor { diff --git a/tests/validation/test_defer_stream_directive_label.py b/tests/validation/test_defer_stream_directive_label.py new file mode 100644 index 00000000..07ac73ea --- /dev/null +++ b/tests/validation/test_defer_stream_directive_label.py @@ -0,0 +1,189 @@ +from functools import partial + +from graphql.validation import DeferStreamDirectiveLabel + +from .harness import assert_validation_errors + + +assert_errors = partial(assert_validation_errors, DeferStreamDirectiveLabel) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_defer_stream_label(): + def defer_fragments_with_no_label(): + assert_valid( + """ + { + dog { + ...dogFragmentA @defer + ...dogFragmentB @defer + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """ + ) + + def defer_fragments_one_with_label_one_without(): + assert_valid( + """ + { + dog { + ...dogFragmentA @defer(label: "fragA") + ...dogFragmentB @defer + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """ + ) + + def defer_fragment_with_variable_label(): + assert_errors( + """ + query($label: String) { + dog { + ...dogFragmentA @defer(label: $label) + ...dogFragmentB @defer(label: "fragA") + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """, + [ + { + "message": "Defer directive label argument" + " must be a static string.", + "locations": [(4, 33)], + }, + ], + ) + + def defer_fragments_with_different_labels(): + assert_valid( + """ + { + dog { + ...dogFragmentA @defer(label: "fragB") + ...dogFragmentB @defer(label: "fragA") + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """ + ) + + def defer_fragments_with_same_labels(): + assert_errors( + """ + { + dog { + ...dogFragmentA @defer(label: "fragA") + ...dogFragmentB @defer(label: "fragA") + } + } + fragment dogFragmentA on Dog { + name + } + 
fragment dogFragmentB on Dog { + nickname + } + """, + [ + { + "message": "Defer/Stream directive label argument must be unique.", + "locations": [(4, 33), (5, 33)], + }, + ], + ) + + def defer_and_stream_with_no_label(): + assert_valid( + """ + { + dog { + ...dogFragment @defer + } + pets @stream(initialCount: 0) @stream { + name + } + } + fragment dogFragment on Dog { + name + } + """ + ) + + def stream_with_variable_label(): + assert_errors( + """ + query ($label: String!) { + dog { + ...dogFragment @defer + } + pets @stream(initialCount: 0) @stream(label: $label) { + name + } + } + fragment dogFragment on Dog { + name + } + """, + [ + { + "message": "Stream directive label argument" + " must be a static string.", + "locations": [(6, 45)], + }, + ], + ) + + def defer_and_stream_with_the_same_labels(): + assert_errors( + """ + { + dog { + ...dogFragment @defer(label: "MyLabel") + } + pets @stream(initialCount: 0) @stream(label: "MyLabel") { + name + } + } + fragment dogFragment on Dog { + name + } + """, + [ + { + "message": "Defer/Stream directive label argument must be unique.", + "locations": [(4, 32), (6, 45)], + }, + ], + ) + + def no_defer_or_stream_directive_with_variable_and_duplicate_label(): + assert_valid( + """ + query($label: String) { + dog @skip(label: $label) + dog @skip(label: $label) + } + """ + ) diff --git a/tests/validation/test_defer_stream_directive_on_root_field.py b/tests/validation/test_defer_stream_directive_on_root_field.py new file mode 100644 index 00000000..687665f6 --- /dev/null +++ b/tests/validation/test_defer_stream_directive_on_root_field.py @@ -0,0 +1,284 @@ +from functools import partial + +from graphql.utilities import build_schema +from graphql.validation import DeferStreamDirectiveOnRootField + +from .harness import assert_validation_errors + + +schema = build_schema( + """ + type Message { + body: String + sender: String + } + + type SubscriptionRoot { + subscriptionField: Message + subscriptionListField: [Message] + } + 
+ type MutationRoot { + mutationField: Message + mutationListField: [Message] + } + + type QueryRoot { + message: Message + messages: [Message] + } + + schema { + query: QueryRoot + mutation: MutationRoot + subscription: SubscriptionRoot + } + """ +) + +assert_errors = partial( + assert_validation_errors, DeferStreamDirectiveOnRootField, schema=schema +) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_defer_stream_on_root_field(): + def defer_fragments_spread_on_root_field(): + assert_valid( + """ + { + ...rootQueryFragment @defer + } + fragment rootQueryFragment on QueryRoot { + message { + body + } + } + """ + ) + + def defer_inline_fragment_spread_on_root_query_field(): + assert_valid( + """ + { + ... @defer { + message { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_root_mutation_field(): + assert_errors( + """ + mutation { + ...rootFragment @defer + } + fragment rootFragment on MutationRoot { + mutationField { + body + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(3, 31)], + }, + ], + ) + + def defer_inline_fragment_spread_on_root_mutation_field(): + assert_errors( + """ + mutation { + ... @defer { + mutationField { + body + } + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(3, 19)], + }, + ], + ) + + def defer_fragment_spread_on_nested_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ... 
@defer { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_root_subscription_field(): + assert_errors( + """ + subscription { + ...rootFragment @defer + } + fragment rootFragment on SubscriptionRoot { + subscriptionField { + body + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(3, 31)], + }, + ], + ) + + def defer_inline_fragment_spread_on_root_subscription_field(): + assert_errors( + """ + subscription { + ... @defer { + subscriptionField { + body + } + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(3, 19)], + }, + ], + ) + + def defer_fragment_spread_on_nested_subscription_field(): + assert_valid( + """ + subscription { + subscriptionField { + ...nestedFragment + } + } + fragment nestedFragment on Message { + body + } + """ + ) + + def stream_field_on_root_query_field(): + assert_valid( + """ + { + messages @stream { + name + } + } + """ + ) + + def stream_field_on_fragment_on_root_query_field(): + assert_valid( + """ + { + ...rootFragment + } + fragment rootFragment on QueryType { + messages @stream { + name + } + } + """ + ) + + def stream_field_on_root_mutation_field(): + assert_errors( + """ + mutation { + mutationListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(3, 33)], + }, + ], + ) + + def stream_field_on_fragment_on_root_mutation_field(): + assert_errors( + """ + mutation { + ...rootFragment + } + fragment rootFragment on MutationRoot { + mutationListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(6, 33)], + }, + ], + ) + + def stream_field_on_root_subscription_field(): + assert_errors( + """ + subscription { + subscriptionListField @stream { + name + } + } 
+ """, + [ + { + "message": "Stream directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(3, 37)], + }, + ], + ) + + def stream_field_on_fragment_on_root_subscription_field(): + assert_errors( + """ + subscription { + ...rootFragment + } + fragment rootFragment on SubscriptionRoot { + subscriptionListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(6, 37)], + }, + ], + ) diff --git a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py index d2528355..8e5b3989 100644 --- a/tests/validation/test_overlapping_fields_can_be_merged.py +++ b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -85,6 +85,116 @@ def different_skip_or_include_directives_accepted(): """ ) + def same_stream_directives_supported(): + assert_valid( + """ + fragment differentDirectivesWithDifferentAliases on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "streamLabel", initialCount: 1) + } + """ + ) + + def different_stream_directive_label(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "anotherLabel", initialCount: 1) + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_initial_count(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "streamLabel", initialCount: 2) + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. 
Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_first_missing_args(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream + name @stream(label: "streamLabel", initialCount: 1) + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_second_missing_args(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def mix_of_stream_and_no_stream(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream + name + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. 
Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def same_stream_directive_both_missing_args(): + assert_valid( + """ + fragment conflictingArgs on Dog { + name @stream + name @stream + } + """ + ) + def same_aliases_with_different_field_targets(): assert_errors( """ diff --git a/tests/validation/test_stream_directive_on_list_field.py b/tests/validation/test_stream_directive_on_list_field.py new file mode 100644 index 00000000..6613b15b --- /dev/null +++ b/tests/validation/test_stream_directive_on_list_field.py @@ -0,0 +1,83 @@ +from functools import partial + +from graphql.validation import StreamDirectiveOnListField + +from .harness import assert_validation_errors + + +assert_errors = partial(assert_validation_errors, StreamDirectiveOnListField) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_stream_directive_on_list_field(): + def stream_on_list_field(): + assert_valid( + """ + fragment objectFieldSelection on Human { + pets @stream(initialCount: 0) { + name + } + } + """ + ) + + def stream_on_non_null_list_field(): + assert_valid( + """ + fragment objectFieldSelection on Human { + relatives @stream(initialCount: 0) { + name + } + } + """ + ) + + def does_not_validate_other_directives_on_list_fields(): + assert_valid( + """ + fragment objectFieldSelection on Human { + pets @include(if: true) { + name + } + } + """ + ) + + def does_not_validate_other_directives_on_non_list_fields(): + assert_valid( + """ + fragment objectFieldSelection on Human { + pets { + name @include(if: true) + } + } + """ + ) + + def does_not_validate_misplaced_stream_directives(): + assert_valid( + """ + fragment objectFieldSelection on Human { + ... 
@stream(initialCount: 0) { + name + } + } + """ + ) + + def reports_errors_when_stream_is_used_on_non_list_field(): + assert_errors( + """ + fragment objectFieldSelection on Human { + name @stream(initialCount: 0) + } + """, + [ + { + "message": "Stream directive cannot be used" + " on non-list field 'name' on type 'Human'.", + "locations": [(3, 20)], + }, + ], + ) diff --git a/tox.ini b/tox.ini index 3e5d2e03..12cb18c7 100644 --- a/tox.ini +++ b/tox.ini @@ -37,7 +37,7 @@ commands = [testenv:mypy] basepython = python3.11 deps = - mypy==1.2.0 + mypy==1.3.0 pytest>=7.3,<8 commands = mypy src tests From 3e7b361ef9c41e09e30485dfb65e177cef34c90d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 29 May 2023 01:23:30 +0200 Subject: [PATCH 097/230] Filter subsequent payloads when parent field is null Replicates graphql/graphql-js@b9a2695f167fb50741e39a016d28ac501692ba23 --- src/graphql/execution/execute.py | 124 +++++++--- tests/execution/test_stream.py | 386 ++++++++++++++++++++++++++++--- 2 files changed, 447 insertions(+), 63 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index dd69658f..fe2bef87 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -2,6 +2,7 @@ from asyncio import Event, as_completed, ensure_future, gather, shield, sleep, wait_for from collections.abc import Mapping +from contextlib import suppress from inspect import isawaitable from typing import ( Any, @@ -17,6 +18,7 @@ NamedTuple, Optional, Sequence, + Set, Tuple, Type, Union, @@ -673,6 +675,7 @@ def __init__( self.middleware_manager = middleware_manager if is_awaitable: self.is_awaitable = is_awaitable + self._canceled_iterators: Set[AsyncIterator] = set() self._subfields_cache: Dict[Tuple, FieldsAndPatches] = {} @classmethod @@ -1006,6 +1009,7 @@ async def await_completed() -> Any: except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, 
return_type, errors) + self.filter_subsequent_payloads(path) return None return await_completed() @@ -1014,6 +1018,7 @@ async def await_completed() -> Any: except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, return_type, errors) + self.filter_subsequent_payloads(path) return None def build_resolve_info( @@ -1305,6 +1310,7 @@ def complete_list_value( and index >= stream.initial_count ): previous_async_payload_record = self.execute_stream_field( + path, item_path, item, field_nodes, @@ -1334,6 +1340,7 @@ async def await_completed(item: Any, item_path: Path) -> Any: raw_error, field_nodes, item_path.as_list() ) handle_field_error(error, item_type, errors) + self.filter_subsequent_payloads(item_path) return None completed_item = await_completed(item, item_path) @@ -1357,12 +1364,14 @@ async def await_completed(item: Any, item_path: Path) -> Any: raw_error, field_nodes, item_path.as_list() ) handle_field_error(error, item_type, errors) + self.filter_subsequent_payloads(item_path) return None completed_item = await_completed(completed_item, item_path) except Exception as raw_error: error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, errors) + self.filter_subsequent_payloads(item_path) completed_item = None if is_awaitable(completed_item): @@ -1694,6 +1703,7 @@ async def await_data( def execute_stream_field( self, path: Path, + item_path: Path, item: AwaitableOrValue[Any], field_nodes: List[FieldNode], info: GraphQLResolveInfo, @@ -1701,7 +1711,9 @@ def execute_stream_field( label: Optional[str] = None, parent_context: Optional[AsyncPayloadRecord] = None, ) -> AsyncPayloadRecord: - async_payload_record = StreamRecord(label, path, None, parent_context, self) + async_payload_record = StreamRecord( + label, item_path, None, parent_context, self + ) completed_item: Any completed_items: Any try: @@ -1713,7 +1725,7 @@ async def await_completed_item() -> Any: 
item_type, field_nodes, info, - path, + item_path, await item, async_payload_record, ) @@ -1727,7 +1739,12 @@ async def await_completed_item() -> Any: else: completed_item = self.complete_value( - item_type, field_nodes, info, path, item, async_payload_record + item_type, + field_nodes, + info, + item_path, + item, + async_payload_record, ) if self.is_awaitable(completed_item): @@ -1739,11 +1756,14 @@ async def await_completed_item() -> Any: except Exception as raw_error: # noinspection PyShadowingNames error = located_error( - raw_error, field_nodes, path.as_list() + raw_error, field_nodes, item_path.as_list() ) handle_field_error( error, item_type, async_payload_record.errors ) + self.filter_subsequent_payloads( + item_path, async_payload_record + ) return None complete_item = await_completed_item() @@ -1751,12 +1771,16 @@ async def await_completed_item() -> Any: else: complete_item = completed_item except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) + error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, async_payload_record.errors) + self.filter_subsequent_payloads( # pragma: no cover + item_path, async_payload_record + ) complete_item = None # pragma: no cover except GraphQLError as error: async_payload_record.errors.append(error) + self.filter_subsequent_payloads(item_path, async_payload_record) async_payload_record.add_items(None) return async_payload_record @@ -1768,6 +1792,7 @@ async def await_completed_items() -> Optional[List[Any]]: return [await complete_item] # type: ignore except GraphQLError as error: async_payload_record.errors.append(error) + self.filter_subsequent_payloads(path, async_payload_record) return None completed_items = await_completed_items() @@ -1786,6 +1811,8 @@ async def execute_stream_iterator_item( async_payload_record: StreamRecord, field_path: Path, ) -> Any: + if iterator in self._canceled_iterators: + raise StopAsyncIteration try: item 
= await anext(iterator) completed_item = self.complete_value( @@ -1799,12 +1826,13 @@ async def execute_stream_iterator_item( ) except StopAsyncIteration as raw_error: - async_payload_record.set_ist_completed_iterator() + async_payload_record.set_is_completed_iterator() raise StopAsyncIteration from raw_error except Exception as raw_error: error = located_error(raw_error, field_nodes, field_path.as_list()) handle_field_error(error, item_type, async_payload_record.errors) + self.filter_subsequent_payloads(field_path, async_payload_record) async def execute_stream_iterator( self, @@ -1830,30 +1858,50 @@ async def execute_stream_iterator( iterator, field_modes, info, item_type, async_payload_record, field_path ) - # noinspection PyShadowingNames - async def items( - data: Awaitable[Any], async_payload_record: StreamRecord - ) -> AwaitableOrValue[Optional[List[Any]]]: - try: - return [await data] - except GraphQLError as error: - async_payload_record.errors.append(error) - return None - try: - async_payload_record.add_items( - await items(awaitable_data, async_payload_record) - ) + data = await awaitable_data except StopAsyncIteration: if async_payload_record.errors: - async_payload_record.add_items([None]) # pragma: no cover + async_payload_record.add_items(None) # pragma: no cover else: del self.subsequent_payloads[async_payload_record] break + except GraphQLError as error: + # entire stream has errored and bubbled upwards + self.filter_subsequent_payloads(path, async_payload_record) + if iterator: # pragma: no cover else + with suppress(Exception): + await iterator.aclose() # type: ignore + # running generators cannot be closed since Python 3.8, + # so we need to remember that this iterator is already canceled + self._canceled_iterators.add(iterator) + async_payload_record.add_items(None) + async_payload_record.errors.append(error) + break + + async_payload_record.add_items([data]) previous_async_payload_record = async_payload_record index += 1 + def 
filter_subsequent_payloads( + self, + null_path: Optional[Path] = None, + current_async_record: Optional[AsyncPayloadRecord] = None, + ) -> None: + null_path_list = null_path.as_list() if null_path else [] + for async_record in list(self.subsequent_payloads): + if async_record is current_async_record: + # don't remove payload from where error originates + continue + if async_record.path[: len(null_path_list)] != null_path_list: + # async_record points to a path unaffected by this payload + continue + # async_record path points to nulled error field + if isinstance(async_record, StreamRecord) and async_record.iterator: + self._canceled_iterators.add(async_record.iterator) + del self.subsequent_payloads[async_record] + def get_completed_incremental_results(self) -> List[IncrementalResult]: incremental_results: List[IncrementalResult] = [] append_result = incremental_results.append @@ -2661,12 +2709,16 @@ async def wait(self) -> Optional[Dict[str, Any]]: if self.parent_context: await self.parent_context.completed.wait() _data = self._data - data = ( - await _data if self._context.is_awaitable(_data) else _data # type: ignore - ) - self.data = data - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.completed.set() + try: + data = ( + await _data # type: ignore + if self._context.is_awaitable(_data) + else _data + ) + finally: + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.data = data + self.completed.set() return data def add_data(self, data: AwaitableOrValue[Optional[Dict[str, Any]]]) -> None: @@ -2728,21 +2780,23 @@ async def wait(self) -> Optional[List[str]]: if self.parent_context: await self.parent_context.completed.wait() _items = self._items - items = ( - await _items # type: ignore - if self._context.is_awaitable(_items) - else _items - ) - self.items = items - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.completed.set() + try: + items = ( + await _items # type: ignore + if 
self._context.is_awaitable(_items) + else _items + ) + finally: + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.items = items + self.completed.set() return items def add_items(self, items: AwaitableOrValue[Optional[List[Any]]]) -> None: self._items = items self._items_added.set() - def set_ist_completed_iterator(self) -> None: + def set_is_completed_iterator(self) -> None: self.is_completed_iterator = True self._items_added.set() diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 402a4a70..be624027 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -63,7 +63,21 @@ class Friend(NamedTuple): "NestedObject", { "scalarField": GraphQLField(GraphQLString), + "nonNullScalarField": GraphQLField(GraphQLNonNull(GraphQLString)), "nestedFriendList": GraphQLField(GraphQLList(friend_type)), + "deeperNestedObject": GraphQLField( + GraphQLObjectType( + "DeeperNestedObject", + { + "nonNullScalarField": GraphQLField( + GraphQLNonNull(GraphQLString) + ), + "deeperNestedFriendList": GraphQLField( + GraphQLList(friend_type) + ), + }, + ) + ), }, ) ), @@ -932,9 +946,13 @@ async def handles_null_for_non_null_async_items_after_initial_count_is_reached() ) async def friend_list(_info): - for i in range(3): + try: await sleep(0) - yield None if i & 1 else friends[i >> 1] + yield friends[0] + await sleep(0) + yield None + finally: + raise RuntimeError("Oops") result = await complete(document, {"nonNullFriendList": friend_list}) assert result == [ @@ -959,15 +977,6 @@ async def friend_list(_info): ], }, ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"name": "Han"}], - "path": ["nonNullFriendList", 2], - }, - ], "hasNext": False, }, ] @@ -1063,15 +1072,6 @@ def get_friends(_info): ], }, ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["nonNullFriendList", 2], - }, - ], "hasNext": False, }, ] @@ -1140,6 +1140,335 @@ async def 
get_friends(_info): }, ] + @mark.asyncio + async def filters_payloads_that_are_nulled(): + document = parse( + """ + query { + nestedObject { + nonNullScalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + } + """ + ) + + async def resolve_null(_info): + await sleep(0) + return None + + async def friend_list(_info): + await sleep(0) + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + "nonNullScalarField": resolve_null, + "nestedFriendList": friend_list, + } + }, + ) + + assert result == { + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " NestedObject.nonNullScalarField.", + "locations": [ + { + "line": 4, + "column": 17, + } + ], + "path": ["nestedObject", "nonNullScalarField"], + }, + ], + "data": { + "nestedObject": None, + }, + } + + @mark.asyncio + async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): + document = parse( + """ + query { + otherNestedObject: nestedObject { + ... @defer { + scalarField + } + } + nestedObject { + nestedFriendList @stream(initialCount: 0) { + name + } + } + } + """ + ) + + async def error_field(_info): + await sleep(0) + raise RuntimeError("Oops") + + async def friend_list(_info): + await sleep(0) + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + "scalarField": error_field, + "nestedFriendList": friend_list, + } + }, + ) + + assert result == [ + { + "data": { + "otherNestedObject": {}, + "nestedObject": {"nestedFriendList": []}, + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"scalarField": None}, + "path": ["otherNestedObject"], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 5, "column": 19}], + "path": ["otherNestedObject", "scalarField"], + }, + ], + }, + { + "items": [{"name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def 
filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): + document = parse( + """ + query { + nestedObject { + ... @defer { + deeperNestedObject { + nonNullScalarField + deeperNestedFriendList @stream(initialCount: 0) { + name + } + } + } + } + } + """ + ) + + async def resolve_null(_info): + await sleep(0) + return None + + async def friend_list(_info): + await sleep(0) + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + "deeperNestedObject": { + "nonNullScalarField": resolve_null, + "deeperNestedFriendList": friend_list, + } + } + }, + ) + + assert result == [ + { + "data": { + "nestedObject": {}, + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "deeperNestedObject": None, + }, + "path": ["nestedObject"], + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " DeeperNestedObject.nonNullScalarField.", + "locations": [{"line": 6, "column": 21}], + "path": [ + "nestedObject", + "deeperNestedObject", + "nonNullScalarField", + ], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.asyncio + async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): + document = parse( + """ + query { + friendList @stream(initialCount: 0) { + nonNullName + ... 
@defer { + name + } + } + } + """ + ) + + async def resolve_null(_info): + await sleep(0) + return None + + async def friend(): + await sleep(0) + return { + "name": friends[0].name, + "nonNullName": resolve_null, + } + + async def friend_list(_info): + await sleep(0) + yield await friend() + + result = await complete(document, {"friendList": friend_list}) + + assert result == [ + { + "data": { + "friendList": [], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["friendList", 0], + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Friend.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 0, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @mark.timeout(1) + @mark.asyncio + async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered(): + finished = False + + async def resolve_null(_info): + await sleep(0) + return None + + async def iterable(_info): + nonlocal finished + for i in range(3): + await sleep(0) + friend = friends[i] + yield {"name": friend.name, "nonNullName": None} + finished = True # pragma: no cover + + document = parse( + """ + query { + nestedObject { + ... 
@defer { + deeperNestedObject { + nonNullScalarField + deeperNestedFriendList @stream(initialCount: 0) { + name + } + } + } + } + } + """ + ) + + execute_result = experimental_execute_incrementally( + schema, + document, + { + "nestedObject": { + "deeperNestedObject": { + "nonNullScalarField": resolve_null, + "deeperNestedFriendList": iterable, + } + } + }, + ) + + assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + + result2 = await anext(iterator) + assert result2.formatted == { + "incremental": [ + { + "data": {"deeperNestedObject": None}, + "path": ["nestedObject"], + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " DeeperNestedObject.nonNullScalarField.", + "locations": [{"line": 6, "column": 21}], + "path": [ + "nestedObject", + "deeperNestedObject", + "nonNullScalarField", + ], + }, + ], + }, + ], + "hasNext": False, + } + + with raises(StopAsyncIteration): + await anext(iterator) + + assert not finished # running iterator cannot be canceled + @mark.asyncio async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): document = parse( @@ -1281,6 +1610,7 @@ async def get_friends(_info): with raises(StopAsyncIteration): await anext(iterator) + @mark.timeout(1) @mark.asyncio async def can_defer_fields_that_are_resolved_after_async_iterable_is_complete(): resolve_slow_field = Event() @@ -1445,14 +1775,14 @@ async def get_friends(_info): @mark.asyncio async def finishes_async_iterable_when_returned_generator_is_closed(): - returned = False + finished = False async def iterable(_info): - nonlocal returned + nonlocal finished for i in range(3): await sleep(0) yield friends[i] - returned = True + finished = True document = parse( """ @@ -1480,7 +1810,7 @@ async def iterable(_info): with raises(StopAsyncIteration): await anext(iterator) - 
assert returned + assert finished @mark.asyncio async def finishes_async_iterable_when_underlying_iterator_has_no_close_method(): @@ -1533,14 +1863,14 @@ async def __anext__(self): @mark.asyncio async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): - returned = False + finished = False async def iterable(_info): - nonlocal returned + nonlocal finished for i in range(3): await sleep(0) yield friends[i] - returned = True + finished = True document = parse( """ @@ -1570,4 +1900,4 @@ async def iterable(_info): with raises(StopAsyncIteration): await anext(iterator) - assert returned + assert finished From a0467949db5a26aacae8342fc488aff9e08ebaca Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 29 May 2023 11:07:55 +0200 Subject: [PATCH 098/230] Update dependencies --- poetry.lock | 305 +++++++++++++---------------- pyproject.toml | 17 +- tests/pyutils/test_is_awaitable.py | 2 +- tox.ini | 4 +- 4 files changed, 151 insertions(+), 177 deletions(-) diff --git a/poetry.lock b/poetry.lock index c1fb6f40..a75dc897 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "appdirs" version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -28,7 +26,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -50,7 +47,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -65,7 +61,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "bandit" version = "1.7.5" description = "Security oriented static analyser for python code." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -89,7 +84,6 @@ yaml = ["PyYAML"] name = "black" version = "23.3.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +134,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -150,21 +143,19 @@ files = [ [[package]] name = "cachetools" -version = "5.3.0" +version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "dev" optional = false -python-versions = "~=3.7" +python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.0-py3-none-any.whl", hash = "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4"}, - {file = "cachetools-5.3.0.tar.gz", hash = "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14"}, + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, ] [[package]] name = "certifi" version = "2023.5.7" description = "Python package for providing Mozilla's CA Bundle." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -176,7 +167,6 @@ files = [ name = "chardet" version = "5.1.0" description = "Universal encoding detector for Python 3" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -188,7 +178,6 @@ files = [ name = "charset-normalizer" version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -273,7 +262,6 @@ files = [ name = "click" version = "8.1.3" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -289,7 +277,6 @@ importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -299,63 +286,62 @@ files = [ [[package]] name = "coverage" -version = "7.2.5" +version = "7.2.6" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, - {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, - {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, - {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, - {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, - {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, - {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, - {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, - {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, - {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, - {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, - {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, - {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, - {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, - {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, - {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, - {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, - {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, - {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, - {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, - {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, - {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, - {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, - {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, - {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = 
"sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, - {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, - {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, - {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, + {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, + {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, + {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, + {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, + {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, + {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = 
"sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, + {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, + {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, + {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, + {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, + {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, + {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, + {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, + {file = 
"coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, + {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, + {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, + {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, + {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, + {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, + {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, + {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, + {file = 
"coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, + {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, + {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, + {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, + {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, + {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, + {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, + {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, + 
{file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, + {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, + {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, + {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, ] [package.dependencies] @@ -368,7 +354,6 @@ toml = ["tomli"] name = "distlib" version = "0.3.6" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -380,7 +365,6 @@ files = [ name = "docutils" version = "0.17.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -392,7 +376,6 @@ files = [ name = "docutils" version = "0.18.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -404,7 +387,6 @@ files = [ name = "exceptiongroup" version = "1.1.1" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -419,7 +401,6 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.0" description = "A platform independent file lock." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -435,7 +416,6 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "p name = "flake8" version = "5.0.4" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.6.1" files = [ @@ -449,11 +429,26 @@ mccabe = ">=0.7.0,<0.8.0" pycodestyle = ">=2.9.0,<2.10.0" pyflakes = ">=2.5.0,<2.6.0" +[[package]] +name = "flake8" +version = "6.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, + {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.10.0,<2.11.0" +pyflakes = ">=3.0.0,<3.1.0" + [[package]] name = "flake8-bandit" version = "4.1.1" description = "Automated security testing with bandit and flake8." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -467,14 +462,13 @@ flake8 = ">=5.0.0" [[package]] name = "flake8-bugbear" -version = "23.2.13" +version = "23.3.12" description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "flake8-bugbear-23.2.13.tar.gz", hash = "sha256:39259814a83f33c8409417ee12dd4050c9c0bb4c8707c12fc18ae62b2f3ddee1"}, - {file = "flake8_bugbear-23.2.13-py3-none-any.whl", hash = "sha256:f136bd0ca2684f101168bba2310dec541e11aa6b252260c17dcf58d18069a740"}, + {file = "flake8-bugbear-23.3.12.tar.gz", hash = "sha256:e3e7f74c8a49ad3794a7183353026dabd68c74030d5f46571f84c1fb0eb79363"}, + {file = "flake8_bugbear-23.3.12-py3-none-any.whl", hash = "sha256:beb5c7efcd7ccc2039ef66a77bb8db925e7be3531ff1cb4d0b7030d0e2113d72"}, ] [package.dependencies] @@ -484,11 +478,28 @@ flake8 = ">=3.0.0" [package.extras] dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] +[[package]] +name = "flake8-bugbear" +version = "23.5.9" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-bugbear-23.5.9.tar.gz", hash = "sha256:695c84a5d7da54eb35d79a7354dbaf3aaba80de32250608868aa1c85534b2a86"}, + {file = "flake8_bugbear-23.5.9-py3-none-any.whl", hash = "sha256:631fa927fbc799e8ca636b849dd7dfc304812287137b6ecb3277821f028bee40"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=6.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] + [[package]] name = "gitdb" version = "4.0.10" description = "Git Object Database" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -503,7 +514,6 @@ smmap = ">=3.0.1,<6" name = "gitpython" version = "3.1.31" description = "GitPython is a Python library used to interact with Git repositories" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -519,7 +529,6 @@ typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\"" name = "idna" version = "3.4" 
description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -531,7 +540,6 @@ files = [ name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -543,7 +551,6 @@ files = [ name = "importlib-metadata" version = "4.2.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -563,7 +570,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", name = "importlib-metadata" version = "6.6.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -584,7 +590,6 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -596,7 +601,6 @@ files = [ name = "isort" version = "5.11.5" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -614,7 +618,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -632,7 +635,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -650,7 +652,6 @@ i18n = ["Babel (>=2.7)"] name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -676,7 +677,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -736,7 +736,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -748,7 +747,6 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -760,7 +758,6 @@ files = [ name = "mypy" version = "1.3.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -808,7 +805,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -820,7 +816,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -832,7 +827,6 @@ files = [ name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -844,7 +838,6 @@ files = [ name = "pbr" version = "5.11.1" description = "Python Build Reasonableness" -category = "dev" optional = false python-versions = ">=2.6" files = [ @@ -856,7 +849,6 @@ files = [ name = "platformdirs" version = "3.5.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -875,7 +867,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -894,7 +885,6 @@ testing = ["pytest", "pytest-benchmark"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -906,7 +896,6 @@ files = [ name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -918,7 +907,6 @@ files = [ name = "pycodestyle" version = "2.9.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -926,11 +914,21 @@ files = [ {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] +[[package]] +name = "pycodestyle" +version = "2.10.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, +] + [[package]] name = "pyflakes" version = "2.5.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -938,11 +936,21 @@ files = [ {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] +[[package]] +name = "pyflakes" +version = "3.0.1" +description = "passive checker of Python programs" 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] + [[package]] name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -957,7 +965,6 @@ plugins = ["importlib-metadata"] name = "pyproject-api" version = "1.5.1" description = "API to interact with the python pyproject.toml based projects" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -977,7 +984,6 @@ testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=6)", "pytest (>=7.2.1 name = "pytest" version = "7.3.1" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1001,7 +1007,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.21.0" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1021,7 +1026,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1040,14 +1044,13 @@ histogram = ["pygal", "pygaljs"] [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -1061,7 +1064,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-describe" version = "2.1.0" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1076,7 +1078,6 @@ pytest = ">=4.6,<8" name = "pytest-timeout" version = "2.1.0" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1091,7 +1092,6 @@ pytest = ">=5.0.0" name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" files = [ @@ -1103,7 +1103,6 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1151,14 +1150,13 @@ files = [ [[package]] name = "requests" -version = "2.30.0" +version = "2.31.0" description = "Python HTTP for Humans." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "requests-2.30.0-py3-none-any.whl", hash = "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294"}, - {file = "requests-2.30.0.tar.gz", hash = "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] @@ -1175,7 +1173,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rich" version = "13.3.5" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1195,7 +1192,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "setuptools" version = "67.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1212,7 +1208,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1224,7 +1219,6 @@ files = [ name = "smmap" version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1236,7 +1230,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -1248,7 +1241,6 @@ files = [ name = "sphinx" version = "4.3.2" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1284,7 +1276,6 @@ test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] name = "sphinx" version = "6.2.1" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1318,14 +1309,13 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-rtd-theme" -version = "1.2.0" +version = "1.2.1" description = "Read the Docs theme for Sphinx" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "sphinx_rtd_theme-1.2.0-py2.py3-none-any.whl", hash = "sha256:f823f7e71890abe0ac6aaa6013361ea2696fc8d3e1fa798f463e82bdb77eeff2"}, - {file = "sphinx_rtd_theme-1.2.0.tar.gz", hash = "sha256:a0d8bd1a2ed52e0b338cbe19c4b2eef3c5e7a048769753dac6a9f059c7b641b8"}, + {file = "sphinx_rtd_theme-1.2.1-py2.py3-none-any.whl", hash = "sha256:2cc9351176cbf91944ce44cefd4fab6c3b76ac53aa9e15d6db45a3229ad7f866"}, + {file = "sphinx_rtd_theme-1.2.1.tar.gz", hash = "sha256:cf9a7dc0352cf179c538891cb28d6fad6391117d4e21c891776ab41dd6c8ff70"}, ] [package.dependencies] @@ -1340,7 +1330,6 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1356,7 +1345,6 @@ test = ["pytest"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1372,7 +1360,6 @@ test = ["pytest"] name = 
"sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1388,7 +1375,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1404,7 +1390,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1420,7 +1405,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1435,7 +1419,6 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1450,7 +1433,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1466,7 +1448,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1482,7 +1463,6 @@ test = ["pytest"] name = "stevedore" version = "3.5.2" description = "Manage dynamic plugins for Python applications" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1498,7 +1478,6 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1510,7 +1489,6 @@ files = [ name = "tox" version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1535,37 +1513,35 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.5.1" +version = "4.5.2" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tox-4.5.1-py3-none-any.whl", hash = "sha256:d25a2e6cb261adc489604fafd76cd689efeadfa79709965e965668d6d3f63046"}, - {file = "tox-4.5.1.tar.gz", hash = "sha256:5a2eac5fb816779dfdf5cb00fecbc27eb0524e4626626bb1de84747b24cacc56"}, + {file = "tox-4.5.2-py3-none-any.whl", hash = "sha256:f1a9541b292aa0449f6c7bb67dc0073f25f9086413c3922fe47f5168cbf7b2f4"}, + {file = "tox-4.5.2.tar.gz", hash = "sha256:ad87fb7a10ef476afb6eb7e408808057f42976ef0d30ad5fe023099ba493ce58"}, ] [package.dependencies] cachetools = ">=5.3" chardet = ">=5.1" colorama = ">=0.4.6" -filelock = ">=3.11" +filelock = ">=3.12" packaging = ">=23.1" -platformdirs = ">=3.2" +platformdirs = ">=3.5.1" pluggy = ">=1" pyproject-api = ">=1.5.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.21" +virtualenv = ">=20.23" [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-argparse-cli (>=1.11)", 
"sphinx-autodoc-typehints (>=1.23,!=1.23.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2022.1.2b11)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "devpi-process (>=0.3)", "diff-cover (>=7.5)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.14)", "psutil (>=5.9.4)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.2.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.40)"] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "devpi-process (>=0.3)", "diff-cover (>=7.5)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.17)", "psutil (>=5.9.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.40)"] [[package]] name = "typed-ast" version = "1.5.4" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1597,21 +1573,19 @@ files = [ [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.6.2" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = 
"sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, + {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, ] [[package]] name = "urllib3" version = "2.0.2" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1629,7 +1603,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "virtualenv" version = "20.4.7" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ @@ -1652,7 +1625,6 @@ testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", name = "virtualenv" version = "20.23.0" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1673,7 +1645,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1688,4 +1659,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "bbf381c53af408205d5513ac3af5a6f6ca4b1f08ab4c7c6863d93262fcdab2a6" +content-hash = "07e28b85afe797b936dde14b570501b255d800339a459c680b80c7abfd83ed3f" diff --git a/pyproject.toml b/pyproject.toml index c8b6ba18..baea196c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,11 +52,11 @@ optional = true pytest = "^7.3" pytest-asyncio = ">=0.21,<1" pytest-benchmark = "^4.0" -pytest-cov = "^4.0" +pytest-cov = "^4.1" pytest-describe = "^2.1" pytest-timeout = "^2.1" tox = [ - { version = ">=4.4,<5", python = ">=3.8" }, + { version = ">=4.5,<5", python = ">=3.8" }, { version = ">=3.28,<4", python = "<3.8" } ] @@ 
-66,11 +66,14 @@ optional = true [tool.poetry.group.lint.dependencies] black = "23.3.0" flake8 = [ - { version = ">=5,<7", python = ">=3.8" }, - { version = ">=5,<6", python = "<3.8" } + { version = ">=5,<7", python = ">=3.8.1" }, + { version = ">=5,<6", python = "<3.8.1" } ] flake8-bandit = "^4.1" -flake8-bugbear = "23.2.13" +flake8-bugbear = [ + { version = "23.5.9", python = ">=3.8.1" }, + { version = "23.3.12", python = "<3.8.1" }, +] isort = [ { version = "^5.12", python = ">=3.8" }, { version = "^5.11", python = "<3.8" } @@ -131,7 +134,7 @@ force_single_line = false lines_after_imports = 2 [tool.mypy] -python_version = "3.10" +python_version = "3.11" check_untyped_defs = true no_implicit_optional = true strict_optional = true @@ -164,5 +167,5 @@ timeout = "100" filterwarnings = "ignore::pytest.PytestConfigWarning" [build-system] -requires = ["poetry_core>=1.4,<2"] +requires = ["poetry_core>=1.6,<2"] build-backend = "poetry.core.masonry.api" diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index dd82a3dc..ee0ad0de 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -82,7 +82,7 @@ async def some_coroutine(): reason="Generator-based coroutines not supported any more since Python 3.11", ) def recognizes_an_old_style_coroutine(): # pragma: no cover - @asyncio.coroutine + @asyncio.coroutine # type: ignore def some_old_style_coroutine(): yield False # pragma: no cover diff --git a/tox.ini b/tox.ini index 12cb18c7..fb116dab 100644 --- a/tox.ini +++ b/tox.ini @@ -24,7 +24,7 @@ basepython = python3.11 deps = flake8>=6,<7 flake8-bandit>=4.1,<5 - flake8-bugbear==23.2.13 + flake8-bugbear==23.5.9 commands = flake8 src tests @@ -55,7 +55,7 @@ deps = pytest>=7.3,<8 pytest-asyncio>=0.21,<1 pytest-benchmark>=4,<5 - pytest-cov>=4,<5 + pytest-cov>=4.1,<5 pytest-describe>=2.1,<3 pytest-timeout>=2.1,<3 py37,py38,py39,pypy39: typing-extensions>=4.5,<5 From a71f6a90f2bf1c8f583ef6aa79e0696dc2a0c8bb Mon Sep 17 
00:00:00 2001 From: Christoph Zwerschke Date: Mon, 29 May 2023 12:05:46 +0200 Subject: [PATCH 099/230] incrementalDelivery: remove singleResult wrapper Replicates graphql/graphql-js@364cd71d1a26eb6f62661efd7fa399e91332d30d --- docs/conf.py | 5 +-- docs/modules/execution.rst | 2 +- src/graphql/__init__.py | 22 ++++++++++ src/graphql/execution/__init__.py | 8 +--- src/graphql/execution/execute.py | 71 ++++++++++++------------------- tests/execution/test_defer.py | 10 ++--- tests/execution/test_mutations.py | 6 +-- tests/execution/test_stream.py | 26 +++++------ 8 files changed, 76 insertions(+), 74 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 28ac1c71..c0129497 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -141,7 +141,7 @@ AsyncPayloadRecord AwaitableOrValue EnterLeaveVisitor -ExperimentalExecuteIncrementallyResults +ExperimentalIncrementalExecutionResults FormattedSourceLocation GraphQLAbstractType GraphQLErrorExtensions @@ -154,8 +154,7 @@ graphql.execution.map_async_iterable.MapAsyncIterable graphql.execution.Middleware graphql.execution.execute.DeferredFragmentRecord -graphql.execution.execute.ExperimentalExecuteMultipleResults -graphql.execution.execute.ExperimentalExecuteSingleResult +graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments graphql.execution.execute.StreamRecord graphql.language.lexer.EscapeSequence diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst index 82147930..5f378390 100644 --- a/docs/modules/execution.rst +++ b/docs/modules/execution.rst @@ -24,7 +24,7 @@ Execution .. autoclass:: FormattedExecutionResult :no-inherited-members: -.. autoclass:: ExperimentalExecuteIncrementallyResults +.. autoclass:: ExperimentalIncrementalExecutionResults .. 
autoclass:: InitialIncrementalExecutionResult diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index cb946aba..260c2e8c 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -435,7 +435,18 @@ # Types ExecutionContext, ExecutionResult, + ExperimentalIncrementalExecutionResults, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + IncrementalDeferResult, + IncrementalStreamResult, + IncrementalResult, FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, + FormattedSubsequentIncrementalExecutionResult, + FormattedIncrementalDeferResult, + FormattedIncrementalStreamResult, + FormattedIncrementalResult, # Subscription subscribe, create_source_event_stream, @@ -702,7 +713,18 @@ "get_variable_values", "ExecutionContext", "ExecutionResult", + "ExperimentalIncrementalExecutionResults", + "InitialIncrementalExecutionResult", + "SubsequentIncrementalExecutionResult", + "IncrementalDeferResult", + "IncrementalStreamResult", + "IncrementalResult", "FormattedExecutionResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "FormattedIncrementalDeferResult", + "FormattedIncrementalStreamResult", + "FormattedIncrementalResult", "Middleware", "MiddlewareManager", "subscribe", diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 6487c33d..f18047b5 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -16,9 +16,7 @@ experimental_subscribe_incrementally, ExecutionContext, ExecutionResult, - ExperimentalExecuteIncrementallyResults, - ExperimentalExecuteMultipleResults, - ExperimentalExecuteSingleResult, + ExperimentalIncrementalExecutionResults, InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, IncrementalDeferResult, @@ -48,9 +46,7 @@ "experimental_subscribe_incrementally", "ExecutionContext", "ExecutionResult", - "ExperimentalExecuteIncrementallyResults", - 
"ExperimentalExecuteMultipleResults", - "ExperimentalExecuteSingleResult", + "ExperimentalIncrementalExecutionResults", "InitialIncrementalExecutionResult", "SubsequentIncrementalExecutionResult", "IncrementalDeferResult", diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index fe2bef87..bb161756 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -104,9 +104,7 @@ async def anext(iterator: AsyncIterator) -> Any: "StreamRecord", "ExecutionResult", "ExecutionContext", - "ExperimentalExecuteIncrementallyResults", - "ExperimentalExecuteMultipleResults", - "ExperimentalExecuteSingleResult", + "ExperimentalIncrementalExecutionResults", "FormattedExecutionResult", "FormattedIncrementalDeferResult", "FormattedIncrementalResult", @@ -600,24 +598,13 @@ class StreamArguments(NamedTuple): label: Optional[str] -class ExperimentalExecuteSingleResult(NamedTuple): - """Execution result when retrieved at once.""" - - single_result: ExecutionResult - - -class ExperimentalExecuteMultipleResults(NamedTuple): +class ExperimentalIncrementalExecutionResults(NamedTuple): """Execution results when retrieved incrementally.""" initial_result: InitialIncrementalExecutionResult subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] -ExperimentalExecuteIncrementallyResults = Union[ - ExperimentalExecuteSingleResult, ExperimentalExecuteMultipleResults -] - - Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] @@ -2012,15 +1999,15 @@ def execute( execution_context_class, is_awaitable, ) - if isinstance(result, ExperimentalExecuteSingleResult): - return result.single_result - if isinstance(result, ExperimentalExecuteMultipleResults): + if isinstance(result, ExecutionResult): + return result + if isinstance(result, ExperimentalIncrementalExecutionResults): raise GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS) async def await_result() -> Any: awaited_result = await result # type: ignore - if 
isinstance(awaited_result, ExperimentalExecuteSingleResult): - return awaited_result.single_result + if isinstance(awaited_result, ExecutionResult): + return awaited_result return ExecutionResult( None, errors=[GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)] ) @@ -2041,16 +2028,16 @@ def experimental_execute_incrementally( middleware: Optional[Middleware] = None, execution_context_class: Optional[Type[ExecutionContext]] = None, is_awaitable: Optional[Callable[[Any], bool]] = None, -) -> AwaitableOrValue[ExperimentalExecuteIncrementallyResults]: +) -> AwaitableOrValue[Union[ExecutionResult, ExperimentalIncrementalExecutionResults]]: """Execute GraphQL operation incrementally (internal implementation). Implements the "Executing requests" section of the GraphQL specification, including `@defer` and `@stream` as proposed in https://github.com/graphql/graphql-spec/pull/742 - This function returns an awaitable of an ExperimentalExecuteIncrementallyResults - object. This object either contains a single ExecutionResult as - `single_result`, or an `initial_result` and a stream of `subsequent_results`. + This function returns an awaitable that is either a single ExecutionResult or + an ExperimentalIncrementalExecutionResults object, containing an `initialResult` + and a stream of `subsequent_results`. """ if execution_context_class is None: execution_context_class = ExecutionContext @@ -2073,16 +2060,14 @@ def experimental_execute_incrementally( # Return early errors if execution context failed. 
if isinstance(context, list): - return ExperimentalExecuteSingleResult( - single_result=ExecutionResult(None, errors=context) - ) + return ExecutionResult(None, errors=context) return execute_impl(context) def execute_impl( context: ExecutionContext, -) -> AwaitableOrValue[ExperimentalExecuteIncrementallyResults]: +) -> AwaitableOrValue[Union[ExecutionResult, ExperimentalIncrementalExecutionResults]]: """Execute GraphQL operation (internal implementation).""" # Return a possible coroutine object that will eventually yield the data described # by the "Response" section of the GraphQL specification. @@ -2108,7 +2093,7 @@ async def await_result() -> Any: await result, errors # type: ignore ) if context.subsequent_payloads: - return ExperimentalExecuteMultipleResults( + return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( initial_result.data, initial_result.errors, @@ -2116,18 +2101,16 @@ async def await_result() -> Any: ), subsequent_results=context.yield_subsequent_payloads(), ) - return ExperimentalExecuteSingleResult(single_result=initial_result) + return initial_result except GraphQLError as error: errors.append(error) - return ExperimentalExecuteSingleResult( - single_result=build_response(None, errors) - ) + return build_response(None, errors) return await_result() initial_result = build_response(result, errors) # type: ignore if context.subsequent_payloads: - return ExperimentalExecuteMultipleResults( + return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( initial_result.data, initial_result.errors, @@ -2135,12 +2118,10 @@ async def await_result() -> Any: ), subsequent_results=context.yield_subsequent_payloads(), ) - return ExperimentalExecuteSingleResult(single_result=initial_result) + return initial_result except GraphQLError as error: errors.append(error) - return ExperimentalExecuteSingleResult( - single_result=build_response(None, errors) - ) + return 
build_response(None, errors) def assume_not_awaitable(_value: Any) -> bool: @@ -2192,12 +2173,14 @@ def execute_sync( ) # Assert that the execution was synchronous. - if isawaitable(result) or isinstance(result, ExperimentalExecuteMultipleResults): + if isawaitable(result) or isinstance( + result, ExperimentalIncrementalExecutionResults + ): if isawaitable(result): ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() raise RuntimeError("GraphQL execution failed to complete synchronously.") - return cast(ExperimentalExecuteSingleResult, result).single_result + return cast(ExecutionResult, result) def handle_field_error( @@ -2489,7 +2472,9 @@ async def await_result() -> Any: async def ensure_async_iterable( - some_execution_result: ExperimentalExecuteIncrementallyResults, + some_execution_result: Union[ + ExecutionResult, ExperimentalIncrementalExecutionResults + ], ) -> AsyncGenerator[ Union[ ExecutionResult, @@ -2498,8 +2483,8 @@ async def ensure_async_iterable( ], None, ]: - if isinstance(some_execution_result, ExperimentalExecuteSingleResult): - yield some_execution_result.single_result + if isinstance(some_execution_result, ExecutionResult): + yield some_execution_result else: yield some_execution_result.initial_result async for result in some_execution_result.subsequent_results: diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 58e2ab7b..241a53b3 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -6,8 +6,8 @@ from graphql.error import GraphQLError from graphql.execution import ( ExecutionContext, - ExperimentalExecuteMultipleResults, - ExperimentalExecuteSingleResult, + ExecutionResult, + ExperimentalIncrementalExecutionResults, IncrementalDeferResult, InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, @@ -93,14 +93,14 @@ async def resolve_null_async(_obj, _info) -> None: async def complete(document: DocumentNode, root_value: Any = None) -> Any: result = 
experimental_execute_incrementally(schema, document, root_value) - if isinstance(result, ExperimentalExecuteMultipleResults): + if isinstance(result, ExperimentalIncrementalExecutionResults): results: List[Any] = [result.initial_result.formatted] async for patch in result.subsequent_results: results.append(patch.formatted) return results - assert isinstance(result, ExperimentalExecuteSingleResult) - return result.single_result.formatted + assert isinstance(result, ExecutionResult) + return result.formatted def modified_args(args: Dict[str, Any], **modifications: Any) -> Dict[str, Any]: diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 2a39d57b..022b4900 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -4,7 +4,7 @@ from pytest import mark from graphql.execution import ( - ExperimentalExecuteMultipleResults, + ExperimentalIncrementalExecutionResults, execute, execute_sync, experimental_execute_incrementally, @@ -234,7 +234,7 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): ) patches: List[Any] = [] - assert isinstance(mutation_result, ExperimentalExecuteMultipleResults) + assert isinstance(mutation_result, ExperimentalIncrementalExecutionResults) patches.append(mutation_result.initial_result.formatted) async for patch in mutation_result.subsequent_results: patches.append(patch.formatted) @@ -305,7 +305,7 @@ async def mutation_with_defer_is_not_executed_serially(): ) patches: List[Any] = [] - assert isinstance(mutation_result, ExperimentalExecuteMultipleResults) + assert isinstance(mutation_result, ExperimentalIncrementalExecutionResults) patches.append(mutation_result.initial_result.formatted) async for patch in mutation_result.subsequent_results: patches.append(patch.formatted) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index be624027..ed235baa 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -6,8 
+6,8 @@ from graphql.error import GraphQLError from graphql.execution import ( ExecutionContext, - ExperimentalExecuteMultipleResults, - ExperimentalExecuteSingleResult, + ExecutionResult, + ExperimentalIncrementalExecutionResults, IncrementalStreamResult, experimental_execute_incrementally, ) @@ -92,14 +92,14 @@ async def complete(document: DocumentNode, root_value: Any = None) -> Any: if isinstance(result, Awaitable): result = await result - if isinstance(result, ExperimentalExecuteMultipleResults): + if isinstance(result, ExperimentalIncrementalExecutionResults): results: List[Any] = [result.initial_result.formatted] async for patch in result.subsequent_results: results.append(patch.formatted) return results - assert isinstance(result, ExperimentalExecuteSingleResult) - return result.single_result.formatted + assert isinstance(result, ExecutionResult) + return result.formatted async def complete_async( @@ -108,7 +108,7 @@ async def complete_async( result = experimental_execute_incrementally(schema, document, root_value) assert isinstance(result, Awaitable) result = await result - assert isinstance(result, ExperimentalExecuteMultipleResults) + assert isinstance(result, ExperimentalIncrementalExecutionResults) class IteratorResult: """Iterator result with formatted output.""" @@ -1435,7 +1435,7 @@ async def iterable(_info): }, ) - assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result @@ -1569,7 +1569,7 @@ async def get_friends(_info): }, ) - assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result @@ -1649,7 +1649,7 @@ async def get_friends(_info): }, ) - assert isinstance(execute_result, 
ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result @@ -1728,7 +1728,7 @@ async def get_friends(_info): }, ) - assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result @@ -1800,7 +1800,7 @@ async def iterable(_info): execute_result = await experimental_execute_incrementally( # type: ignore schema, document, {"friendList": iterable} ) - assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result @@ -1846,7 +1846,7 @@ async def __anext__(self): execute_result = await experimental_execute_incrementally( # type: ignore schema, document, {"friendList": iterable} ) - assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result @@ -1888,7 +1888,7 @@ async def iterable(_info): execute_result = await experimental_execute_incrementally( # type: ignore schema, document, {"friendList": iterable} ) - assert isinstance(execute_result, ExperimentalExecuteMultipleResults) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) iterator = execute_result.subsequent_results result1 = execute_result.initial_result From 6d7ccdb35221d1406663e9a6c04dc9de1be72141 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 29 May 2023 13:09:48 +0200 Subject: [PATCH 100/230] incrementalDelivery: fix iterable streaming with errors Replicates graphql/graphql-js@065a343ad1f1701b94796980a5453a3d8e243da7 --- 
src/graphql/execution/execute.py | 2 +- tests/execution/test_stream.py | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index bb161756..c62cae92 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1702,7 +1702,6 @@ def execute_stream_field( label, item_path, None, parent_context, self ) completed_item: Any - completed_items: Any try: try: if self.is_awaitable(item): @@ -1771,6 +1770,7 @@ async def await_completed_item() -> Any: async_payload_record.add_items(None) return async_payload_record + completed_items: AwaitableOrValue[Optional[List[Any]]] if self.is_awaitable(complete_item): async def await_completed_items() -> Optional[List[Any]]: diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index ed235baa..7edb3845 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -992,10 +992,7 @@ async def handles_error_thrown_in_complete_value_after_initial_count_is_reached( ) async def scalar_list(_info): - await sleep(0) - yield friends[0].name - await sleep(0) - yield {} + return [friends[0].name, {}] result = await complete(document, {"scalarList": scalar_list}) assert result == [ From 8f66d137001639df3b0f338b3bbfddca45b499f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristj=C3=A1n=20Valur=20J=C3=B3nsson?= Date: Sun, 4 Jun 2023 10:21:01 +0000 Subject: [PATCH 101/230] Turn MapAsyncIterable into an AsyncGenerator (#199) --- docs/conf.py | 2 +- docs/modules/execution.rst | 2 - pyproject.toml | 2 +- src/graphql/__init__.py | 4 +- src/graphql/execution/__init__.py | 4 +- src/graphql/execution/execute.py | 13 +- ...flatten_async_iterable.py => iterators.py} | 35 +- src/graphql/execution/map_async_iterable.py | 118 ---- tests/execution/test_customize.py | 8 +- .../execution/test_flatten_async_iterable.py | 2 +- tests/execution/test_map_async_iterable.py | 525 ++---------------- 
tests/execution/test_subscribe.py | 49 ++ 12 files changed, 160 insertions(+), 604 deletions(-) rename src/graphql/execution/{flatten_async_iterable.py => iterators.py} (50%) delete mode 100644 src/graphql/execution/map_async_iterable.py diff --git a/docs/conf.py b/docs/conf.py index c0129497..118db97e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -151,7 +151,7 @@ Middleware asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches -graphql.execution.map_async_iterable.MapAsyncIterable +graphql.execution.map_async_iterable.map_async_iterable graphql.execution.Middleware graphql.execution.execute.DeferredFragmentRecord graphql.execution.execute.ExperimentalIncrementalExecutionResults diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst index 5f378390..535dffbd 100644 --- a/docs/modules/execution.rst +++ b/docs/modules/execution.rst @@ -57,8 +57,6 @@ Execution .. autofunction:: create_source_event_stream -.. autoclass:: MapAsyncIterable - .. autoclass:: Middleware .. 
autoclass:: MiddlewareManager diff --git a/pyproject.toml b/pyproject.toml index baea196c..d7c2a0d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "graphql-core" version = "3.3.0a2" -description = """ +description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" license = "MIT" diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index 260c2e8c..d4805cda 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -450,7 +450,7 @@ # Subscription subscribe, create_source_event_stream, - MapAsyncIterable, + map_async_iterable, # Middleware Middleware, MiddlewareManager, @@ -729,7 +729,7 @@ "MiddlewareManager", "subscribe", "create_source_event_stream", - "MapAsyncIterable", + "map_async_iterable", "validate", "ValidationContext", "ValidationRule", diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index f18047b5..ee48b68b 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -30,7 +30,7 @@ FormattedIncrementalResult, Middleware, ) -from .map_async_iterable import MapAsyncIterable +from .iterators import map_async_iterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -58,7 +58,7 @@ "FormattedIncrementalDeferResult", "FormattedIncrementalStreamResult", "FormattedIncrementalResult", - "MapAsyncIterable", + "map_async_iterable", "Middleware", "MiddlewareManager", "get_argument_values", diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index bb161756..37f488fe 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -71,8 +71,7 @@ is_object_type, ) from .collect_fields import FieldsAndPatches, collect_fields, collect_subfields -from .flatten_async_iterable import flatten_async_iterable -from .map_async_iterable import MapAsyncIterable +from 
.iterators import flatten_async_iterable, map_async_iterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -1650,7 +1649,7 @@ async def callback(payload: Any) -> AsyncGenerator: await result if isawaitable(result) else result # type: ignore ) - return flatten_async_iterable(MapAsyncIterable(result_or_stream, callback)) + return flatten_async_iterable(map_async_iterable(result_or_stream, callback)) def execute_deferred_fragment( self, @@ -2350,18 +2349,20 @@ def subscribe( if isinstance(result, ExecutionResult): return result if isinstance(result, AsyncIterable): - return MapAsyncIterable(result, ensure_single_execution_result) + return map_async_iterable(result, ensure_single_execution_result) async def await_result() -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: result_or_iterable = await result # type: ignore if isinstance(result_or_iterable, AsyncIterable): - return MapAsyncIterable(result_or_iterable, ensure_single_execution_result) + return map_async_iterable( + result_or_iterable, ensure_single_execution_result + ) return result_or_iterable return await_result() -def ensure_single_execution_result( +async def ensure_single_execution_result( result: Union[ ExecutionResult, InitialIncrementalExecutionResult, diff --git a/src/graphql/execution/flatten_async_iterable.py b/src/graphql/execution/iterators.py similarity index 50% rename from src/graphql/execution/flatten_async_iterable.py rename to src/graphql/execution/iterators.py index 7c0e0721..c3479175 100644 --- a/src/graphql/execution/flatten_async_iterable.py +++ b/src/graphql/execution/iterators.py @@ -1,4 +1,14 @@ -from typing import AsyncGenerator, AsyncIterable, TypeVar, Union +from __future__ import annotations # Python < 3.10 + +from typing import ( + Any, + AsyncGenerator, + AsyncIterable, + Awaitable, + Callable, + TypeVar, + Union, +) try: @@ -15,10 +25,11 @@ async def aclosing(thing): T = TypeVar("T") +V 
= TypeVar("V") AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] -__all__ = ["flatten_async_iterable"] +__all__ = ["flatten_async_iterable", "map_async_iterable"] async def flatten_async_iterable( @@ -34,3 +45,23 @@ async def flatten_async_iterable( async with aclosing(sub_iterator) as items: # type: ignore async for item in items: yield item + + +async def map_async_iterable( + iterable: AsyncIterable[T], callback: Callable[[T], Awaitable[V]] +) -> AsyncGenerator[V, None]: + """Map an AsyncIterable over a callback function. + + Given an AsyncIterable and an async callback callable, return an AsyncGenerator + which produces values mapped via calling the callback. + If the inner iterator supports an `aclose()` method, it will be called when + the generator finishes or closes. + """ + + aiter = iterable.__aiter__() + try: + async for element in aiter: + yield await callback(element) + finally: + if hasattr(aiter, "aclose"): + await aiter.aclose() diff --git a/src/graphql/execution/map_async_iterable.py b/src/graphql/execution/map_async_iterable.py deleted file mode 100644 index 84bd3f4a..00000000 --- a/src/graphql/execution/map_async_iterable.py +++ /dev/null @@ -1,118 +0,0 @@ -from __future__ import annotations # Python < 3.10 - -from asyncio import CancelledError, Event, Task, ensure_future, wait -from concurrent.futures import FIRST_COMPLETED -from inspect import isasyncgen, isawaitable -from types import TracebackType -from typing import Any, AsyncIterable, Callable, Optional, Set, Type, Union, cast - - -__all__ = ["MapAsyncIterable"] - - -# noinspection PyAttributeOutsideInit -class MapAsyncIterable: - """Map an AsyncIterable over a callback function. - - Given an AsyncIterable and a callback function, return an AsyncIterator which - produces values mapped via calling the callback function. - - When the resulting AsyncIterator is closed, the underlying AsyncIterable will also - be closed. 
- """ - - def __init__(self, iterable: AsyncIterable, callback: Callable) -> None: - self.iterator = iterable.__aiter__() - self.callback = callback - self._close_event = Event() - - def __aiter__(self) -> MapAsyncIterable: - """Get the iterator object.""" - return self - - async def __anext__(self) -> Any: - """Get the next value of the iterator.""" - if self.is_closed: - if not isasyncgen(self.iterator): - raise StopAsyncIteration - value = await self.iterator.__anext__() - else: - aclose = ensure_future(self._close_event.wait()) - anext = ensure_future(self.iterator.__anext__()) - - try: - pending: Set[Task] = ( - await wait([aclose, anext], return_when=FIRST_COMPLETED) - )[1] - except CancelledError: - # cancel underlying tasks and close - aclose.cancel() - anext.cancel() - await self.aclose() - raise # re-raise the cancellation - - for task in pending: - task.cancel() - - if aclose.done(): - raise StopAsyncIteration - - error = anext.exception() - if error: - raise error - - value = anext.result() - - result = self.callback(value) - - return await result if isawaitable(result) else result - - async def athrow( - self, - type_: Union[BaseException, Type[BaseException]], - value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - """Throw an exception into the asynchronous iterator.""" - if self.is_closed: - return - athrow = getattr(self.iterator, "athrow", None) - if athrow: - await athrow(type_, value, traceback) - else: - await self.aclose() - if value is None: - if traceback is None: - raise type_ - value = ( - type_ - if isinstance(value, BaseException) - else cast(Type[BaseException], type_)() - ) - if traceback is not None: - value = value.with_traceback(traceback) - raise value - - async def aclose(self) -> None: - """Close the iterator.""" - if not self.is_closed: - aclose = getattr(self.iterator, "aclose", None) - if aclose: - try: - await aclose() - except RuntimeError: - pass - self.is_closed = True - - 
@property - def is_closed(self) -> bool: - """Check whether the iterator is closed.""" - return self._close_event.is_set() - - @is_closed.setter - def is_closed(self, value: bool) -> None: - """Mark the iterator as closed.""" - if value: - self._close_event.set() - else: - self._close_event.clear() diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 3dbc6d00..5b839fc8 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,6 +1,8 @@ +from inspect import isasyncgen + from pytest import mark -from graphql.execution import ExecutionContext, MapAsyncIterable, execute, subscribe +from graphql.execution import ExecutionContext, execute, subscribe from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -77,7 +79,7 @@ async def custom_foo(): root_value=Root(), subscribe_field_resolver=lambda root, _info: root.custom_foo(), ) - assert isinstance(subscription, MapAsyncIterable) + assert isasyncgen(subscription) assert await anext(subscription) == ( {"foo": "FooValue"}, @@ -121,6 +123,6 @@ def resolve_foo(message, _info): context_value={}, execution_context_class=TestExecutionContext, ) - assert isinstance(subscription, MapAsyncIterable) + assert isasyncgen(subscription) assert await anext(subscription) == ({"foo": "bar"}, None) diff --git a/tests/execution/test_flatten_async_iterable.py b/tests/execution/test_flatten_async_iterable.py index de9c5499..49ead410 100644 --- a/tests/execution/test_flatten_async_iterable.py +++ b/tests/execution/test_flatten_async_iterable.py @@ -2,7 +2,7 @@ from pytest import mark, raises -from graphql.execution.flatten_async_iterable import flatten_async_iterable +from graphql.execution.iterators import flatten_async_iterable try: # pragma: no cover diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index 6406f7dd..1462645a 100644 --- 
a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -1,500 +1,93 @@ -import platform -import sys -from asyncio import CancelledError, Event, ensure_future, sleep - from pytest import mark, raises -from graphql.execution import MapAsyncIterable - +from graphql.execution import map_async_iterable -is_pypy = platform.python_implementation() == "PyPy" -try: # pragma: no cover - anext -except NameError: # pragma: no cover (Python < 3.10) - # noinspection PyShadowingBuiltins - async def anext(iterator): - """Return the next item from an async iterator.""" - return await iterator.__anext__() +async def map_doubles(x): + return x + x def describe_map_async_iterable(): @mark.asyncio - async def maps_over_async_generator(): - async def source(): - yield 1 - yield 2 - yield 3 - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - assert await anext(doubles) == 6 - with raises(StopAsyncIteration): - assert await anext(doubles) - - @mark.asyncio - async def maps_over_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - try: - return items.pop(0) - except IndexError: - raise StopAsyncIteration - - doubles = MapAsyncIterable(Iterable(), lambda x: x + x) - - values = [value async for value in doubles] - - assert not items - assert values == [2, 4, 6] - - @mark.asyncio - async def compatible_with_async_for(): - async def source(): - yield 1 - yield 2 - yield 3 - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - values = [value async for value in doubles] - - assert values == [2, 4, 6] - - @mark.asyncio - async def maps_over_async_values_with_async_function(): - async def source(): - yield 1 - yield 2 - yield 3 - - async def double(x): - return x + x - - doubles = MapAsyncIterable(source(), double) - - values = [value async for value in doubles] - - assert values == [2, 4, 6] - - 
@mark.asyncio - async def allows_returning_early_from_mapped_async_generator(): - async def source(): - yield 1 - yield 2 - yield 3 # pragma: no cover - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Early return - await doubles.aclose() - - # Subsequent next calls - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def allows_returning_early_from_mapped_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - try: - return items.pop(0) - except IndexError: # pragma: no cover - raise StopAsyncIteration - - doubles = MapAsyncIterable(Iterable(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Early return - await doubles.aclose() - - # Subsequent next calls - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def passes_through_early_return_from_async_values(): - async def source(): - try: - yield 1 - yield 2 - yield 3 # pragma: no cover - finally: - yield "Done" - yield "Last" - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Early return - await doubles.aclose() - - # Subsequent next calls may yield from finally block - assert await anext(doubles) == "LastLast" - with raises(GeneratorExit): - assert await anext(doubles) - - @mark.asyncio - async def allows_throwing_errors_through_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - try: - return items.pop(0) - except IndexError: # pragma: no cover - raise StopAsyncIteration + async def test_inner_close_called(): + """ + Test that a custom iterator with aclose() gets an aclose() 
call + when outer is closed + """ - doubles = MapAsyncIterable(Iterable(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Throw error - message = "allows throwing errors when mapping async iterable" - with raises(RuntimeError) as exc_info: - await doubles.athrow(RuntimeError(message)) - - assert str(exc_info.value) == message - - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def allows_throwing_errors_with_values_through_async_iterables(): - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - return 1 - - one = MapAsyncIterable(Iterable(), lambda x: x) - - assert await anext(one) == 1 - - # Throw error with value passed separately - try: - raise RuntimeError("Ouch") - except RuntimeError as error: - with raises(RuntimeError, match="Ouch") as exc_info: - await one.athrow(error.__class__, error) - - assert exc_info.value is error - assert exc_info.tb is error.__traceback__ + class Inner: + def __init__(self): + self.closed = False - with raises(StopAsyncIteration): - await anext(one) + async def aclose(self): + self.closed = True - @mark.asyncio - async def allows_throwing_errors_with_traceback_through_async_iterables(): - class Iterable: def __aiter__(self): return self async def __anext__(self): return 1 - one = MapAsyncIterable(Iterable(), lambda x: x) - - assert await anext(one) == 1 - - # Throw error with traceback passed separately - try: - raise RuntimeError("Ouch") - except RuntimeError as error: - with raises(RuntimeError) as exc_info: - await one.athrow(error.__class__, None, error.__traceback__) - - assert exc_info.tb and error.__traceback__ - assert exc_info.tb.tb_frame is error.__traceback__.tb_frame - - with raises(StopAsyncIteration): - await anext(one) + inner = Inner() + outer = map_async_iterable(inner, map_doubles) + it = outer.__aiter__() + assert await it.__anext__() == 2 + 
assert not inner.closed + await outer.aclose() + assert inner.closed @mark.asyncio - async def passes_through_caught_errors_through_async_generators(): - async def source(): - try: - yield 1 - yield 2 - yield 3 # pragma: no cover - except Exception as e: - yield e - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Throw error - await doubles.athrow(RuntimeError("ouch")) - - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def does_not_normally_map_over_thrown_errors(): - async def source(): - yield "Hello" - raise RuntimeError("Goodbye") - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - assert await anext(doubles) == "HelloHello" - - with raises(RuntimeError) as exc_info: - await anext(doubles) - - assert str(exc_info.value) == "Goodbye" - - @mark.asyncio - async def does_not_normally_map_over_externally_thrown_errors(): - async def source(): - yield "Hello" - - doubles = MapAsyncIterable(source(), lambda x: x + x) - - assert await anext(doubles) == "HelloHello" + async def test_inner_close_called_on_callback_err(): + """ + Test that a custom iterator with aclose() gets an aclose() call + when the callback errors and the outer iterator aborts. 
+ """ - with raises(RuntimeError) as exc_info: - await doubles.athrow(RuntimeError("Goodbye")) - - assert str(exc_info.value) == "Goodbye" - - @mark.asyncio - async def can_use_simple_iterable_instead_of_generator(): - async def source(): - yield 1 - yield 2 - yield 3 - - class Source: + class Inner: def __init__(self): - self.counter = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - self.counter += 1 - if self.counter > 3: - raise StopAsyncIteration - return self.counter - - def double(x): - return x + x - - for iterable in source, Source: - doubles = MapAsyncIterable(iterable(), double) - - await doubles.aclose() - - with raises(StopAsyncIteration): - await anext(doubles) - - doubles = MapAsyncIterable(iterable(), double) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - assert await anext(doubles) == 6 - - with raises(StopAsyncIteration): - await anext(doubles) - - doubles = MapAsyncIterable(iterable(), double) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 + self.closed = False - # Throw error - with raises(RuntimeError) as exc_info: - await doubles.athrow(RuntimeError("ouch")) - - assert str(exc_info.value) == "ouch" - - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - # no more exceptions should be thrown - if is_pypy: - # need to investigate why this is needed with PyPy - await doubles.aclose() # pragma: no cover - await doubles.athrow(RuntimeError("no more ouch")) - - with raises(StopAsyncIteration): - await anext(doubles) - - await doubles.aclose() - - doubles = MapAsyncIterable(iterable(), double) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - try: - raise ValueError("bad") - except ValueError: - tb = sys.exc_info()[2] - - # Throw error - with raises(ValueError): - await doubles.athrow(ValueError, None, tb) - - await sleep(0) - - @mark.asyncio - async def 
stops_async_iteration_on_close(): - async def source(): - yield 1 - await Event().wait() # Block forever - yield 2 # pragma: no cover - yield 3 # pragma: no cover - - singles = source() - doubles = MapAsyncIterable(singles, lambda x: x * 2) - - result = await anext(doubles) - assert result == 2 - - # Make sure it is blocked - doubles_future = ensure_future(anext(doubles)) - await sleep(0.05) - assert not doubles_future.done() - - # Unblock and watch StopAsyncIteration propagate - await doubles.aclose() - await sleep(0.05) - assert doubles_future.done() - assert isinstance(doubles_future.exception(), StopAsyncIteration) - - with raises(StopAsyncIteration): - await anext(singles) - - @mark.asyncio - async def can_unset_closed_state_of_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __init__(self): - self.is_closed = False + async def aclose(self): + self.closed = True def __aiter__(self): return self async def __anext__(self): - if self.is_closed: - raise StopAsyncIteration - try: - return items.pop(0) - except IndexError: - raise StopAsyncIteration - - async def aclose(self): - self.is_closed = True - - iterable = Iterable() - doubles = MapAsyncIterable(iterable, lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - assert not iterable.is_closed - await doubles.aclose() - assert iterable.is_closed - with raises(StopAsyncIteration): - await anext(iterable) - with raises(StopAsyncIteration): - await anext(doubles) - assert doubles.is_closed + return 1 - iterable.is_closed = False - doubles.is_closed = False - assert not doubles.is_closed + async def callback(v): + raise RuntimeError() - assert await anext(doubles) == 6 - assert not doubles.is_closed - assert not iterable.is_closed - with raises(StopAsyncIteration): - await anext(iterable) - with raises(StopAsyncIteration): - await anext(doubles) - assert not doubles.is_closed - assert not iterable.is_closed + inner = Inner() + outer = map_async_iterable(inner, 
callback) + with raises(RuntimeError): + async for _ in outer: + pass + assert inner.closed @mark.asyncio - async def can_cancel_async_iterable_while_waiting(): - class Iterable: - def __init__(self): - self.is_closed = False - self.value = 1 + async def test_inner_exit_on_callback_err(): + """ + Test that a custom iterator with aclose() gets an aclose() call + when the callback errors and the outer iterator aborts. + """ - def __aiter__(self): - return self - - async def __anext__(self): - try: - await sleep(0.5) - return self.value # pragma: no cover - except CancelledError: - self.value = -1 - raise + inner_exit = False - async def aclose(self): - self.is_closed = True - - iterable = Iterable() - doubles = MapAsyncIterable(iterable, lambda x: x + x) # pragma: no cover exit - cancelled = False - - async def iterator_task(): - nonlocal cancelled + async def inner(): + nonlocal inner_exit try: - async for _ in doubles: - assert False # pragma: no cover - except CancelledError: - cancelled = True - - task = ensure_future(iterator_task()) - await sleep(0.05) - assert not cancelled - assert not doubles.is_closed - assert iterable.value == 1 - assert not iterable.is_closed - task.cancel() - await sleep(0.05) - assert cancelled - assert iterable.value == -1 - assert doubles.is_closed - assert iterable.is_closed + while True: + yield 1 + except GeneratorExit: + inner_exit = True + + async def callback(v): + raise RuntimeError + + outer = map_async_iterable(inner(), callback) + with raises(RuntimeError): + async for _ in outer: + pass + assert inner_exit diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index d10edce6..b3364c9a 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1187,3 +1187,52 @@ async def resolve_message(message, _info): assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) + + @mark.asyncio + async def 
custom_async_iterator(): + class CustomAsyncIterator: + def __init__(self, events): + self.events = events + + def __aiter__(self): + return self + + async def __anext__(self): + await asyncio.sleep(0) + if not self.events: + raise StopAsyncIteration + return self.events.pop(0) + + def generate_messages(_obj, _info): + return CustomAsyncIterator(["Hello", "Dolly"]) + + async def resolve_message(message, _info): + await asyncio.sleep(0) + return message + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=resolve_message, + subscribe=generate_messages, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = subscribe(schema, document) + assert isinstance(subscription, AsyncIterator) + + msgs = [] + async for result in subscription: + assert result.errors is None + assert result.data is not None + msgs.append(result.data["newMessage"]) + assert msgs == ["Hello", "Dolly"] + if hasattr(subscription, "aclose"): + await subscription.aclose() From 1e77b8cf7d871c205f3ab96de33a0f295d24eb79 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 4 Jun 2023 14:39:35 +0200 Subject: [PATCH 102/230] Use a more tolerant aclosing() context manager --- src/graphql/execution/__init__.py | 3 +- .../{iterators.py => async_iterables.py} | 48 ++++---- src/graphql/execution/execute.py | 2 +- .../execution/test_flatten_async_iterable.py | 84 +++++++++++++- tests/execution/test_map_async_iterable.py | 72 +++++++----- tests/execution/test_subscribe.py | 103 ++++++++++++++---- 6 files changed, 241 insertions(+), 71 deletions(-) rename src/graphql/execution/{iterators.py => async_iterables.py} (53%) diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index ee48b68b..29aa1594 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -30,7 +30,7 @@ FormattedIncrementalResult, Middleware, ) 
-from .iterators import map_async_iterable +from .async_iterables import flatten_async_iterable, map_async_iterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -58,6 +58,7 @@ "FormattedIncrementalDeferResult", "FormattedIncrementalStreamResult", "FormattedIncrementalResult", + "flatten_async_iterable", "map_async_iterable", "Middleware", "MiddlewareManager", diff --git a/src/graphql/execution/iterators.py b/src/graphql/execution/async_iterables.py similarity index 53% rename from src/graphql/execution/iterators.py rename to src/graphql/execution/async_iterables.py index c3479175..1e142a99 100644 --- a/src/graphql/execution/iterators.py +++ b/src/graphql/execution/async_iterables.py @@ -1,5 +1,6 @@ from __future__ import annotations # Python < 3.10 +from contextlib import AbstractAsyncContextManager from typing import ( Any, AsyncGenerator, @@ -11,25 +12,34 @@ ) -try: - from contextlib import aclosing -except ImportError: # python < 3.10 - from contextlib import asynccontextmanager - - @asynccontextmanager # type: ignore - async def aclosing(thing): - try: - yield thing - finally: - await thing.aclose() - +__all__ = ["aclosing", "flatten_async_iterable", "map_async_iterable"] T = TypeVar("T") V = TypeVar("V") AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] -__all__ = ["flatten_async_iterable", "map_async_iterable"] + +class aclosing(AbstractAsyncContextManager): + """Async context manager for safely finalizing an async iterator or generator. + + Contrary to the function available via the standard library, this one silently + ignores the case that custom iterators have no aclose() method. 
+ """ + + def __init__(self, iterable: AsyncIterableOrGenerator[T]) -> None: + self.iterable = iterable + + async def __aenter__(self) -> AsyncIterableOrGenerator[T]: + return self.iterable + + async def __aexit__(self, *_exc_info: Any) -> None: + try: + aclose = self.iterable.aclose # type: ignore + except AttributeError: + pass # do not complain if the iterator has no aclose() method + else: + await aclose() async def flatten_async_iterable( @@ -48,7 +58,7 @@ async def flatten_async_iterable( async def map_async_iterable( - iterable: AsyncIterable[T], callback: Callable[[T], Awaitable[V]] + iterable: AsyncIterableOrGenerator[T], callback: Callable[[T], Awaitable[V]] ) -> AsyncGenerator[V, None]: """Map an AsyncIterable over a callback function. @@ -58,10 +68,6 @@ async def map_async_iterable( the generator finishes or closes. """ - aiter = iterable.__aiter__() - try: - async for element in aiter: - yield await callback(element) - finally: - if hasattr(aiter, "aclose"): - await aiter.aclose() + async with aclosing(iterable) as items: # type: ignore + async for item in items: + yield await callback(item) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index be2a426c..292d1ce1 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -70,8 +70,8 @@ is_non_null_type, is_object_type, ) +from .async_iterables import flatten_async_iterable, map_async_iterable from .collect_fields import FieldsAndPatches, collect_fields, collect_subfields -from .iterators import flatten_async_iterable, map_async_iterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values diff --git a/tests/execution/test_flatten_async_iterable.py b/tests/execution/test_flatten_async_iterable.py index 49ead410..f98c16d0 100644 --- a/tests/execution/test_flatten_async_iterable.py +++ b/tests/execution/test_flatten_async_iterable.py @@ -2,7 +2,7 @@ from pytest import mark, 
raises -from graphql.execution.iterators import flatten_async_iterable +from graphql.execution import flatten_async_iterable try: # pragma: no cover @@ -129,3 +129,85 @@ async def nested2() -> AsyncGenerator[float, None]: assert await anext(doubles) == 2.2 with raises(StopAsyncIteration): assert await anext(doubles) + + @mark.asyncio + async def closes_nested_async_iterators(): + closed = [] + + class Source: + def __init__(self): + self.counter = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.counter == 2: + raise StopAsyncIteration + self.counter += 1 + return Nested(self.counter) + + async def aclose(self): + nonlocal closed + closed.append(self.counter) + + class Nested: + def __init__(self, value): + self.value = value + self.counter = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.counter == 2: + raise StopAsyncIteration + self.counter += 1 + return self.value + self.counter / 10 + + async def aclose(self): + nonlocal closed + closed.append(self.value + self.counter / 10) + + doubles = flatten_async_iterable(Source()) + + result = [x async for x in doubles] + + assert result == [1.1, 1.2, 2.1, 2.2] + + assert closed == [1.2, 2.2, 2] + + @mark.asyncio + async def works_with_nested_async_iterators_that_have_no_close_method(): + class Source: + def __init__(self): + self.counter = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.counter == 2: + raise StopAsyncIteration + self.counter += 1 + return Nested(self.counter) + + class Nested: + def __init__(self, value): + self.value = value + self.counter = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + if self.counter == 2: + raise StopAsyncIteration + self.counter += 1 + return self.value + self.counter / 10 + + doubles = flatten_async_iterable(Source()) + + result = [x async for x in doubles] + + assert result == [1.1, 1.2, 2.1, 2.2] diff --git 
a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index 1462645a..e88b73d9 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -3,18 +3,22 @@ from graphql.execution import map_async_iterable -async def map_doubles(x): +try: # pragma: no cover + anext +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + +async def map_doubles(x: int) -> int: return x + x def describe_map_async_iterable(): @mark.asyncio - async def test_inner_close_called(): - """ - Test that a custom iterator with aclose() gets an aclose() call - when outer is closed - """ - + async def inner_is_closed_when_outer_is_closed(): class Inner: def __init__(self): self.closed = False @@ -30,19 +34,14 @@ async def __anext__(self): inner = Inner() outer = map_async_iterable(inner, map_doubles) - it = outer.__aiter__() - assert await it.__anext__() == 2 + iterator = outer.__aiter__() + assert await anext(iterator) == 2 assert not inner.closed await outer.aclose() assert inner.closed @mark.asyncio - async def test_inner_close_called_on_callback_err(): - """ - Test that a custom iterator with aclose() gets an aclose() call - when the callback errors and the outer iterator aborts. - """ - + async def inner_is_closed_on_callback_error(): class Inner: def __init__(self): self.closed = False @@ -62,17 +61,11 @@ async def callback(v): inner = Inner() outer = map_async_iterable(inner, callback) with raises(RuntimeError): - async for _ in outer: - pass + await anext(outer) assert inner.closed @mark.asyncio - async def test_inner_exit_on_callback_err(): - """ - Test that a custom iterator with aclose() gets an aclose() call - when the callback errors and the outer iterator aborts. 
- """ - + async def test_inner_exits_on_callback_error(): inner_exit = False async def inner(): @@ -88,6 +81,35 @@ async def callback(v): outer = map_async_iterable(inner(), callback) with raises(RuntimeError): - async for _ in outer: - pass + await anext(outer) assert inner_exit + + @mark.asyncio + async def inner_has_no_close_method_when_outer_is_closed(): + class Inner: + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + outer = map_async_iterable(Inner(), map_doubles) + iterator = outer.__aiter__() + assert await anext(iterator) == 2 + await outer.aclose() + + @mark.asyncio + async def inner_has_no_close_method_on_callback_error(): + class Inner: + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + async def callback(v): + raise RuntimeError() + + outer = map_async_iterable(Inner(), callback) + with raises(RuntimeError): + await anext(outer) diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index b3364c9a..ac5003c9 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1189,26 +1189,82 @@ async def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) @mark.asyncio - async def custom_async_iterator(): - class CustomAsyncIterator: - def __init__(self, events): - self.events = events + async def should_work_with_custom_async_iterator(): + class MessageGenerator: + resolved: List[str] = [] + + def __init__(self, values, _info): + self.values = values def __aiter__(self): return self async def __anext__(self): + if not self.values: + raise StopAsyncIteration + await asyncio.sleep(0) + return self.values.pop(0) + + @classmethod + async def resolve(cls, message, _info): await asyncio.sleep(0) - if not self.events: + cls.resolved.append(message) + return message + "!" 
+ + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=MessageGenerator.resolve, + subscribe=MessageGenerator, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = subscribe(schema, document, ["Hello", "Dolly"]) + assert isinstance(subscription, AsyncIterator) + + assert [result async for result in subscription] == [ + ({"newMessage": "Hello!"}, None), + ({"newMessage": "Dolly!"}, None), + ] + + assert MessageGenerator.resolved == ["Hello", "Dolly"] + + await subscription.aclose() # type: ignore + + @mark.asyncio + async def should_close_custom_async_iterator(): + class MessageGenerator: + closed: bool = False + resolved: List[str] = [] + + def __init__(self, values, _info): + self.values = values + + def __aiter__(self): + return self + + async def __anext__(self): + if not self.values: raise StopAsyncIteration - return self.events.pop(0) + await asyncio.sleep(0) + return self.values.pop(0) - def generate_messages(_obj, _info): - return CustomAsyncIterator(["Hello", "Dolly"]) + @classmethod + async def resolve(cls, message, _info): + await asyncio.sleep(0) + cls.resolved.append(message) + return message + "!" 
- async def resolve_message(message, _info): - await asyncio.sleep(0) - return message + @classmethod + async def aclose(cls): + cls.closed = True schema = GraphQLSchema( query=QueryType, @@ -1217,22 +1273,25 @@ async def resolve_message(message, _info): { "newMessage": GraphQLField( GraphQLString, - resolve=resolve_message, - subscribe=generate_messages, + resolve=MessageGenerator.resolve, + subscribe=MessageGenerator, ) }, ), ) document = parse("subscription { newMessage }") - subscription = subscribe(schema, document) + subscription = subscribe(schema, document, ["Hello", "Dolly"]) assert isinstance(subscription, AsyncIterator) - msgs = [] - async for result in subscription: - assert result.errors is None - assert result.data is not None - msgs.append(result.data["newMessage"]) - assert msgs == ["Hello", "Dolly"] - if hasattr(subscription, "aclose"): - await subscription.aclose() + assert not MessageGenerator.closed + + assert [result async for result in subscription] == [ + ({"newMessage": "Hello!"}, None), + ({"newMessage": "Dolly!"}, None), + ] + + assert MessageGenerator.closed + assert MessageGenerator.resolved == ["Hello", "Dolly"] + + await subscription.aclose() From 56ea5dd5604e630d9bacff89c07a2009eb6d010b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 4 Jun 2023 15:49:24 +0200 Subject: [PATCH 103/230] Add some more tests for map_async_iterable Add back some of the original tests and use similar names in tests. --- src/graphql/execution/async_iterables.py | 4 +- tests/execution/test_map_async_iterable.py | 313 +++++++++++++++++---- 2 files changed, 266 insertions(+), 51 deletions(-) diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py index 1e142a99..ac476fc0 100644 --- a/src/graphql/execution/async_iterables.py +++ b/src/graphql/execution/async_iterables.py @@ -62,8 +62,8 @@ async def map_async_iterable( ) -> AsyncGenerator[V, None]: """Map an AsyncIterable over a callback function. 
- Given an AsyncIterable and an async callback callable, return an AsyncGenerator - which produces values mapped via calling the callback. + Given an AsyncIterable and an async callback function, return an AsyncGenerator + that produces values mapped via calling the callback function. If the inner iterator supports an `aclose()` method, it will be called when the generator finishes or closes. """ diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index e88b73d9..e5d2312d 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -12,104 +12,319 @@ async def anext(iterator): return await iterator.__anext__() -async def map_doubles(x: int) -> int: +async def double(x: int) -> int: + """Test callback that doubles the input value.""" return x + x +async def throw(_x: int) -> int: + """Test callback that raises a RuntimeError.""" + raise RuntimeError("Ouch") + + def describe_map_async_iterable(): @mark.asyncio - async def inner_is_closed_when_outer_is_closed(): - class Inner: - def __init__(self): - self.closed = False + async def maps_over_async_generator(): + async def source(): + yield 1 + yield 2 + yield 3 - async def aclose(self): - self.closed = True + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + assert await anext(doubles) == 6 + with raises(StopAsyncIteration): + assert await anext(doubles) + + @mark.asyncio + async def maps_over_async_iterable(): + items = [1, 2, 3] + + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + try: + return items.pop(0) + except IndexError: + raise StopAsyncIteration + + doubles = map_async_iterable(Iterable(), double) + + values = [value async for value in doubles] + + assert not items + assert values == [2, 4, 6] + + @mark.asyncio + async def compatible_with_async_for(): + async def source(): + yield 1 + yield 2 + yield 3 + + 
doubles = map_async_iterable(source(), double) + + values = [value async for value in doubles] + + assert values == [2, 4, 6] + + @mark.asyncio + async def allows_returning_early_from_mapped_async_generator(): + async def source(): + yield 1 + yield 2 + yield 3 # pragma: no cover + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Early return + await doubles.aclose() + + # Subsequent next calls + with raises(StopAsyncIteration): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def allows_returning_early_from_mapped_async_iterable(): + items = [1, 2, 3] + + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + try: + return items.pop(0) + except IndexError: # pragma: no cover + raise StopAsyncIteration + + doubles = map_async_iterable(Iterable(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Early return + await doubles.aclose() + + # Subsequent next calls + with raises(StopAsyncIteration): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def allows_throwing_errors_through_async_iterable(): + items = [1, 2, 3] + + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + try: + return items.pop(0) + except IndexError: # pragma: no cover + raise StopAsyncIteration + + doubles = map_async_iterable(Iterable(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Throw error + message = "allows throwing errors when mapping async iterable" + with raises(RuntimeError) as exc_info: + await doubles.athrow(RuntimeError(message)) + + assert str(exc_info.value) == message + + with raises(StopAsyncIteration): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def 
allows_throwing_errors_with_values_through_async_iterables(): + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + one = map_async_iterable(Iterable(), double) + + assert await anext(one) == 2 + + # Throw error with value passed separately + try: + raise RuntimeError("Ouch") + except RuntimeError as error: + with raises(RuntimeError, match="Ouch") as exc_info: + await one.athrow(error.__class__, error) + + assert exc_info.value is error + assert exc_info.tb is error.__traceback__ + + with raises(StopAsyncIteration): + await anext(one) + + @mark.asyncio + async def allows_throwing_errors_with_traceback_through_async_iterables(): + class Iterable: def __aiter__(self): return self async def __anext__(self): return 1 - inner = Inner() - outer = map_async_iterable(inner, map_doubles) - iterator = outer.__aiter__() - assert await anext(iterator) == 2 - assert not inner.closed - await outer.aclose() - assert inner.closed + one = map_async_iterable(Iterable(), double) + + assert await anext(one) == 2 + + # Throw error with traceback passed separately + try: + raise RuntimeError("Ouch") + except RuntimeError as error: + with raises(RuntimeError) as exc_info: + await one.athrow(error.__class__, None, error.__traceback__) + + assert exc_info.tb and error.__traceback__ + assert exc_info.tb.tb_frame is error.__traceback__.tb_frame + + with raises(StopAsyncIteration): + await anext(one) @mark.asyncio - async def inner_is_closed_on_callback_error(): - class Inner: + async def does_not_map_over_thrown_errors(): + async def source(): + yield 1 + raise RuntimeError("Goodbye") + + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + + with raises(RuntimeError) as exc_info: + await anext(doubles) + + assert str(exc_info.value) == "Goodbye" + + @mark.asyncio + async def does_not_map_over_externally_thrown_errors(): + async def source(): + yield 1 + + doubles = map_async_iterable(source(), double) + + 
assert await anext(doubles) == 2 + + with raises(RuntimeError) as exc_info: + await doubles.athrow(RuntimeError("Goodbye")) + + assert str(exc_info.value) == "Goodbye" + + @mark.asyncio + async def iterable_is_closed_when_mapped_iterable_is_closed(): + class Iterable: def __init__(self): self.closed = False + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + async def aclose(self): self.closed = True + iterable = Iterable() + doubles = map_async_iterable(iterable, double) + assert await anext(doubles) == 2 + assert not iterable.closed + await doubles.aclose() + assert iterable.closed + with raises(StopAsyncIteration): + await anext(doubles) + + @mark.asyncio + async def iterable_is_closed_on_callback_error(): + class Iterable: + def __init__(self): + self.closed = False + def __aiter__(self): return self async def __anext__(self): return 1 - async def callback(v): - raise RuntimeError() + async def aclose(self): + self.closed = True - inner = Inner() - outer = map_async_iterable(inner, callback) - with raises(RuntimeError): - await anext(outer) - assert inner.closed + iterable = Iterable() + doubles = map_async_iterable(iterable, throw) + with raises(RuntimeError, match="Ouch"): + await anext(doubles) + assert iterable.closed + with raises(StopAsyncIteration): + await anext(doubles) @mark.asyncio - async def test_inner_exits_on_callback_error(): - inner_exit = False + async def iterable_exits_on_callback_error(): + exited = False - async def inner(): - nonlocal inner_exit + async def iterable(): + nonlocal exited try: while True: yield 1 except GeneratorExit: - inner_exit = True + exited = True - async def callback(v): - raise RuntimeError - - outer = map_async_iterable(inner(), callback) - with raises(RuntimeError): - await anext(outer) - assert inner_exit + doubles = map_async_iterable(iterable(), throw) + with raises(RuntimeError, match="Ouch"): + await anext(doubles) + assert exited + with raises(StopAsyncIteration): + await 
anext(doubles) @mark.asyncio - async def inner_has_no_close_method_when_outer_is_closed(): - class Inner: + async def mapped_iterable_is_closed_when_iterable_cannot_be_closed(): + class Iterable: def __aiter__(self): return self async def __anext__(self): return 1 - outer = map_async_iterable(Inner(), map_doubles) - iterator = outer.__aiter__() - assert await anext(iterator) == 2 - await outer.aclose() + doubles = map_async_iterable(Iterable(), double) + assert await anext(doubles) == 2 + await doubles.aclose() + with raises(StopAsyncIteration): + await anext(doubles) @mark.asyncio - async def inner_has_no_close_method_on_callback_error(): - class Inner: + async def ignores_that_iterable_cannot_be_closed_on_callback_error(): + class Iterable: def __aiter__(self): return self async def __anext__(self): return 1 - async def callback(v): - raise RuntimeError() - - outer = map_async_iterable(Inner(), callback) - with raises(RuntimeError): - await anext(outer) + doubles = map_async_iterable(Iterable(), throw) + with raises(RuntimeError, match="Ouch"): + await anext(doubles) + with raises(StopAsyncIteration): + await anext(doubles) From a46cc8489ddc7ec46b8d4c9e21bac10ddbc2fc8a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 4 Jun 2023 17:48:49 +0200 Subject: [PATCH 104/230] Remove expected warnings from tests more reliably --- tests/execution/test_subscribe.py | 4 ++ tests/execution/test_sync.py | 17 ++++-- tests/fixtures/__init__.py | 11 ++++ tests/pyutils/test_is_awaitable.py | 56 +++++++++++-------- .../test_assert_equal_awaitables_or_values.py | 8 ++- 5 files changed, 66 insertions(+), 30 deletions(-) diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index ac5003c9..3d14c260 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -33,6 +33,7 @@ GraphQLString, ) +from ..fixtures import cleanup from ..utils.assert_equal_awaitables_or_values import assert_equal_awaitables_or_values @@ 
-405,6 +406,9 @@ async def async_fn(obj, info): assert is_awaitable(result) assert await result == expected_result + del result + cleanup() + @mark.asyncio async def resolves_to_an_error_for_subscription_resolver_errors(): expected_result = ( diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index d397129c..872186fc 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -1,4 +1,3 @@ -from gc import collect from inspect import isawaitable from pytest import mark, raises @@ -9,6 +8,8 @@ from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.validation import validate +from ..fixtures import cleanup + def describe_execute_synchronously_when_possible(): def _resolve_sync(root_value, _info): @@ -91,6 +92,8 @@ async def throws_if_encountering_async_execution_with_check_sync(): ) msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." + del exc_info + cleanup() @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -108,9 +111,8 @@ async def throws_if_encountering_async_operation_without_check_sync(): } ], ) - # garbage collect coroutine in order to not postpone the warning del result - collect() + cleanup() @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -129,6 +131,8 @@ async def throws_if_encountering_async_iterable_execution_with_check_sync(): ) msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." + del exc_info + cleanup() @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -145,6 +149,8 @@ async def throws_if_encountering_async_iterable_execution_without_check_sync(): execute_sync(schema, document=parse(doc), root_value="rootValue") msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." 
+ del exc_info + cleanup() def describe_graphql_sync(): def reports_errors_raised_during_schema_validation(): @@ -192,6 +198,8 @@ async def throws_if_encountering_async_operation_with_check_sync(): graphql_sync(schema, doc, "rootValue", check_sync=True) msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." + del exc_info + cleanup() @mark.asyncio @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -209,6 +217,5 @@ async def throws_if_encountering_async_operation_without_check_sync(): } ], ) - # garbage collect coroutine in order to not postpone the warning del result - collect() + cleanup() diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index 2d216f5c..be23672c 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -1,11 +1,13 @@ """Fixtures for graphql tests""" import json +from gc import collect from os.path import dirname, join from pytest import fixture __all__ = [ + "cleanup", "kitchen_sink_query", "kitchen_sink_sdl", "big_schema_sdl", @@ -13,6 +15,15 @@ ] +def cleanup(rounds=5): + """Run garbage collector. + + This can be used to remove coroutines that were not awaited after running tests. 
+ """ + for _generation in range(rounds): + collect() + + def read_graphql(name): path = join(dirname(__file__), name + ".graphql") return open(path, encoding="utf-8").read() diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index ee0ad0de..2847e0f2 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -61,49 +61,59 @@ def some_generator(): assert not is_awaitable(some_generator()) def declines_a_coroutine_function(): - async def some_coroutine(): + async def some_async_function(): return True # pragma: no cover - assert not isawaitable(some_coroutine) - assert not is_awaitable(some_coroutine) + assert not isawaitable(some_async_function) + assert not is_awaitable(some_async_function) @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def recognizes_a_coroutine_object(): - async def some_coroutine(): - return False # pragma: no cover + async def some_async_function(): + return True + + some_coroutine = some_async_function() + + assert isawaitable(some_coroutine) + assert is_awaitable(some_coroutine) - assert isawaitable(some_coroutine()) - assert is_awaitable(some_coroutine()) + assert await some_coroutine is True @mark.filterwarnings("ignore::Warning") # Deprecation and Runtime warnings @mark.skipif( python_version >= (3, 11), reason="Generator-based coroutines not supported any more since Python 3.11", ) - def recognizes_an_old_style_coroutine(): # pragma: no cover + async def recognizes_an_old_style_coroutine(): # pragma: no cover @asyncio.coroutine # type: ignore - def some_old_style_coroutine(): - yield False # pragma: no cover + def some_function(): + yield True - assert is_awaitable(some_old_style_coroutine()) - assert is_awaitable(some_old_style_coroutine()) + some_old_style_coroutine = some_function() + assert is_awaitable(some_old_style_coroutine) + assert is_awaitable(some_old_style_coroutine) @mark.asyncio - @mark.filterwarnings("ignore:.* was 
never awaited:RuntimeWarning") async def recognizes_a_future_object(): - async def some_coroutine(): - return False # pragma: no cover + async def some_async_function(): + return True - some_future = asyncio.ensure_future(some_coroutine()) + some_coroutine = some_async_function() + some_future = asyncio.ensure_future(some_coroutine) assert is_awaitable(some_future) assert is_awaitable(some_future) - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") - def declines_an_async_generator(): - async def some_async_generator(): - yield True # pragma: no cover + assert await some_future is True + + @mark.asyncio + async def declines_an_async_generator(): + async def some_async_generator_function(): + yield True + + some_async_generator = some_async_generator_function() + + assert not isawaitable(some_async_generator) + assert not is_awaitable(some_async_generator) - assert not isawaitable(some_async_generator()) - assert not is_awaitable(some_async_generator()) + assert await some_async_generator.__anext__() is True diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py index 7718e7a3..ce202baf 100644 --- a/tests/utils/test_assert_equal_awaitables_or_values.py +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -38,13 +38,17 @@ async def test_value(value): ) @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_when_given_mixture_of_equal_values_and_awaitables(): async def test_value(): return {"test": "test"} + value1 = await test_value() + value2 = test_value() + with raises( AssertionError, match=r"Received an invalid mixture of promises and values\.", ): - await assert_equal_awaitables_or_values(await test_value(), test_value()) + await assert_equal_awaitables_or_values(value1, value2) + + assert await value2 == value1 From 1a62f3023adc64711bad1f4db744fa70b76b05df Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 4 Jun 
2023 20:25:26 +0200 Subject: [PATCH 105/230] Use is_awaitable more consistently --- src/graphql/execution/execute.py | 9 ++++----- src/graphql/graphql.py | 8 ++++---- src/graphql/pyutils/is_awaitable.py | 6 +++--- src/graphql/pyutils/simple_pub_sub.py | 5 +++-- tests/benchmarks/test_async_iterable.py | 4 ++-- tests/execution/test_abstract.py | 4 ++-- tests/execution/test_sync.py | 5 ++--- tests/pyutils/test_async_reduce.py | 11 +++++------ tests/pyutils/test_simple_pub_sub.py | 7 +++---- tests/test_user_registry.py | 5 ++--- 10 files changed, 30 insertions(+), 34 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 292d1ce1..c9efd0a4 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -3,7 +3,6 @@ from asyncio import Event, as_completed, ensure_future, gather, shield, sleep, wait_for from collections.abc import Mapping from contextlib import suppress -from inspect import isawaitable from typing import ( Any, AsyncGenerator, @@ -1646,7 +1645,7 @@ def map_source_to_response( async def callback(payload: Any) -> AsyncGenerator: result = execute_impl(self.build_per_event_execution_context(payload)) return ensure_async_iterable( - await result if isawaitable(result) else result # type: ignore + await result if self.is_awaitable(result) else result # type: ignore ) return flatten_async_iterable(map_async_iterable(result_or_stream, callback)) @@ -2124,7 +2123,7 @@ async def await_result() -> Any: def assume_not_awaitable(_value: Any) -> bool: - """Replacement for isawaitable if everything is assumed to be synchronous.""" + """Replacement for is_awaitable if everything is assumed to be synchronous.""" return False @@ -2172,10 +2171,10 @@ def execute_sync( ) # Assert that the execution was synchronous. 
- if isawaitable(result) or isinstance( + if default_is_awaitable(result) or isinstance( result, ExperimentalIncrementalExecutionResults ): - if isawaitable(result): + if default_is_awaitable(result): ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() raise RuntimeError("GraphQL execution failed to complete synchronously.") diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index 3ec84062..28afe7dc 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -1,11 +1,11 @@ from asyncio import ensure_future -from inspect import isawaitable from typing import Any, Awaitable, Callable, Dict, Optional, Type, Union, cast from .error import GraphQLError from .execution import ExecutionContext, ExecutionResult, Middleware, execute from .language import Source, parse from .pyutils import AwaitableOrValue +from .pyutils import is_awaitable as default_is_awaitable from .type import ( GraphQLFieldResolver, GraphQLSchema, @@ -92,14 +92,14 @@ async def graphql( is_awaitable, ) - if isawaitable(result): + if default_is_awaitable(result): return await cast(Awaitable[ExecutionResult], result) return cast(ExecutionResult, result) def assume_not_awaitable(_value: Any) -> bool: - """Replacement for isawaitable if everything is assumed to be synchronous.""" + """Replacement for is_awaitable if everything is assumed to be synchronous.""" return False @@ -145,7 +145,7 @@ def graphql_sync( ) # Assert that the execution was synchronous. 
- if isawaitable(result): + if default_is_awaitable(result): ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() raise RuntimeError("GraphQL execution failed to complete synchronously.") diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index cc927f7f..d68a911d 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -15,10 +15,10 @@ def is_awaitable(value: Any) -> TypeGuard[Awaitable]: - """Return true if object can be passed to an ``await`` expression. + """Return True if object can be passed to an ``await`` expression. - Instead of testing if the object is an instance of abc.Awaitable, it checks - the existence of an `__await__` attribute. This is much faster. + Instead of testing whether the object is an instance of abc.Awaitable, we + check the existence of an `__await__` attribute. This is much faster. """ return ( # check for coroutine objects diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 52aab4b7..8bd6c7f6 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,9 +1,10 @@ from __future__ import annotations # Python < 3.10 from asyncio import Future, Queue, create_task, get_running_loop, sleep -from inspect import isawaitable from typing import Any, AsyncIterator, Callable, Optional, Set +from .is_awaitable import is_awaitable + __all__ = ["SimplePubSub", "SimplePubSubIterator"] @@ -25,7 +26,7 @@ def emit(self, event: Any) -> bool: """Emit an event.""" for subscriber in self.subscribers: result = subscriber(event) - if isawaitable(result): + if is_awaitable(result): create_task(result) # type: ignore return bool(self.subscribers) diff --git a/tests/benchmarks/test_async_iterable.py b/tests/benchmarks/test_async_iterable.py index 47b00446..2be53bf7 100644 --- a/tests/benchmarks/test_async_iterable.py +++ b/tests/benchmarks/test_async_iterable.py @@ -1,7 +1,7 @@ import asyncio 
-from inspect import isawaitable from graphql import ExecutionResult, build_schema, execute, parse +from graphql.pyutils import is_awaitable schema = build_schema("type Query { listField: [String] }") @@ -18,7 +18,7 @@ async def listField(info_): async def execute_async() -> ExecutionResult: result = execute(schema, document, Data()) - assert isawaitable(result) + assert is_awaitable(result) return await result diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index 51e3a55b..d8282176 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -1,10 +1,10 @@ -from inspect import isawaitable from typing import Any, NamedTuple, Optional from pytest import mark from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse +from graphql.pyutils import is_awaitable from graphql.type import ( GraphQLBoolean, GraphQLField, @@ -43,7 +43,7 @@ async def execute_query( result = (execute_sync if sync else execute)( schema, document, root_value ) # type: ignore - if not sync and isawaitable(result): + if not sync and is_awaitable(result): result = await result assert isinstance(result, ExecutionResult) return result diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index 872186fc..300eded1 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -1,10 +1,9 @@ -from inspect import isawaitable - from pytest import mark, raises from graphql import graphql_sync from graphql.execution import execute, execute_sync from graphql.language import parse +from graphql.pyutils import is_awaitable from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.validation import validate @@ -57,7 +56,7 @@ def does_not_return_an_awaitable_if_mutation_fields_are_all_synchronous(): async def returns_an_awaitable_if_any_field_is_asynchronous(): doc = "query Example { syncField, asyncField }" result = execute(schema, 
parse(doc), "rootValue") - assert isawaitable(result) + assert is_awaitable(result) assert await result == ( {"syncField": "rootValue", "asyncField": "rootValue"}, None, diff --git a/tests/pyutils/test_async_reduce.py b/tests/pyutils/test_async_reduce.py index 2e9144e6..15c47595 100644 --- a/tests/pyutils/test_async_reduce.py +++ b/tests/pyutils/test_async_reduce.py @@ -1,9 +1,8 @@ from functools import reduce -from inspect import isawaitable from pytest import mark -from graphql.pyutils import async_reduce +from graphql.pyutils import async_reduce, is_awaitable def describe_async_reduce(): @@ -25,7 +24,7 @@ def callback(accumulator, current_value): values = ["bar", "baz"] result = async_reduce(callback, values, "foo") - assert not isawaitable(result) + assert not is_awaitable(result) assert result == "foo-bar-baz" @mark.asyncio @@ -38,7 +37,7 @@ def callback(accumulator, current_value): values = ["bar", "baz"] result = async_reduce(callback, values, async_initial_value()) - assert isawaitable(result) + assert is_awaitable(result) assert await result == "foo-bar-baz" @mark.asyncio @@ -48,7 +47,7 @@ async def async_callback(accumulator, current_value): values = ["bar", "baz"] result = async_reduce(async_callback, values, "foo") - assert isawaitable(result) + assert is_awaitable(result) assert await result == "foo-bar-baz" @mark.asyncio @@ -60,5 +59,5 @@ async def async_callback(accumulator, current_value): return accumulator * current_value result = async_reduce(async_callback, range(6, 9), async_initial_value()) - assert isawaitable(result) + assert is_awaitable(result) assert await result == 42 diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 1c6f793b..093a6e61 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -1,9 +1,8 @@ from asyncio import sleep -from inspect import isawaitable from pytest import mark, raises -from graphql.pyutils import SimplePubSub +from 
graphql.pyutils import SimplePubSub, is_awaitable def describe_simple_pub_sub(): @@ -22,9 +21,9 @@ async def subscribe_async_iterator_mock(): # Read ahead i3 = await iterator.__anext__() - assert isawaitable(i3) + assert is_awaitable(i3) i4 = await iterator.__anext__() - assert isawaitable(i4) + assert is_awaitable(i4) # Publish assert pubsub.emit("Coconut") is True diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 2d2e876c..8f2879b7 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -7,7 +7,6 @@ from asyncio import create_task, sleep, wait from collections import defaultdict from enum import Enum -from inspect import isawaitable from typing import Any, AsyncIterable, Dict, List, NamedTuple, Optional from pytest import fixture, mark @@ -29,7 +28,7 @@ parse, subscribe, ) -from graphql.pyutils import SimplePubSub, SimplePubSubIterator +from graphql.pyutils import SimplePubSub, SimplePubSubIterator, is_awaitable class User(NamedTuple): @@ -157,7 +156,7 @@ async def subscribe_user(_root, info, id=None): """Subscribe to mutations of a specific user object or all user objects""" async_iterator = info.context["registry"].event_iterator(id) async for event in async_iterator: - yield await event if isawaitable(event) else event # pragma: no cover exit + yield await event if is_awaitable(event) else event # pragma: no cover exit # noinspection PyShadowingBuiltins,PyUnusedLocal From e94fe8175136e190bb29b937856dab3146ae852d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 4 Jun 2023 21:15:29 +0200 Subject: [PATCH 106/230] Include name in representation of nodes (#198) --- src/graphql/language/ast.py | 13 +++++++++++-- tests/language/test_ast.py | 29 ++++++++++++++++++++++++++++- 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index e57adb82..bbae00d2 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -359,8 
+359,17 @@ def __init__(self, **kwargs: Any) -> None: def __repr__(self) -> str: """Get a simple representation of the node.""" - name, loc = self.__class__.__name__, getattr(self, "loc", None) - return f"{name} at {loc}" if loc else name + rep = self.__class__.__name__ + if isinstance(self, NameNode): + rep += f"({self.value!r})" + else: + name = getattr(self, "name", None) + if name: + rep += f"(name={name.value!r})" + loc = getattr(self, "loc", None) + if loc: + rep += f" at {loc}" + return rep def __eq__(self, other: Any) -> bool: """Test whether two nodes are equal (recursively).""" diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py index 46619e40..76e8a66c 100644 --- a/tests/language/test_ast.py +++ b/tests/language/test_ast.py @@ -1,7 +1,8 @@ import weakref from copy import copy, deepcopy +from typing import Optional -from graphql.language import Location, Node, Source, Token, TokenKind +from graphql.language import Location, NameNode, Node, Source, Token, TokenKind from graphql.pyutils import inspect @@ -12,6 +13,13 @@ class SampleTestNode(Node): beta: int +class SampleNamedNode(Node): + __slots__ = "foo", "name" + + foo: str + name: Optional[str] + + def describe_token_class(): def initializes(): token = Token( @@ -160,6 +168,25 @@ def has_representation_with_loc(): node = SampleTestNode(alpha=1, beta=2, loc=3) assert repr(node) == "SampleTestNode at 3" + def has_representation_when_named(): + name_node = NameNode(value="baz") + node = SampleNamedNode(foo="bar", name=name_node) + assert repr(node) == "SampleNamedNode(name='baz')" + node = SampleNamedNode(alpha=1, beta=2, name=name_node, loc=3) + assert repr(node) == "SampleNamedNode(name='baz') at 3" + + def has_representation_when_named_but_name_is_none(): + node = SampleNamedNode(alpha=1, beta=2, name=None) + assert repr(node) == "SampleNamedNode" + node = SampleNamedNode(alpha=1, beta=2, name=None, loc=3) + assert repr(node) == "SampleNamedNode at 3" + + def 
has_special_representation_when_it_is_a_name_node(): + node = NameNode(value="foo") + assert repr(node) == "NameNode('foo')" + node = NameNode(value="foo", loc=3) + assert repr(node) == "NameNode('foo') at 3" + def can_check_equality(): node = SampleTestNode(alpha=1, beta=2) node2 = SampleTestNode(alpha=1, beta=2) From f7e937ca298d90bca74cc1c90904f585c516a0bb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 4 Jun 2023 21:49:58 +0200 Subject: [PATCH 107/230] Alpha release v3.3.0a3 with new features --- .bumpversion.cfg | 2 +- README.md | 22 ++-- docs/conf.py | 4 +- poetry.lock | 227 +++++++++++++++++++++-------------------- pyproject.toml | 2 +- src/graphql/version.py | 4 +- 6 files changed, 139 insertions(+), 122 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9cb8d30b..e22c7dc9 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a2 +current_version = 3.3.0a3 commit = False tag = False diff --git a/README.md b/README.md index bbe91db9..51001276 100644 --- a/README.md +++ b/README.md @@ -14,12 +14,20 @@ An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. -The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0. +The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js +version 16.6.0 and supports Python version 3.6 and newer. -You can also try out the latest alpha version 3.3.0a2 of GraphQL-core that is up-to-date with GraphQL.js version 17.0.0a1. -Please note that this new minor version of GraphQL-core does not support Python 3.7 anymore. +You can also try out the latest alpha version 3.3.0a3 of GraphQL-core +which is up-to-date with GraphQL.js version 17.0.0a2. +Please note that this new minor version of GraphQL-core does not support +Python 3.6 anymore. 
-Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Changes in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. Therefore, we recommend something like `=~ 3.2.0` as version specifier when including GraphQL-core as a dependency. +Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. +Changes in the major version of GraphQL.js are reflected in the minor version of +GraphQL-core instead. This means there can be breaking changes in the API +when the minor version changes, and only patch releases are fully backward compatible. +Therefore, we recommend something like `=~ 3.2.0` as version specifier +when including GraphQL-core as a dependency. ## Documentation @@ -129,9 +137,9 @@ ExecutionResult(data=None, errors=[GraphQLError( locations=[SourceLocation(line=1, column=3)])]) ``` -The `graphql_sync` function assumes that all resolvers return values synchronously. By -using coroutines as resolvers, you can also create results in an asynchronous fashion -with the `graphql` function. +The `graphql_sync` function assumes that all resolvers return values synchronously. +By using coroutines as resolvers, you can also create results in an asynchronous +fashion with the `graphql` function. ```python import asyncio diff --git a/docs/conf.py b/docs/conf.py index 118db97e..246db043 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,7 +51,7 @@ # General information about the project. project = 'GraphQL-core 3' -copyright = '2022, Christoph Zwerschke' +copyright = '2023, Christoph Zwerschke' author = 'Christoph Zwerschke' # The version info for the project you're documenting, acts as replacement for @@ -61,7 +61,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. 
-version = release = '3.3.0a2' +version = release = '3.3.0a3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/poetry.lock b/poetry.lock index a75dc897..041a7e5f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -286,62 +286,71 @@ files = [ [[package]] name = "coverage" -version = "7.2.6" +version = "7.2.7" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:496b86f1fc9c81a1cd53d8842ef712e950a4611bba0c42d33366a7b91ba969ec"}, - {file = "coverage-7.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbe6e8c0a9a7193ba10ee52977d4d5e7652957c1f56ccefed0701db8801a2a3b"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d06b721c2550c01a60e5d3093f417168658fb454e5dfd9a23570e9bffe39a1"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:77a04b84d01f0e12c66f16e69e92616442dc675bbe51b90bfb074b1e5d1c7fbd"}, - {file = "coverage-7.2.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35db06450272473eab4449e9c2ad9bc6a0a68dab8e81a0eae6b50d9c2838767e"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6727a0d929ff0028b1ed8b3e7f8701670b1d7032f219110b55476bb60c390bfb"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aac1d5fdc5378f6bac2c0c7ebe7635a6809f5b4376f6cf5d43243c1917a67087"}, - {file = "coverage-7.2.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9e4a5eb1bbc3675ee57bc31f8eea4cd7fb0cbcbe4912cf1cb2bf3b754f4a80"}, - {file = "coverage-7.2.6-cp310-cp310-win32.whl", hash = "sha256:71f739f97f5f80627f1fee2331e63261355fd1e9a9cce0016394b6707ac3f4ec"}, - {file = "coverage-7.2.6-cp310-cp310-win_amd64.whl", hash = 
"sha256:fde5c7a9d9864d3e07992f66767a9817f24324f354caa3d8129735a3dc74f126"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc7b667f8654376e9353dd93e55e12ce2a59fb6d8e29fce40de682273425e044"}, - {file = "coverage-7.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:697f4742aa3f26c107ddcb2b1784a74fe40180014edbd9adaa574eac0529914c"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541280dde49ce74a4262c5e395b48ea1207e78454788887118c421cb4ffbfcac"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7f1a8328eeec34c54f1d5968a708b50fc38d31e62ca8b0560e84a968fbf9a9"}, - {file = "coverage-7.2.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbd58eb5a2371bf160590f4262109f66b6043b0b991930693134cb617bc0169"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ae82c5f168d2a39a5d69a12a69d4dc23837a43cf2ca99be60dfe59996ea6b113"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f5440cdaf3099e7ab17a5a7065aed59aff8c8b079597b61c1f8be6f32fe60636"}, - {file = "coverage-7.2.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a6f03f87fea579d55e0b690d28f5042ec1368650466520fbc400e7aeaf09e995"}, - {file = "coverage-7.2.6-cp311-cp311-win32.whl", hash = "sha256:dc4d5187ef4d53e0d4c8eaf530233685667844c5fb0b855fea71ae659017854b"}, - {file = "coverage-7.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:c93d52c3dc7b9c65e39473704988602300e3cc1bad08b5ab5b03ca98bbbc68c1"}, - {file = "coverage-7.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42c692b55a647a832025a4c048007034fe77b162b566ad537ce65ad824b12a84"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7786b2fa7809bf835f830779ad285215a04da76293164bb6745796873f0942d"}, - {file = 
"coverage-7.2.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25bad4196104761bc26b1dae9b57383826542ec689ff0042f7f4f4dd7a815cba"}, - {file = "coverage-7.2.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2692306d3d4cb32d2cceed1e47cebd6b1d2565c993d6d2eda8e6e6adf53301e6"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:392154d09bd4473b9d11351ab5d63391f3d5d24d752f27b3be7498b0ee2b5226"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa079995432037b5e2ef5ddbb270bcd2ded9f52b8e191a5de11fe59a00ea30d8"}, - {file = "coverage-7.2.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d712cefff15c712329113b01088ba71bbcef0f7ea58478ca0bbec63a824844cb"}, - {file = "coverage-7.2.6-cp37-cp37m-win32.whl", hash = "sha256:004948e296149644d208964300cb3d98affc5211e9e490e9979af4030b0d6473"}, - {file = "coverage-7.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:c1d7a31603c3483ac49c1726723b0934f88f2c011c660e6471e7bd735c2fa110"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3436927d1794fa6763b89b60c896f9e3bd53212001026ebc9080d23f0c2733c1"}, - {file = "coverage-7.2.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44c9b9f1a245f3d0d202b1a8fa666a80b5ecbe4ad5d0859c0fb16a52d9763224"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e3783a286d5a93a2921396d50ce45a909aa8f13eee964465012f110f0cbb611"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cff6980fe7100242170092bb40d2b1cdad79502cd532fd26b12a2b8a5f9aee0"}, - {file = "coverage-7.2.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c534431153caffc7c495c3eddf7e6a6033e7f81d78385b4e41611b51e8870446"}, - {file = 
"coverage-7.2.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3062fd5c62df988cea9f2972c593f77fed1182bfddc5a3b12b1e606cb7aba99e"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6284a2005e4f8061c58c814b1600ad0074ccb0289fe61ea709655c5969877b70"}, - {file = "coverage-7.2.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:97729e6828643f168a2a3f07848e1b1b94a366b13a9f5aba5484c2215724edc8"}, - {file = "coverage-7.2.6-cp38-cp38-win32.whl", hash = "sha256:dc11b42fa61ff1e788dd095726a0aed6aad9c03d5c5984b54cb9e1e67b276aa5"}, - {file = "coverage-7.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:cbcc874f454ee51f158afd604a315f30c0e31dff1d5d5bf499fc529229d964dd"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d3cacc6a665221108ecdf90517a8028d07a2783df3417d12dcfef1c517e67478"}, - {file = "coverage-7.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:272ab31228a9df857ab5df5d67936d8861464dc89c5d3fab35132626e9369379"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a8723ccec4e564d4b9a79923246f7b9a8de4ec55fa03ec4ec804459dade3c4f"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5906f6a84b47f995cd1bf0aca1c72d591c55ee955f98074e93660d64dfc66eb9"}, - {file = "coverage-7.2.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c139b7ab3f0b15f9aad0a3fedef5a1f8c0b2bdc291d88639ca2c97d3682416"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a5ffd45c6b93c23a8507e2f436983015c6457aa832496b6a095505ca2f63e8f1"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4f3c7c19581d471af0e9cb49d928172cd8492cd78a2b7a4e82345d33662929bb"}, - {file = "coverage-7.2.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e8c0e79820cdd67978e1120983786422d279e07a381dbf89d03bbb23ec670a6"}, - 
{file = "coverage-7.2.6-cp39-cp39-win32.whl", hash = "sha256:13cde6bb0e58fb67d09e2f373de3899d1d1e866c5a9ff05d93615f2f54fbd2bb"}, - {file = "coverage-7.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:6b9f64526286255735847aed0221b189486e0b9ed943446936e41b7e44b08783"}, - {file = "coverage-7.2.6-pp37.pp38.pp39-none-any.whl", hash = "sha256:6babcbf1e66e46052442f10833cfc4a0d3554d8276aa37af8531a83ed3c1a01d"}, - {file = "coverage-7.2.6.tar.gz", hash = "sha256:2025f913f2edb0272ef15d00b1f335ff8908c921c8eb2013536fcaf61f5a683d"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = 
"coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = 
"sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = 
"sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -675,61 +684,61 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = 
"MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = 
"MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = 
"MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] @@ -1171,13 +1180,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.3.5" +version = "13.4.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.3.5-py3-none-any.whl", hash = "sha256:69cdf53799e63f38b95b9bf9c875f8c90e78dd62b2f00c13a911c7a3b9fa4704"}, - {file = "rich-13.3.5.tar.gz", hash = "sha256:2d11b9b8dd03868f09b4fffadc84a6a8cda574e40dc90821bd845720ebb8e89c"}, + {file = "rich-13.4.1-py3-none-any.whl", hash = "sha256:d204aadb50b936bf6b1a695385429d192bc1fdaf3e8b907e8e26f4c4e4b5bf75"}, + {file = "rich-13.4.1.tar.gz", hash = "sha256:76f6b65ea7e5c5d924ba80e322231d7cb5b5981aa60bfc1e694f1bc097fe6fe1"}, ] [package.dependencies] @@ -1573,13 +1582,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.6.2" +version = "4.6.3" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.2-py3-none-any.whl", hash = "sha256:3a8b36f13dd5fdc5d1b16fe317f5668545de77fa0b8e02006381fd49d731ab98"}, - {file = "typing_extensions-4.6.2.tar.gz", hash = "sha256:06006244c70ac8ee83fa8282cb188f697b8db25bc8b4df07be1873c43897060c"}, + {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, + {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, ] [[package]] diff --git a/pyproject.toml b/pyproject.toml index d7c2a0d7..a0aec64d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a2" +version = 
"3.3.0a3" description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 0a7c851d..bbe3b1fe 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -7,9 +7,9 @@ __all__ = ["version", "version_info", "version_js", "version_info_js"] -version = "3.3.0a2" +version = "3.3.0a3" -version_js = "17.0.0a1" +version_js = "17.0.0a2" _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)") From a6c15f38a51e4dbcefa48babb9a994306b0d1c83 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 5 Jun 2023 00:13:20 +0200 Subject: [PATCH 108/230] fix: allow async errors to bubble to AsyncIterable list items Replicates graphql/graphql-js@84797fb9f5ddae74e71d4c74082fb15a99dffdc5 --- src/graphql/execution/execute.py | 17 ++++++++++++++++- tests/execution/test_lists.py | 9 +++++++-- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index c9efd0a4..1734d12c 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1213,8 +1213,23 @@ async def complete_async_iterator_value( async_payload_record, ) if is_awaitable(completed_item): + # noinspection PyShadowingNames + async def catch_error( + completed_item: Awaitable[Any], field_path: Path + ) -> Any: + try: + return await completed_item + except Exception as raw_error: + error = located_error( + raw_error, field_nodes, field_path.as_list() + ) + handle_field_error(error, item_type, errors) + return None + + append_result(catch_error(completed_item, field_path)) append_awaitable(index) - append_result(completed_item) + else: + append_result(completed_item) except Exception as raw_error: append_result(None) error = located_error(raw_error, field_nodes, field_path.as_list()) diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 2eed7595..2558b719 100644 --- 
a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -9,6 +9,7 @@ GraphQLField, GraphQLFieldResolver, GraphQLList, + GraphQLNonNull, GraphQLObjectType, GraphQLResolveInfo, GraphQLSchema, @@ -155,7 +156,11 @@ async def _list_field( GraphQLList( GraphQLObjectType( "ObjectWrapper", - {"index": GraphQLField(GraphQLString, resolve=resolve)}, + { + "index": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=resolve + ) + }, ) ), resolve=_list_field, @@ -274,7 +279,7 @@ async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: return index assert await _complete_object_lists(resolve) == ( - {"listField": [{"index": "0"}, {"index": "1"}, {"index": None}]}, + {"listField": [{"index": "0"}, {"index": "1"}, None]}, [ { "message": "bad", From 0c93b8452eed38d4f800c7e71cf6f3f3758cd1c6 Mon Sep 17 00:00:00 2001 From: Evan Mays Date: Fri, 9 Jun 2023 15:13:10 -0700 Subject: [PATCH 109/230] Fix return type of assert_composite_type (#203) --- src/graphql/type/definition.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index e1a0a770..73e34e08 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1620,7 +1620,7 @@ def is_composite_type(type_: Any) -> TypeGuard[GraphQLCompositeType]: ) -def assert_composite_type(type_: Any) -> GraphQLType: +def assert_composite_type(type_: Any) -> GraphQLCompositeType: if not is_composite_type(type_): raise TypeError(f"Expected {type_} to be a GraphQL composite type.") return type_ From 4d240d48588ce61dd134c2db7202e4692032ebd6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 8 Feb 2024 21:45:05 +0100 Subject: [PATCH 110/230] Migrate from black, flake8 and isort to ruff Re-formatted and improved style to comply with additional rules. 
--- .bandit | 4 - .flake8 | 7 - .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 2 +- README.md | 7 +- poetry.lock | 1151 ++++++----------- pyproject.toml | 170 ++- src/graphql/error/graphql_error.py | 14 +- src/graphql/error/located_error.py | 15 +- src/graphql/error/syntax_error.py | 8 +- src/graphql/execution/async_iterables.py | 9 +- src/graphql/execution/collect_fields.py | 5 +- src/graphql/execution/execute.py | 191 +-- src/graphql/execution/middleware.py | 5 +- src/graphql/execution/values.py | 36 +- src/graphql/graphql.py | 6 +- src/graphql/language/ast.py | 30 +- src/graphql/language/block_string.py | 3 +- src/graphql/language/character_classes.py | 2 + src/graphql/language/directive_locations.py | 3 +- src/graphql/language/lexer.py | 43 +- src/graphql/language/location.py | 16 +- src/graphql/language/parser.py | 21 +- src/graphql/language/predicates.py | 3 +- src/graphql/language/print_location.py | 3 +- src/graphql/language/print_string.py | 6 +- src/graphql/language/printer.py | 3 +- src/graphql/language/source.py | 19 +- src/graphql/language/token_kind.py | 3 +- src/graphql/language/visitor.py | 36 +- src/graphql/pyutils/async_reduce.py | 3 +- src/graphql/pyutils/awaitable_or_value.py | 3 +- src/graphql/pyutils/cached_property.py | 3 +- src/graphql/pyutils/convert_case.py | 3 +- src/graphql/pyutils/description.py | 12 +- src/graphql/pyutils/did_you_mean.py | 3 +- src/graphql/pyutils/format_list.py | 6 +- src/graphql/pyutils/frozen_error.py | 2 + src/graphql/pyutils/group_by.py | 3 +- src/graphql/pyutils/identity_func.py | 3 +- src/graphql/pyutils/inspect.py | 33 +- src/graphql/pyutils/is_awaitable.py | 3 +- src/graphql/pyutils/is_iterable.py | 7 +- src/graphql/pyutils/merge_kwargs.py | 3 +- src/graphql/pyutils/natural_compare.py | 3 +- src/graphql/pyutils/path.py | 3 +- src/graphql/pyutils/print_path_list.py | 3 + src/graphql/pyutils/simple_pub_sub.py | 11 +- src/graphql/pyutils/suggestion_list.py | 5 +- src/graphql/pyutils/undefined.py | 
10 +- src/graphql/type/assert_name.py | 23 +- src/graphql/type/definition.py | 233 +++- src/graphql/type/directives.py | 22 +- src/graphql/type/introspection.py | 67 +- src/graphql/type/scalars.py | 59 +- src/graphql/type/schema.py | 52 +- src/graphql/type/validate.py | 11 +- src/graphql/utilities/ast_from_value.py | 9 +- src/graphql/utilities/ast_to_dict.py | 6 +- src/graphql/utilities/build_ast_schema.py | 5 +- src/graphql/utilities/build_client_schema.py | 57 +- src/graphql/utilities/coerce_input_value.py | 8 +- src/graphql/utilities/concat_ast.py | 3 +- src/graphql/utilities/extend_schema.py | 57 +- .../utilities/find_breaking_changes.py | 41 +- .../utilities/get_introspection_query.py | 3 +- src/graphql/utilities/get_operation_ast.py | 3 +- .../utilities/introspection_from_schema.py | 9 +- .../utilities/lexicographic_sort_schema.py | 12 +- src/graphql/utilities/print_schema.py | 29 +- src/graphql/utilities/separate_operations.py | 3 +- src/graphql/utilities/sort_value_node.py | 3 +- .../utilities/strip_ignored_characters.py | 16 +- src/graphql/utilities/type_comparators.py | 10 +- src/graphql/utilities/type_from_ast.py | 6 +- src/graphql/utilities/type_info.py | 24 +- src/graphql/utilities/value_from_ast.py | 8 +- .../utilities/value_from_ast_untyped.py | 8 +- src/graphql/validation/rules/__init__.py | 3 +- .../validation/rules/custom/no_deprecated.py | 3 +- .../rules/custom/no_schema_introspection.py | 3 +- .../rules/defer_stream_directive_label.py | 5 +- .../defer_stream_directive_on_root_field.py | 3 +- .../rules/executable_definitions.py | 3 +- .../rules/fields_on_correct_type.py | 6 +- .../rules/fragments_on_composite_types.py | 3 +- .../validation/rules/known_argument_names.py | 7 +- .../validation/rules/known_directives.py | 13 +- .../validation/rules/known_fragment_names.py | 3 +- .../validation/rules/known_type_names.py | 7 +- .../rules/lone_anonymous_operation.py | 5 +- .../rules/lone_schema_definition.py | 5 +- 
.../validation/rules/no_fragment_cycles.py | 5 +- .../rules/no_undefined_variables.py | 5 +- .../validation/rules/no_unused_fragments.py | 5 +- .../validation/rules/no_unused_variables.py | 5 +- .../rules/overlapping_fields_can_be_merged.py | 9 +- .../rules/possible_fragment_spreads.py | 3 +- .../rules/possible_type_extensions.py | 8 +- .../rules/provided_required_arguments.py | 7 +- src/graphql/validation/rules/scalar_leafs.py | 3 +- .../rules/single_field_subscriptions.py | 3 +- .../rules/stream_directive_on_list_field.py | 3 +- .../rules/unique_argument_definition_names.py | 3 +- .../validation/rules/unique_argument_names.py | 5 +- .../rules/unique_directive_names.py | 5 +- .../rules/unique_directives_per_location.py | 5 +- .../rules/unique_enum_value_names.py | 5 +- .../rules/unique_field_definition_names.py | 5 +- .../validation/rules/unique_fragment_names.py | 5 +- .../rules/unique_input_field_names.py | 5 +- .../rules/unique_operation_names.py | 5 +- .../rules/unique_operation_types.py | 11 +- .../validation/rules/unique_type_names.py | 5 +- .../validation/rules/unique_variable_names.py | 3 +- .../rules/values_of_correct_type.py | 5 +- .../rules/variables_are_input_types.py | 3 +- .../rules/variables_in_allowed_position.py | 5 +- src/graphql/validation/specified_rules.py | 3 +- src/graphql/validation/validate.py | 5 +- src/graphql/validation/validation_context.py | 7 +- src/graphql/version.py | 3 +- tests/benchmarks/test_async_iterable.py | 3 +- tests/benchmarks/test_build_client_schema.py | 3 +- tests/benchmarks/test_execution_async.py | 3 +- tests/benchmarks/test_execution_sync.py | 3 +- tests/error/test_graphql_error.py | 43 +- tests/execution/test_abstract.py | 15 +- tests/execution/test_customize.py | 12 +- tests/execution/test_defer.py | 46 +- tests/execution/test_directives.py | 1 - tests/execution/test_execution_result.py | 15 +- tests/execution/test_executor.py | 19 +- .../execution/test_flatten_async_iterable.py | 33 +- 
tests/execution/test_lists.py | 39 +- tests/execution/test_map_async_iterable.py | 91 +- tests/execution/test_middleware.py | 9 +- tests/execution/test_mutations.py | 13 +- tests/execution/test_nonnull.py | 24 +- tests/execution/test_parallel.py | 13 +- tests/execution/test_resolve.py | 8 +- tests/execution/test_schema.py | 6 +- tests/execution/test_stream.py | 104 +- tests/execution/test_subscribe.py | 111 +- tests/execution/test_sync.py | 37 +- tests/execution/test_union_interface.py | 9 +- tests/fixtures/__init__.py | 24 +- tests/language/test_ast.py | 20 +- tests/language/test_block_string_fuzz.py | 7 +- tests/language/test_character_classes.py | 3 +- tests/language/test_lexer.py | 23 +- tests/language/test_location.py | 18 +- tests/language/test_parser.py | 38 +- tests/language/test_predicates.py | 1 - tests/language/test_printer.py | 7 +- tests/language/test_schema_parser.py | 6 +- tests/language/test_schema_printer.py | 5 +- tests/language/test_source.py | 31 +- tests/language/test_visitor.py | 89 +- tests/pyutils/test_async_reduce.py | 11 +- tests/pyutils/test_description.py | 7 +- tests/pyutils/test_format_list.py | 7 +- tests/pyutils/test_group_by.py | 2 +- tests/pyutils/test_inspect.py | 18 +- tests/pyutils/test_is_awaitable.py | 13 +- tests/pyutils/test_is_iterable.py | 4 +- tests/pyutils/test_merge_kwargs.py | 1 - tests/pyutils/test_natural_compare.py | 1 - tests/pyutils/test_simple_pub_sub.py | 15 +- tests/pyutils/test_undefined.py | 12 +- tests/star_wars_data.py | 17 +- tests/star_wars_schema.py | 2 +- tests/test_docs.py | 47 +- tests/test_star_wars_query.py | 39 +- tests/test_user_registry.py | 42 +- tests/test_version.py | 1 - tests/type/test_assert_name.py | 21 +- tests/type/test_custom_scalars.py | 1 - tests/type/test_definition.py | 139 +- tests/type/test_directives.py | 44 +- tests/type/test_enum.py | 24 +- tests/type/test_extensions.py | 9 +- tests/type/test_predicate.py | 96 +- tests/type/test_scalars.py | 103 +- tests/type/test_schema.py | 
11 +- tests/type/test_validation.py | 41 +- tests/utilities/test_ast_from_value.py | 25 +- tests/utilities/test_ast_to_dict.py | 6 +- tests/utilities/test_build_ast_schema.py | 29 +- tests/utilities/test_build_client_schema.py | 45 +- tests/utilities/test_coerce_input_value.py | 7 +- tests/utilities/test_extend_schema.py | 18 +- .../utilities/test_get_introspection_query.py | 1 - .../test_introspection_from_schema.py | 11 +- tests/utilities/test_print_schema.py | 2 +- .../test_strip_ignored_characters.py | 5 +- .../test_strip_ignored_characters_fuzz.py | 44 +- tests/utilities/test_type_from_ast.py | 5 +- tests/utilities/test_type_info.py | 5 +- tests/utilities/test_value_from_ast.py | 10 +- tests/utils/__init__.py | 1 - .../assert_equal_awaitables_or_values.py | 1 - tests/utils/assert_matching_values.py | 1 - tests/utils/dedent.py | 1 - tests/utils/gen_fuzz_strings.py | 1 - .../test_assert_equal_awaitables_or_values.py | 14 +- tests/utils/test_assert_matching_values.py | 4 +- tests/validation/__init__.py | 5 +- tests/validation/harness.py | 1 - .../test_defer_stream_directive_label.py | 1 - ...st_defer_stream_directive_on_root_field.py | 1 - .../validation/test_executable_definitions.py | 1 - .../validation/test_fields_on_correct_type.py | 1 - .../test_fragments_on_composite_types.py | 1 - tests/validation/test_known_argument_names.py | 1 - tests/validation/test_known_directives.py | 1 - tests/validation/test_known_fragment_names.py | 1 - tests/validation/test_known_type_names.py | 1 - .../test_lone_anonymous_operation.py | 1 - .../validation/test_lone_schema_definition.py | 1 - tests/validation/test_no_fragment_cycles.py | 1 - .../test_no_schema_introspection.py | 1 - .../validation/test_no_undefined_variables.py | 1 - tests/validation/test_no_unused_fragments.py | 1 - tests/validation/test_no_unused_variables.py | 1 - .../test_overlapping_fields_can_be_merged.py | 1 - .../test_possible_fragment_spreads.py | 1 - .../test_possible_type_extensions.py | 1 - 
.../test_provided_required_arguments.py | 1 - tests/validation/test_scalar_leafs.py | 1 - .../test_single_field_subscriptions.py | 1 - .../test_stream_directive_on_list_field.py | 1 - .../test_unique_argument_definition_names.py | 1 - .../validation/test_unique_argument_names.py | 1 - .../validation/test_unique_directive_names.py | 1 - .../test_unique_directives_per_location.py | 1 - .../test_unique_enum_value_names.py | 1 - .../test_unique_field_definition_names.py | 1 - .../validation/test_unique_fragment_names.py | 1 - .../test_unique_input_field_names.py | 1 - .../validation/test_unique_operation_names.py | 1 - .../validation/test_unique_operation_types.py | 1 - tests/validation/test_unique_type_names.py | 1 - .../validation/test_unique_variable_names.py | 1 - tests/validation/test_validation.py | 7 +- .../validation/test_values_of_correct_type.py | 3 +- .../test_variables_are_input_types.py | 1 - .../test_variables_in_allowed_position.py | 1 - tox.ini | 24 +- 249 files changed, 2449 insertions(+), 2474 deletions(-) delete mode 100644 .bandit delete mode 100644 .flake8 diff --git a/.bandit b/.bandit deleted file mode 100644 index 0b9b0e68..00000000 --- a/.bandit +++ /dev/null @@ -1,4 +0,0 @@ -# unfortunately, flake8-bandit does not support pyproject.toml - -[bandit] -exclude = /tests diff --git a/.flake8 b/.flake8 deleted file mode 100644 index ad0b79f2..00000000 --- a/.flake8 +++ /dev/null @@ -1,7 +0,0 @@ -# unfortunately, flake8 does not support pyproject.toml - -[flake8] -ignore = E203,W503 -exclude = .git,.mypy_cache,.pytest_cache,.tox,.venv,__pycache__,build,dist,docs -max-line-length = 88 -per-file-ignores = tests/*:B011 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 12790703..8dcb79ef 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -22,4 +22,4 @@ jobs: - name: Run code quality tests with tox run: tox env: - TOXENV: black,flake8,isort,mypy,docs + TOXENV: ruff,mypy,docs diff --git 
a/.github/workflows/test.yml b/.github/workflows/test.yml index ae9062bf..b7050d6a 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -21,7 +21,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install "tox>=4.4,<5" "tox-gh-actions>=3.1,<4" + pip install "tox>=4.12,<5" "tox-gh-actions>=3.2,<4" - name: Run unit tests with tox run: tox diff --git a/README.md b/README.md index 51001276..5cf727d1 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # GraphQL-core 3 -GraphQL-core 3 is a Python 3.6+ port of [GraphQL.js](https://github.com/graphql/graphql-js), +GraphQL-core 3 is a Python 3.7+ port of [GraphQL.js](https://github.com/graphql/graphql-js), the JavaScript reference implementation for [GraphQL](https://graphql.org/), a query language for APIs created by Facebook. @@ -15,7 +15,7 @@ replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js -version 16.6.0 and supports Python version 3.6 and newer. +version 16.6.0 and supports Python version 3.7 and newer. You can also try out the latest alpha version 3.3.0a3 of GraphQL-core which is up-to-date with GraphQL.js version 17.0.0a2. 
@@ -196,13 +196,14 @@ Design goals for the GraphQL-core 3 library were: (and is now using TypeScript) * to use [black](https://github.com/ambv/black) to achieve a consistent code style while saving time and mental energy for more important matters + (we are now using [ruff](https://github.com/astral-sh/ruff) instead) * to replicate the complete Mocha-based test suite of GraphQL.js using [pytest](https://docs.pytest.org/) with [pytest-describe](https://pypi.org/project/pytest-describe/) Some restrictions (mostly in line with the design goals): -* requires Python 3.6 or newer +* requires Python 3.7 or newer * does not support some already deprecated methods and options of GraphQL.js * supports asynchronous operations only via async.io (does not support the additional executors in GraphQL-core) diff --git a/poetry.lock b/poetry.lock index 041a7e5f..fb2f90cd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -11,124 +11,22 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = "*" -files = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] - [[package]] name = "babel" -version = "2.12.1" +version = "2.14.0" description = "Internationalization utilities" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} -[[package]] -name 
= "bandit" -version = "1.7.5" -description = "Security oriented static analyser for python code." -optional = false -python-versions = ">=3.7" -files = [ - {file = "bandit-1.7.5-py3-none-any.whl", hash = "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549"}, - {file = "bandit-1.7.5.tar.gz", hash = "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} -GitPython = ">=1.0.1" -PyYAML = ">=5.3.1" -rich = "*" -stevedore = ">=1.20.0" - -[package.extras] -test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "tomli (>=1.1.0)"] -toml = ["tomli (>=1.1.0)"] -yaml = ["PyYAML"] - -[[package]] -name = "black" -version = "23.3.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.7" -files = [ - {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, - {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, - {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = 
"sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, - {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, - {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, - {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, - {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, - {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, - {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, - {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, - {file = 
"black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, - {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, - {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, - {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, - {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - [package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bump2version" @@ -143,136 +41,136 @@ files = [ [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = 
"sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, ] [[package]] name = "certifi" -version = "2023.5.7" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] name = "chardet" -version = "5.1.0" +version = "5.2.0" description = "Universal encoding detector for Python 3" optional = false python-versions = ">=3.7" files = [ - {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"}, - {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"}, + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, ] [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] -[[package]] -name = "click" -version = "8.1.3" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] - 
-[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [[package]] name = "colorama" version = "0.4.6" @@ -361,24 +259,13 @@ toml = ["tomli"] [[package]] name = "distlib" -version = "0.3.6" +version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, -] - -[[package]] -name = "docutils" -version = "0.17.1" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] [[package]] @@ -394,13 +281,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = 
"sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -408,141 +295,44 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.12.0" +version = "3.12.2" description = "A platform independent file lock." optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, - {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [package.extras] -docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage 
(>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] -name = "flake8" -version = "6.0.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, - {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.10.0,<2.11.0" -pyflakes = ">=3.0.0,<3.1.0" - -[[package]] -name = "flake8-bandit" -version = "4.1.1" -description = "Automated security testing with bandit and flake8." -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"}, - {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"}, -] - -[package.dependencies] -bandit = ">=1.7.3" -flake8 = ">=5.0.0" - -[[package]] -name = "flake8-bugbear" -version = "23.3.12" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "flake8-bugbear-23.3.12.tar.gz", hash = "sha256:e3e7f74c8a49ad3794a7183353026dabd68c74030d5f46571f84c1fb0eb79363"}, - {file = "flake8_bugbear-23.3.12-py3-none-any.whl", hash = "sha256:beb5c7efcd7ccc2039ef66a77bb8db925e7be3531ff1cb4d0b7030d0e2113d72"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=3.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "flake8-bugbear" -version = "23.5.9" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." optional = false -python-versions = ">=3.8.1" +python-versions = ">=3.8" files = [ - {file = "flake8-bugbear-23.5.9.tar.gz", hash = "sha256:695c84a5d7da54eb35d79a7354dbaf3aaba80de32250608868aa1c85534b2a86"}, - {file = "flake8_bugbear-23.5.9-py3-none-any.whl", hash = "sha256:631fa927fbc799e8ca636b849dd7dfc304812287137b6ecb3277821f028bee40"}, + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, ] -[package.dependencies] -attrs = ">=19.2.0" -flake8 = ">=6.0.0" - [package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = 
">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.31" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, - {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" -typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -558,32 +348,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "4.2.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.6" -files = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = 
"sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] - -[[package]] -name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -593,7 +364,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", 
"pytest-ruff"] [[package]] name = "iniconfig" @@ -606,49 +377,15 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "isort" -version = "5.11.5" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, - {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - -[[package]] -name = "isort" -version = "5.12.0" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -657,110 +394,73 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "markdown-it-py" -version = "2.2.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.7" -files = [ - {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, - {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" -typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = 
"MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, -] - -[[package]] -name = "mccabe" -version = "0.7.0" -description 
= "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -823,64 +523,57 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] -name = "pathspec" -version = "0.11.1" -description = "Utility library for gitignore style pattern matching of file paths." +name = "platformdirs" +version = "4.0.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, + {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, ] -[[package]] -name = "pbr" -version = "5.11.1" -description = "Python Build Reasonableness" -optional = false -python-versions = ">=2.6" -files = [ - {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, - {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, -] +[package.dependencies] +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "platformdirs" -version = "3.5.1" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.5", markers = "python_version < \"3.8\""} - [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.dependencies] @@ -912,92 +605,49 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = 
"sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] -[[package]] -name = "pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] - -[[package]] -name = "pycodestyle" -version = "2.10.0" -description = "Python style guide checker" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, - {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, -] - -[[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] - -[[package]] -name = "pyflakes" -version = "3.0.1" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, - {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, -] - [[package]] name = "pygments" -version = "2.15.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyproject-api" -version = "1.5.1" +version = "1.6.1" description = "API to interact with the python pyproject.toml based projects" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.5.1-py3-none-any.whl", hash = "sha256:4698a3777c2e0f6b624f8a4599131e2a25376d90fe8d146d7ac74c67c6f97c43"}, - {file = "pyproject_api-1.5.1.tar.gz", hash = "sha256:435f46547a9ff22cf4208ee274fca3e2869aeb062a4834adfc99a4dd64af3cf9"}, + {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, + {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, ] [package.dependencies] -packaging = ">=23" +packaging = ">=23.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -testing = ["covdefaults (>=2.2.2)", "importlib-metadata (>=6)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "virtualenv (>=20.17.1)", "wheel (>=0.38.4)"] +docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", 
"setuptools (>=68.1.2)", "wheel (>=0.41.2)"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1010,17 +660,17 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.21.0" +version = "0.21.1" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, - {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, ] [package.dependencies] @@ -1085,13 +735,13 @@ pytest = ">=4.6,<8" [[package]] name = "pytest-timeout" -version = "2.1.0" +version = "2.2.0" description = 
"pytest plugin to abort hanging tests" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, - {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, + {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, + {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, ] [package.dependencies] @@ -1099,62 +749,13 @@ pytest = ">=5.0.0" [[package]] name = "pytz" -version = "2023.3" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] - -[[package]] -name = "pyyaml" -version = "6.0" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = 
"PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = 
"PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -1179,40 +780,31 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] -name = "rich" -version = "13.4.1" -description = "Render rich text, tables, progress bars, syntax 
highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.4.1-py3-none-any.whl", hash = "sha256:d204aadb50b936bf6b1a695385429d192bc1fdaf3e8b907e8e26f4c4e4b5bf75"}, - {file = "rich-13.4.1.tar.gz", hash = "sha256:76f6b65ea7e5c5d924ba80e322231d7cb5b5981aa60bfc1e694f1bc097fe6fe1"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0,<3.0.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "setuptools" -version = "67.8.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" +name = "ruff" +version = "0.2.1" +description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = "sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"}, + {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c92db7101ef5bfc18e96777ed7bc7c822d545fa5977e90a585accac43d22f18a"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13471684694d41ae0f1e8e3a7497e14cd57ccb7dd72ae08d56a159d6c9c3e30e"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a11567e20ea39d1f51aebd778685582d4c56ccb082c1161ffc10f79bebe6df35"}, + {file = 
"ruff-0.2.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:00a818e2db63659570403e44383ab03c529c2b9678ba4ba6c105af7854008105"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be60592f9d218b52f03384d1325efa9d3b41e4c4d55ea022cd548547cc42cd2b"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd2288890b88e8aab4499e55148805b58ec711053588cc2f0196a44f6e3d855"}, + {file = "ruff-0.2.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ef052283da7dec1987bba8d8733051c2325654641dfe5877a4022108098683"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7022d66366d6fded4ba3889f73cd791c2d5621b2ccf34befc752cb0df70f5fad"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0a725823cb2a3f08ee743a534cb6935727d9e47409e4ad72c10a3faf042ad5ba"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0034d5b6323e6e8fe91b2a1e55b02d92d0b582d2953a2b37a67a2d7dedbb7acc"}, + {file = "ruff-0.2.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e5cb5526d69bb9143c2e4d2a115d08ffca3d8e0fddc84925a7b54931c96f5c02"}, + {file = "ruff-0.2.1-py3-none-win32.whl", hash = "sha256:6b95ac9ce49b4fb390634d46d6ece32ace3acdd52814671ccaf20b7f60adb232"}, + {file = "ruff-0.2.1-py3-none-win_amd64.whl", hash = "sha256:e3affdcbc2afb6f5bd0eb3130139ceedc5e3f28d206fe49f63073cb9e65988e0"}, + {file = "ruff-0.2.1-py3-none-win_arm64.whl", hash = "sha256:efababa8e12330aa94a53e90a81eb6e2d55f348bc2e71adbf17d9cad23c03ee6"}, + {file = "ruff-0.2.1.tar.gz", hash = "sha256:3b42b5d8677cd0c72b99fcaf068ffc62abb5a19e71b4a3b9cfa50658a0af02f1"}, ] -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", 
"sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "six" version = "1.16.0" @@ -1224,17 +816,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "smmap" -version = "5.0.0" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.6" -files = [ - {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, - {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, -] - [[package]] name = "snowballstemmer" version = "2.2.0" @@ -1248,27 +829,27 @@ files = [ [[package]] name = "sphinx" -version = "4.3.2" +version = "5.3.0" description = "Python documentation generator" optional = false python-versions = ">=3.6" files = [ - {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, - {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = 
"sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.18" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" requests = ">=2.5.0" -setuptools = "*" -snowballstemmer = ">=1.1" +snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" @@ -1278,8 +859,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", "types-pkg-resources", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx" @@ -1318,19 +899,19 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-rtd-theme" -version = "1.2.1" +version = "1.3.0" description = "Read the Docs theme for Sphinx" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "sphinx_rtd_theme-1.2.1-py2.py3-none-any.whl", hash = "sha256:2cc9351176cbf91944ce44cefd4fab6c3b76ac53aa9e15d6db45a3229ad7f866"}, - {file = "sphinx_rtd_theme-1.2.1.tar.gz", hash = "sha256:cf9a7dc0352cf179c538891cb28d6fad6391117d4e21c891776ab41dd6c8ff70"}, + {file = 
"sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, + {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, ] [package.dependencies] docutils = "<0.19" -sphinx = ">=1.6,<7" -sphinxcontrib-jquery = {version = ">=2.0.0,<3.0.0 || >3.0.0", markers = "python_version > \"3\""} +sphinx = ">=1.6,<8" +sphinxcontrib-jquery = ">=4,<5" [package.extras] dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] @@ -1468,21 +1049,6 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] -[[package]] -name = "stevedore" -version = "3.5.2" -description = "Manage dynamic plugins for Python applications" -optional = false -python-versions = ">=3.6" -files = [ - {file = "stevedore-3.5.2-py3-none-any.whl", hash = "sha256:fa2630e3d0ad3e22d4914aff2501445815b9a4467a6edc49387c667a38faf5bf"}, - {file = "stevedore-3.5.2.tar.gz", hash = "sha256:cf99f41fc0d5a4f185ca4d3d42b03be9011b0a1ec1a4ea1a282be1b4b306dcc2"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} -pbr = ">=2.0.0,<2.1.0 || >2.1.0" - [[package]] name = "tomli" version = "2.0.1" @@ -1522,84 +1088,101 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.5.2" +version = "4.10.0" description = "tox is a generic virtualenv management and test command line tool" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tox-4.5.2-py3-none-any.whl", hash = "sha256:f1a9541b292aa0449f6c7bb67dc0073f25f9086413c3922fe47f5168cbf7b2f4"}, - {file = "tox-4.5.2.tar.gz", hash = "sha256:ad87fb7a10ef476afb6eb7e408808057f42976ef0d30ad5fe023099ba493ce58"}, + {file = "tox-4.10.0-py3-none-any.whl", hash = "sha256:e4a1b1438955a6da548d69a52350054350cf6a126658c20943261c48ed6d4c92"}, + {file = 
"tox-4.10.0.tar.gz", hash = "sha256:e041b2165375be690aca0ec4d96360c6906451380520e4665bf274f66112be35"}, ] [package.dependencies] -cachetools = ">=5.3" -chardet = ">=5.1" +cachetools = ">=5.3.1" +chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.12" +filelock = ">=3.12.2" packaging = ">=23.1" -platformdirs = ">=3.5.1" -pluggy = ">=1" -pyproject-api = ">=1.5.1" +platformdirs = ">=3.10" +pluggy = ">=1.2" +pyproject-api = ">=1.5.3" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.23" +virtualenv = ">=20.24.3" [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-argparse-cli (>=1.11)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "devpi-process (>=0.3)", "diff-cover (>=7.5)", "distlib (>=0.3.6)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.17)", "psutil (>=5.9.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.9)", "wheel (>=0.40)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=0.3.1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.1)"] [[package]] name = "typed-ast" -version = "1.5.4" +version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment 
support" optional = false python-versions = ">=3.6" files = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, + {file = 
"typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, + {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, 
+ {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, + {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, + {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, + {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, + {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, + {file = 
"typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, + {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, + {file = 
"typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, + {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, ] [[package]] name = "typing-extensions" -version = "4.6.3" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, - {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "urllib3" -version = "2.0.2" +version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"}, - {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"}, + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, ] [package.extras] @@ -1609,46 +1192,42 @@ socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] -name = "virtualenv" -version = "20.4.7" -description = "Virtual Python Environment builder" +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.4.7-py2.py3-none-any.whl", hash = "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"}, - {file = "virtualenv-20.4.7.tar.gz", hash = "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] -[package.dependencies] -appdirs = ">=1.4.3,<2" -distlib = ">=0.3.1,<1" -filelock = ">=3.0.0,<4" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -six = ">=1.9.0,<2" - [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", 
"pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "xonsh (>=0.9.16)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.23.0" +version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.23.0-py3-none-any.whl", hash = "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e"}, - {file = "virtualenv-20.23.0.tar.gz", hash = "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.11,<4" -platformdirs = ">=3.2,<4" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +importlib-metadata = {version = ">=6.6", markers = "python_version < \"3.8\""} +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=67.7.1)", "time-machine (>=2.9)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", 
"packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "zipp" @@ -1668,4 +1247,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "07e28b85afe797b936dde14b570501b255d800339a459c680b80c7abfd83ed3f" +content-hash = "69b91d868497438a43047a8deb7e9ae765ddc2872d8367a17fe65fc681e2d03a" diff --git a/pyproject.toml b/pyproject.toml index a0aec64d..45759531 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,6 @@ packages = [ { include = "docs", format = "sdist" }, { include = ".bumpversion.cfg", format = "sdist" }, { include = ".editorconfig", format = "sdist" }, - { include = ".flake8", format = "sdist" }, { include = ".readthedocs.yaml", format = "sdist" }, { include = "poetry.lock", format = "sdist" }, { include = "tox.ini", format = "sdist" }, @@ -56,7 +55,7 @@ pytest-cov = "^4.1" pytest-describe = "^2.1" pytest-timeout = "^2.1" tox = [ - { version = ">=4.5,<5", python = ">=3.8" }, + { version = ">=4.12,<5", python = ">=3.8" }, { version = ">=3.28,<4", python = "<3.8" } ] @@ -64,20 +63,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -black = "23.3.0" -flake8 = [ - { version = ">=5,<7", python = ">=3.8.1" }, - { version = ">=5,<6", python = "<3.8.1" } -] -flake8-bandit = "^4.1" -flake8-bugbear = [ - { version = "23.5.9", python = ">=3.8.1" }, - { version = "23.3.12", python = "<3.8.1" }, -] -isort = [ - { version = "^5.12", python = ">=3.8" }, - { version = "^5.11", python = "<3.8" } -] +ruff = ">=0.2,<0.3" mypy = "1.3.0" bump2version = ">=1.0,<2" @@ -91,11 +77,147 @@ sphinx = [ ] sphinx_rtd_theme = ">=1,<2" -[tool.bandit] -exclude_dirs = ["tests"] +[tool.ruff] +line-length = 88 +target-version = "py37" + +[tool.ruff.lint] +select = [ + "A", # 
flake8-builtins + "ANN", # flake8-annotations + "ARG", # flake8-unused-arguments + "B", # flake8-bugbear + "BLE", # flake8-blind-except + "C4", # flake8-comprehensions + "C90", # McCabe cyclomatic complexity + "COM", # flake8-commas + "D", # pydocstyle + "DTZ", # flake8-datetimez + "E", # pycodestyle + "EM", # flake8-errmsg + "ERA", # eradicate + "EXE", # flake8-executable + "F", # Pyflakes + "FBT", # flake8-boolean-trap + "G", # flake8-logging-format + "I", # isort + "ICN", # flake8-import-conventions + "INP", # flake8-no-pep420 + "INT", # flake8-gettext + "ISC", # flake8-implicit-str-concat + "N", # pep8-naming + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # Pylint + "PT", # flake8-pytest-style + "PTH", # flake8-use-pathlib + "PYI", # flake8-pyi + "Q", # flake8-quotes + "RET", # flake8-return + "RSE", # flake8-raise + "RUF", # Ruff-specific rules + "S", # flake8-bandit + "SLF", # flake8-self + "SIM", # flake8-simplify + "T10", # flake8-debugger + "T20", # flake8-print + "TCH", # flake8-type-checking + "TID", # flake8-tidy-imports + "TRY", # tryceratops + "UP", # pyupgrade + "W", # pycodestyle + "YTT", # flake8-2020 +] +ignore = [ + "ANN101", "ANN102", # no type annotation for self and cls needed + "ANN401", # allow explicit Any + "COM812", # allow trailing commas for auto-formatting + "D105", "D107", # no docstring needed for magic methods + "D203", # no blank line before class docstring + "D213", # multi-line docstrings should not start at second line + "D400", "D415", # first line in docstring does not need to be a sentence + "D401", # do not always require imperative mood in first line + "FBT001", "FBT002", "FBT003", # allow boolean parameters + "ISC001", # allow string literal concatenation for auto-formatting + "PGH003", # type ignores do not need to be specific + "PLR2004", # allow some "magic" values + "PYI034", # do not check return value of new method + "TID252", # allow relative imports + "UP006", "UP007", # use old type annotations (for now) + 
"TRY003", # allow specific messages outside the exception class +] + +[tool.ruff.lint.per-file-ignores] +"*/__init__.py" = [ + "I001", # imports do not need to be sorted +] +"src/graphql/execution/*" = [ + "BLE001", # allow catching blind exception +] +"src/graphql/language/ast.py" = [ + "D101", # do not require docstrings +] +"src/graphql/language/parser.py" = [ + "RSE102", # raised exception may need to be called +] +"src/graphql/type/introspection.py" = [ + "ANN001", "ANN003", "ANN204", "ANN205", # allow missing type annotations + "N803", # allow JavaScript style arguments +] +"src/graphql/utilities/get_introspection_query.py" = [ + "D101", # allow missing class docstrings + "N815", # allow JavaScript style class attributes +] +"src/graphql/utilities/type_info.py" = [ + "D102", # allow missing method docstrings +] +"src/graphql/validation/rules/*" = [ + "D102", # allow missing method docstrings +] +"src/graphql/validation/validation_context.py" = [ + "D102", # allow missing method docstrings +] +"tests/*" = [ + "ANN001", "ANN002", "ANN003", # allow missing type annotations + "ANN201", "ANN202", "ANN204", "ANN205", # allow missing type annotations + "B011", # allow always failing assertions + "B904", # allow raising exceptions without context + "C901", # allow complex functions + "D100", "D101", "D102", "D103", # allow missing docstrings + "EM101", "EM102", # allow passing literal strings to exceptions + "N802", "N803", "N806", "N815", "N816", # allow JavaScript style names + "PLR0915", # allow many statements + "PT015", # allow always failing assertions + "RUF012", # allow mutable class attributes + "S101", # allow assertions + "S301", # allow pickling + "TRY002", "TRY301", # less strict handling of exceptions +] +"tests/star_wars_schema.py" = [ + "A002", # allow shadowin builtins + "ERA001", # allow commented-out code +] +"tests/test_docs.py" = [ + "S102", # allow use of exec +] + + +[tool.ruff.lint.flake8-quotes] +inline-quotes = "double" -[tool.black] 
-target-version = ["py37", "py38", "py39", "py310", "py311"] +[tool.ruff.lint.mccabe] +max-complexity = 50 + +[tool.ruff.lint.pylint] +max-args = 15 +max-branches = 50 +max-returns = 25 +max-statements = 125 + +[tool.ruff.format] +indent-style = "space" +quote-style = "double" +skip-magic-trailing-comma = false [tool.coverage.run] branch = true @@ -116,7 +238,6 @@ exclude_lines = [ "except ImportError:", "# Python <", "raise NotImplementedError", - 'raise TypeError\(f?"Unexpected', "assert False,", '\s+next\($', "if MYPY:", @@ -126,13 +247,6 @@ exclude_lines = [ ] ignore_errors = true -[tool.isort] -src_paths = ["src", "tests"] -skip_glob = ["src/**/__init__.py"] -profile = "black" -force_single_line = false -lines_after_imports = 2 - [tool.mypy] python_version = "3.11" check_untyped_defs = true diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index 6896f4f7..2f530660 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -1,7 +1,8 @@ +"""GraphQL Error""" + from sys import exc_info from typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Union - try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -12,12 +13,12 @@ from typing_extensions import TypeAlias if TYPE_CHECKING: - from ..language.ast import Node # noqa: F401 - from ..language.location import ( # noqa: F401 + from ..language.ast import Node + from ..language.location import ( FormattedSourceLocation, SourceLocation, ) - from ..language.source import Source # noqa: F401 + from ..language.source import Source __all__ = ["GraphQLError", "GraphQLErrorExtensions", "GraphQLFormattedError"] @@ -127,6 +128,7 @@ def __init__( original_error: Optional[Exception] = None, extensions: Optional[GraphQLErrorExtensions] = None, ) -> None: + """Initialize a GraphQLError.""" super().__init__(message) self.message = message @@ -201,7 +203,7 @@ def __repr__(self) -> str: args.append(f"extensions={self.extensions!r}") 
return f"{self.__class__.__name__}({', '.join(args)})" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return ( isinstance(other, GraphQLError) and self.__class__ == other.__class__ @@ -220,7 +222,7 @@ def __eq__(self, other: Any) -> bool: ) ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @property diff --git a/src/graphql/error/located_error.py b/src/graphql/error/located_error.py index 8f08dcf9..690bcddf 100644 --- a/src/graphql/error/located_error.py +++ b/src/graphql/error/located_error.py @@ -1,11 +1,13 @@ +"""Located GraphQL Error""" + +from contextlib import suppress from typing import TYPE_CHECKING, Collection, Optional, Union from ..pyutils import inspect from .graphql_error import GraphQLError - if TYPE_CHECKING: - from ..language.ast import Node # noqa: F401 + from ..language.ast import Node __all__ = ["located_error"] @@ -29,23 +31,18 @@ def located_error( if isinstance(original_error, GraphQLError) and original_error.path is not None: return original_error try: - # noinspection PyUnresolvedReferences message = str(original_error.message) # type: ignore except AttributeError: message = str(original_error) try: - # noinspection PyUnresolvedReferences source = original_error.source # type: ignore except AttributeError: source = None try: - # noinspection PyUnresolvedReferences positions = original_error.positions # type: ignore except AttributeError: positions = None - try: - # noinspection PyUnresolvedReferences + + with suppress(AttributeError): nodes = original_error.nodes or nodes # type: ignore - except AttributeError: - pass return GraphQLError(message, nodes, source, positions, path, original_error) diff --git a/src/graphql/error/syntax_error.py b/src/graphql/error/syntax_error.py index c3d95020..97b61d83 100644 --- a/src/graphql/error/syntax_error.py +++ b/src/graphql/error/syntax_error.py @@ -1,12 +1,13 @@ +"""GraphQL Syntax Error""" + from __future__ 
import annotations # Python < 3.10 from typing import TYPE_CHECKING from .graphql_error import GraphQLError - if TYPE_CHECKING: - from ..language.source import Source # noqa: F401 + from ..language.source import Source __all__ = ["GraphQLSyntaxError"] @@ -14,7 +15,8 @@ class GraphQLSyntaxError(GraphQLError): """A GraphQLError representing a syntax error.""" - def __init__(self, source: "Source", position: int, description: str) -> None: + def __init__(self, source: Source, position: int, description: str) -> None: + """Initialize the GraphQLSyntaxError""" super().__init__( f"Syntax Error: {description}", source=source, positions=[position] ) diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py index ac476fc0..7b7f6340 100644 --- a/src/graphql/execution/async_iterables.py +++ b/src/graphql/execution/async_iterables.py @@ -1,8 +1,9 @@ +"""Helpers for async iterables""" + from __future__ import annotations # Python < 3.10 from contextlib import AbstractAsyncContextManager from typing import ( - Any, AsyncGenerator, AsyncIterable, Awaitable, @@ -11,7 +12,6 @@ Union, ) - __all__ = ["aclosing", "flatten_async_iterable", "map_async_iterable"] T = TypeVar("T") @@ -20,7 +20,7 @@ AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] -class aclosing(AbstractAsyncContextManager): +class aclosing(AbstractAsyncContextManager): # noqa: N801 """Async context manager for safely finalizing an async iterator or generator. 
Contrary to the function available via the standard library, this one silently @@ -33,7 +33,7 @@ def __init__(self, iterable: AsyncIterableOrGenerator[T]) -> None: async def __aenter__(self) -> AsyncIterableOrGenerator[T]: return self.iterable - async def __aexit__(self, *_exc_info: Any) -> None: + async def __aexit__(self, *_exc_info: object) -> None: try: aclose = self.iterable.aclose # type: ignore except AttributeError: @@ -67,7 +67,6 @@ async def map_async_iterable( If the inner iterator supports an `aclose()` method, it will be called when the generator finishes or closes. """ - async with aclosing(iterable) as items: # type: ignore async for item in items: yield await callback(item) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 8330b634..260e10ae 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -1,3 +1,5 @@ +"""Collect fields""" + from collections import defaultdict from typing import Any, Dict, List, NamedTuple, Optional, Set, Union @@ -19,7 +21,6 @@ from ..utilities.type_from_ast import type_from_ast from .values import get_directive_values - __all__ = ["collect_fields", "collect_subfields", "FieldsAndPatches"] @@ -264,5 +265,5 @@ def does_fragment_condition_match( def get_field_entry_key(node: FieldNode) -> str: - """Implements the logic to compute the key of a given field's entry""" + """Implement the logic to compute the key of a given field's entry""" return node.alias.value if node.alias else node.name.value diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 1734d12c..8884cb7e 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1,3 +1,5 @@ +"""GraphQL execution""" + from __future__ import annotations # Python < 3.10 from asyncio import Event, as_completed, ensure_future, gather, shield, sleep, wait_for @@ -13,6 +15,7 @@ Dict, Generator, Iterable, + Iterator, List, 
NamedTuple, Optional, @@ -24,7 +27,6 @@ cast, ) - try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -47,9 +49,15 @@ OperationDefinitionNode, OperationType, ) -from ..pyutils import AwaitableOrValue, Path, Undefined, async_reduce, inspect +from ..pyutils import ( + AwaitableOrValue, + Path, + Undefined, + async_reduce, + inspect, + is_iterable, +) from ..pyutils import is_awaitable as default_is_awaitable -from ..pyutils import is_iterable from ..type import ( GraphQLAbstractType, GraphQLField, @@ -74,15 +82,13 @@ from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values - ASYNC_DELAY = 1 / 512 # wait time in seconds for deferring execution - try: # pragma: no cover - anext + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator: AsyncIterator) -> Any: + async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -164,7 +170,7 @@ def __init__( data: Optional[Dict[str, Any]] = None, errors: Optional[List[GraphQLError]] = None, extensions: Optional[Dict[str, Any]] = None, - ): + ) -> None: self.data = data self.errors = errors self.extensions = extensions @@ -174,7 +180,7 @@ def __repr__(self) -> str: ext = "" if self.extensions is None else f", extensions={self.extensions}" return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" - def __iter__(self) -> Iterable[Any]: + def __iter__(self) -> Iterator[Any]: return iter((self.data, self.errors)) @property @@ -187,13 +193,15 @@ def formatted(self) -> FormattedExecutionResult: formatted["extensions"] = self.extensions return formatted - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): if "extensions" not in other: - return other == dict(data=self.data, errors=self.errors) - return other == dict( - 
data=self.data, errors=self.errors, extensions=self.extensions - ) + return other == {"data": self.data, "errors": self.errors} + return other == { + "data": self.data, + "errors": self.errors, + "extensions": self.extensions, + } if isinstance(other, tuple): if len(other) == 2: return other == (self.data, self.errors) @@ -205,7 +213,7 @@ def __eq__(self, other: Any) -> bool: and other.extensions == self.extensions ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @@ -237,7 +245,7 @@ def __init__( path: Optional[List[Union[str, int]]] = None, label: Optional[str] = None, extensions: Optional[Dict[str, Any]] = None, - ): + ) -> None: self.data = data self.errors = errors self.path = path @@ -269,7 +277,7 @@ def formatted(self) -> FormattedIncrementalDeferResult: formatted["extensions"] = self.extensions return formatted - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data @@ -298,7 +306,7 @@ def __eq__(self, other: Any) -> bool: and other.extensions == self.extensions ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @@ -330,7 +338,7 @@ def __init__( path: Optional[List[Union[str, int]]] = None, label: Optional[str] = None, extensions: Optional[Dict[str, Any]] = None, - ): + ) -> None: self.items = items self.errors = errors self.path = path @@ -362,7 +370,7 @@ def formatted(self) -> FormattedIncrementalStreamResult: formatted["extensions"] = self.extensions return formatted - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("items") == self.items @@ -391,7 +399,7 @@ def __eq__(self, other: Any) -> bool: and other.extensions == self.extensions ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @@ -434,7 +442,7 @@ def 
__init__( incremental: Optional[Sequence[IncrementalResult]] = None, has_next: bool = False, extensions: Optional[Dict[str, Any]] = None, - ): + ) -> None: self.data = data self.errors = errors self.incremental = incremental @@ -465,7 +473,7 @@ def formatted(self) -> FormattedInitialIncrementalExecutionResult: formatted["extensions"] = self.extensions return formatted - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data @@ -501,7 +509,7 @@ def __eq__(self, other: Any) -> bool: and other.extensions == self.extensions ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @@ -558,7 +566,7 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: formatted["extensions"] = self.extensions return formatted - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( ("incremental" not in other or other["incremental"] == self.incremental) @@ -585,7 +593,7 @@ def __eq__(self, other: Any) -> bool: and other.extensions == self.extensions ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @@ -699,11 +707,12 @@ def build( elif isinstance(middleware, MiddlewareManager): middleware_manager = middleware else: - raise TypeError( + msg = ( "Middleware must be passed as a list or tuple of functions" " or objects, or as a single MiddlewareManager object." f" Got {inspect(middleware)} instead." 
) + raise TypeError(msg) for definition in document.definitions: if isinstance(definition, OperationDefinitionNode): @@ -797,11 +806,11 @@ def execute_operation(self) -> AwaitableOrValue[Dict[str, Any]]: operation = self.operation root_type = schema.get_root_type(operation.operation) if root_type is None: - raise GraphQLError( + msg = ( "Schema is not configured to execute" - f" {operation.operation.value} operation.", - operation, + f" {operation.operation.value} operation." ) + raise GraphQLError(msg, operation) root_fields, patches = collect_fields( schema, @@ -817,9 +826,7 @@ def execute_operation(self) -> AwaitableOrValue[Dict[str, Any]]: self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )( - root_type, root_value, None, root_fields - ) # type: ignore + )(root_type, root_value, None, root_fields) # type: ignore for patch in patches: label, patch_fields = patch @@ -975,11 +982,11 @@ async def await_result() -> Any: ) if self.is_awaitable(completed): return await completed - return completed except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, return_type, errors) return None + return completed return await_result() @@ -999,13 +1006,14 @@ async def await_completed() -> Any: return await_completed() - return completed except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, return_type, errors) self.filter_subsequent_payloads(path) return None + return completed + def build_resolve_info( self, field_def: GraphQLField, @@ -1015,7 +1023,8 @@ def build_resolve_info( ) -> GraphQLResolveInfo: """Build the GraphQLResolveInfo object. - For internal use only.""" + For internal use only. + """ # The resolve function's first argument is a collection of information about # the current execution state. 
return GraphQLResolveInfo( @@ -1080,10 +1089,11 @@ def complete_value( async_payload_record, ) if completed is None: - raise TypeError( + msg = ( "Cannot return null for non-nullable field" f" {info.parent_type.name}.{info.field_name}." ) + raise TypeError(msg) return completed # If result value is null or undefined then return null. @@ -1115,10 +1125,11 @@ def complete_value( ) # Not reachable. All possible output types have been considered. - raise TypeError( # pragma: no cover + msg = ( "Cannot complete value of unexpected output type:" f" '{inspect(return_type)}'." - ) + ) # pragma: no cover + raise TypeError(msg) # pragma: no cover def get_stream_values( self, field_nodes: List[FieldNode], path: Path @@ -1144,7 +1155,8 @@ def get_stream_values( initial_count = stream.get("initialCount") if initial_count is None or initial_count < 0: - raise ValueError("initialCount must be a positive integer") + msg = "initialCount must be a positive integer" + raise ValueError(msg) label = stream.get("label") return StreamArguments(initial_count=initial_count, label=label) @@ -1177,7 +1189,7 @@ async def complete_async_iterator_value( and isinstance(stream.initial_count, int) and index >= stream.initial_count ): - try: + with suppress(TimeoutError): await wait_for( shield( self.execute_stream_iterator( @@ -1193,8 +1205,6 @@ async def complete_async_iterator_value( ), timeout=ASYNC_DELAY, ) - except TimeoutError: - pass break field_path = path.add_key(index, None) @@ -1282,10 +1292,11 @@ def complete_list_value( ) if not is_iterable(result): - raise GraphQLError( + msg = ( "Expected Iterable, but did not find one for field" f" '{info.parent_type.name}.{info.field_name}'." 
) + raise GraphQLError(msg) stream = self.get_stream_values(field_nodes, path) @@ -1334,7 +1345,6 @@ async def await_completed(item: Any, item_path: Path) -> Any: ) if is_awaitable(completed): return await completed - return completed except Exception as raw_error: error = located_error( raw_error, field_nodes, item_path.as_list() @@ -1342,6 +1352,7 @@ async def await_completed(item: Any, item_path: Path) -> Any: handle_field_error(error, item_type, errors) self.filter_subsequent_payloads(item_path) return None + return completed completed_item = await_completed(item, item_path) else: @@ -1408,10 +1419,12 @@ def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: """ serialized_result = return_type.serialize(result) if serialized_result is Undefined or serialized_result is None: - raise TypeError( + msg = ( f"Expected `{inspect(return_type)}.serialize({inspect(result)})`" - f" to return non-nullable value, returned: {inspect(serialized_result)}" + " to return non-nullable value, returned:" + f" {inspect(serialized_result)}" ) + raise TypeError(msg) return serialized_result def complete_abstract_value( @@ -1475,54 +1488,56 @@ def ensure_valid_runtime_type( info: GraphQLResolveInfo, result: Any, ) -> GraphQLObjectType: + """Ensure that the given type is valid at runtime.""" if runtime_type_name is None: - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' must resolve" " to an Object type at runtime" f" for field '{info.parent_type.name}.{info.field_name}'." f" Either the '{return_type.name}' type should provide" " a 'resolve_type' function or each possible type should provide" - " an 'is_type_of' function.", - field_nodes, + " an 'is_type_of' function." ) + raise GraphQLError(msg, field_nodes) if is_object_type(runtime_type_name): # pragma: no cover - raise GraphQLError( + msg = ( "Support for returning GraphQLObjectType from resolve_type was" " removed in GraphQL-core 3.2, please return type name instead." 
) + raise GraphQLError(msg) if not isinstance(runtime_type_name, str): - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' must resolve" " to an Object type at runtime" f" for field '{info.parent_type.name}.{info.field_name}' with value" - f" {inspect(result)}, received '{inspect(runtime_type_name)}'.", - field_nodes, + f" {inspect(result)}, received '{inspect(runtime_type_name)}'." ) + raise GraphQLError(msg, field_nodes) runtime_type = self.schema.get_type(runtime_type_name) if runtime_type is None: - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' was resolved to a type" - f" '{runtime_type_name}' that does not exist inside the schema.", - field_nodes, + f" '{runtime_type_name}' that does not exist inside the schema." ) + raise GraphQLError(msg, field_nodes) if not is_object_type(runtime_type): - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' was resolved" - f" to a non-object type '{runtime_type_name}'.", - field_nodes, + f" to a non-object type '{runtime_type_name}'." ) + raise GraphQLError(msg, field_nodes) if not self.schema.is_sub_type(return_type, runtime_type): - raise GraphQLError( + msg = ( f"Runtime Object type '{runtime_type.name}' is not a possible" - f" type for '{return_type.name}'.", - field_nodes, + f" type for '{return_type.name}'." ) + raise GraphQLError(msg, field_nodes) # noinspection PyTypeChecker return runtime_type @@ -1571,7 +1586,7 @@ def collect_and_execute_subfields( result: Any, async_payload_record: Optional[AsyncPayloadRecord], ) -> AwaitableOrValue[Dict[str, Any]]: - # Collect sub-fields to execute to complete this value. 
+ """Collect sub-fields to execute to complete this value.""" sub_field_nodes, sub_patches = self.collect_subfields(return_type, field_nodes) sub_fields = self.execute_fields( @@ -1612,7 +1627,7 @@ def collect_subfields( key = ( (return_type, id(field_nodes[0])) if len(field_nodes) == 1 # optimize most frequent case - else tuple((return_type, *map(id, field_nodes))) + else (return_type, *map(id, field_nodes)) ) sub_fields_and_patches = cache.get(key) if sub_fields_and_patches is None: @@ -1653,7 +1668,6 @@ def map_source_to_response( as it is nearly identical to the "ExecuteQuery" algorithm, for which :func:`~graphql.execution.execute` is also used. """ - if not isinstance(result_or_stream, AsyncIterable): return result_or_stream # pragma: no cover @@ -1674,6 +1688,7 @@ def execute_deferred_fragment( path: Optional[Path] = None, parent_context: Optional[AsyncPayloadRecord] = None, ) -> None: + """Execute deferred fragment.""" async_payload_record = DeferredFragmentRecord(label, path, parent_context, self) try: awaitable_or_data = self.execute_fields( @@ -1683,7 +1698,7 @@ def execute_deferred_fragment( if self.is_awaitable(awaitable_or_data): async def await_data( - awaitable: Awaitable[Dict[str, Any]] + awaitable: Awaitable[Dict[str, Any]], ) -> Optional[Dict[str, Any]]: # noinspection PyShadowingNames @@ -1711,6 +1726,7 @@ def execute_stream_field( label: Optional[str] = None, parent_context: Optional[AsyncPayloadRecord] = None, ) -> AsyncPayloadRecord: + """Execute stream field.""" async_payload_record = StreamRecord( label, item_path, None, parent_context, self ) @@ -1811,6 +1827,7 @@ async def execute_stream_iterator_item( async_payload_record: StreamRecord, field_path: Path, ) -> Any: + """Execute stream iterator item.""" if iterator in self._canceled_iterators: raise StopAsyncIteration try: @@ -1845,6 +1862,7 @@ async def execute_stream_iterator( label: Optional[str], parent_context: Optional[AsyncPayloadRecord], ) -> None: + """Execute stream iterator.""" 
index = initial_index previous_async_payload_record = parent_context @@ -1889,6 +1907,7 @@ def filter_subsequent_payloads( null_path: Optional[Path] = None, current_async_record: Optional[AsyncPayloadRecord] = None, ) -> None: + """Filter subsequent payloads.""" null_path_list = null_path.as_list() if null_path else [] for async_record in list(self.subsequent_payloads): if async_record is current_async_record: @@ -1903,6 +1922,7 @@ def filter_subsequent_payloads( del self.subsequent_payloads[async_record] def get_completed_incremental_results(self) -> List[IncrementalResult]: + """Get completed incremental results.""" incremental_results: List[IncrementalResult] = [] append_result = incremental_results.append subsequent_payloads = list(self.subsequent_payloads) @@ -1942,6 +1962,7 @@ def get_completed_incremental_results(self) -> List[IncrementalResult]: async def yield_subsequent_payloads( self, ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + """Yield subsequent payloads.""" payloads = self.subsequent_payloads has_next = bool(payloads) @@ -2103,7 +2124,8 @@ def execute_impl( async def await_result() -> Any: try: initial_result = build_response( - await result, errors # type: ignore + await result, # type: ignore + errors, ) if context.subsequent_payloads: return ExperimentalIncrementalExecutionResults( @@ -2114,10 +2136,10 @@ async def await_result() -> Any: ), subsequent_results=context.yield_subsequent_payloads(), ) - return initial_result except GraphQLError as error: errors.append(error) return build_response(None, errors) + return initial_result return await_result() @@ -2131,10 +2153,10 @@ async def await_result() -> Any: ), subsequent_results=context.yield_subsequent_payloads(), ) - return initial_result except GraphQLError as error: errors.append(error) return build_response(None, errors) + return initial_result def assume_not_awaitable(_value: Any) -> bool: @@ -2191,7 +2213,8 @@ def execute_sync( ): if default_is_awaitable(result): 
ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() - raise RuntimeError("GraphQL execution failed to complete synchronously.") + msg = "GraphQL execution failed to complete synchronously." + raise RuntimeError(msg) return cast(ExecutionResult, result) @@ -2207,7 +2230,6 @@ def handle_field_error( # Otherwise, error protection is applied, logging the error and resolving a # null value for this field if one is encountered. errors.append(error) - return None def invalid_return_type_error( @@ -2381,7 +2403,7 @@ async def ensure_single_execution_result( ExecutionResult, InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, - ] + ], ) -> ExecutionResult: """Ensure that the given result does not use incremental delivery.""" if not isinstance(result, ExecutionResult): @@ -2593,10 +2615,8 @@ def execute_subscription( root_type = schema.subscription_type if root_type is None: - raise GraphQLError( - "Schema is not configured to execute subscription operation.", - context.operation, - ) + msg = "Schema is not configured to execute subscription operation." + raise GraphQLError(msg, context.operation) root_fields = collect_fields( schema, @@ -2611,9 +2631,8 @@ def execute_subscription( field_def = schema.get_field(root_type, field_name) if not field_def: - raise GraphQLError( - f"The subscription field '{field_name}' is not defined.", field_nodes - ) + msg = f"The subscription field '{field_name}' is not defined." 
+ raise GraphQLError(msg, field_nodes) path = Path(None, response_name, root_type.name) info = context.build_resolve_info(field_def, field_nodes, root_type, path) @@ -2637,14 +2656,14 @@ async def await_result() -> AsyncIterable[Any]: try: return assert_event_stream(await result) except Exception as error: - raise located_error(error, field_nodes, path.as_list()) + raise located_error(error, field_nodes, path.as_list()) from error return await_result() return assert_event_stream(result) except Exception as error: - raise located_error(error, field_nodes, path.as_list()) + raise located_error(error, field_nodes, path.as_list()) from error def assert_event_stream(result: Any) -> AsyncIterable: @@ -2653,10 +2672,11 @@ def assert_event_stream(result: Any) -> AsyncIterable: # Assert field returned an event stream, otherwise yield an error. if not isinstance(result, AsyncIterable): - raise GraphQLError( + msg = ( "Subscription field must return AsyncIterable." f" Received: {inspect(result)}." ) + raise GraphQLError(msg) return result @@ -2706,6 +2726,7 @@ def __await__(self) -> Generator[Any, None, Optional[Dict[str, Any]]]: return self.wait().__await__() async def wait(self) -> Optional[Dict[str, Any]]: + """Wait until data is ready.""" if self.parent_context: await self.parent_context.completed.wait() _data = self._data @@ -2722,6 +2743,7 @@ async def wait(self) -> Optional[Dict[str, Any]]: return data def add_data(self, data: AwaitableOrValue[Optional[Dict[str, Any]]]) -> None: + """Add data to the record.""" self._data = data self._data_added.set() @@ -2776,6 +2798,7 @@ def __await__(self) -> Generator[Any, None, Optional[List[str]]]: return self.wait().__await__() async def wait(self) -> Optional[List[str]]: + """Wait until data is ready.""" await self._items_added.wait() if self.parent_context: await self.parent_context.completed.wait() @@ -2793,10 +2816,12 @@ async def wait(self) -> Optional[List[str]]: return items def add_items(self, items: 
AwaitableOrValue[Optional[List[Any]]]) -> None: + """Add items to the record.""" self._items = items self._items_added.set() def set_is_completed_iterator(self) -> None: + """Mark as completed.""" self.is_completed_iterator = True self._items_added.set() diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index cb455faf..4a90be68 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -1,8 +1,9 @@ +"""Middleware manager""" + from functools import partial, reduce from inspect import isfunction from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -32,7 +33,7 @@ class MiddlewareManager: _cached_resolvers: Dict[GraphQLFieldResolver, GraphQLFieldResolver] _middleware_resolvers: Optional[List[Callable]] - def __init__(self, *middlewares: Any): + def __init__(self, *middlewares: Any) -> None: self.middlewares = middlewares self._middleware_resolvers = ( list(get_middleware_resolvers(middlewares)) if middlewares else None diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 9ae9c453..3080a1d7 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -1,3 +1,5 @@ +"""Helpers for handling values""" + from typing import Any, Callable, Collection, Dict, List, Optional, Union from ..error import GraphQLError @@ -29,16 +31,13 @@ from ..utilities.type_from_ast import type_from_ast from ..utilities.value_from_ast import value_from_ast - try: from typing import TypeAlias except ImportError: # Python < 3.10 from typing_extensions import TypeAlias - __all__ = ["get_argument_values", "get_directive_values", "get_variable_values"] - CoercedVariableValues: TypeAlias = Union[List[GraphQLError], Dict[str, Any]] @@ -58,10 +57,11 @@ def get_variable_values( def on_error(error: GraphQLError) -> None: if max_errors is not None and len(errors) >= max_errors: - raise 
GraphQLError( + msg = ( "Too many errors processing variables," " error limit reached. Execution aborted." ) + raise GraphQLError(msg) errors.append(error) try: @@ -129,9 +129,7 @@ def on_input_value_error( path: List[Union[str, int]], invalid_value: Any, error: GraphQLError ) -> None: invalid_str = inspect(invalid_value) - prefix = ( - f"Variable '${var_name}' got invalid value {invalid_str}" # noqa: B023 - ) + prefix = f"Variable '${var_name}' got invalid value {invalid_str}" # noqa: B023 if path: prefix += f" at '{var_name}{print_path_list(path)}'" # noqa: B023 on_error( @@ -170,11 +168,11 @@ def get_argument_values( if arg_def.default_value is not Undefined: coerced_values[arg_def.out_name or name] = arg_def.default_value elif is_non_null_type(arg_type): # pragma: no cover else - raise GraphQLError( + msg = ( f"Argument '{name}' of required type '{arg_type}'" - " was not provided.", - node, + " was not provided." ) + raise GraphQLError(msg, node) continue # pragma: no cover value_node = argument_node.value @@ -186,30 +184,26 @@ def get_argument_values( if arg_def.default_value is not Undefined: coerced_values[arg_def.out_name or name] = arg_def.default_value elif is_non_null_type(arg_type): # pragma: no cover else - raise GraphQLError( + msg = ( f"Argument '{name}' of required type '{arg_type}'" f" was provided the variable '${variable_name}'" - " which was not provided a runtime value.", - value_node, + " which was not provided a runtime value." ) + raise GraphQLError(msg, value_node) continue # pragma: no cover is_null = variable_values[variable_name] is None if is_null and is_non_null_type(arg_type): - raise GraphQLError( - f"Argument '{name}' of non-null type '{arg_type}' must not be null.", - value_node, - ) + msg = f"Argument '{name}' of non-null type '{arg_type}' must not be null." 
+ raise GraphQLError(msg, value_node) coerced_value = value_from_ast(value_node, arg_type, variable_values) if coerced_value is Undefined: # Note: `values_of_correct_type` validation should catch this before # execution. This is a runtime check to ensure execution does not # continue with an invalid argument value. - raise GraphQLError( - f"Argument '{name}' has invalid value {print_ast(value_node)}.", - value_node, - ) + msg = f"Argument '{name}' has invalid value {print_ast(value_node)}." + raise GraphQLError(msg, value_node) coerced_values[arg_def.out_name or name] = coerced_value return coerced_values diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index 28afe7dc..b1460fd2 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -1,3 +1,5 @@ +"""Execute a GraphQL operation""" + from asyncio import ensure_future from typing import Any, Awaitable, Callable, Dict, Optional, Type, Union, cast @@ -13,7 +15,6 @@ validate_schema, ) - __all__ = ["graphql", "graphql_sync"] @@ -147,7 +148,8 @@ def graphql_sync( # Assert that the execution was synchronous. if default_is_awaitable(result): ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() - raise RuntimeError("GraphQL execution failed to complete synchronously.") + msg = "GraphQL execution failed to complete synchronously." 
+ raise RuntimeError(msg) return cast(ExecutionResult, result) diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index bbae00d2..35a06f11 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -1,19 +1,22 @@ +"""GraphQL Abstract Syntax Tree""" + from __future__ import annotations # Python < 3.10 from copy import copy, deepcopy from enum import Enum -from typing import Any, Dict, List, Optional, Tuple, Union - -from ..pyutils import camel_to_snake -from .source import Source -from .token_kind import TokenKind - +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union try: from typing import TypeAlias except ImportError: # Python < 3.10 from typing_extensions import TypeAlias +from ..pyutils import camel_to_snake + +if TYPE_CHECKING: + from .source import Source + from .token_kind import TokenKind + __all__ = [ "Location", @@ -131,7 +134,7 @@ def __repr__(self) -> str: def __inspect__(self) -> str: return repr(self) - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, Token): return ( self.kind == other.kind @@ -141,7 +144,7 @@ def __eq__(self, other: Any) -> bool: and self.column == other.column and self.value == other.value ) - elif isinstance(other, str): + if isinstance(other, str): return other == self.desc return False @@ -229,14 +232,14 @@ def __repr__(self) -> str: def __inspect__(self) -> str: return repr(self) - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, Location): return self.start == other.start and self.end == other.end - elif isinstance(other, (list, tuple)) and len(other) == 2: + if isinstance(other, (list, tuple)) and len(other) == 2: return self.start == other[0] and self.end == other[1] return False - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other def __hash__(self) -> int: @@ -371,7 +374,7 @@ def __repr__(self) -> str: 
rep += f" at {loc}" return rep - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: """Test whether two nodes are equal (recursively).""" return ( isinstance(other, Node) @@ -425,6 +428,7 @@ def __init_subclass__(cls) -> None: cls.keys = tuple(keys) def to_dict(self, locations: bool = False) -> Dict: + """Convert node to a dictionary.""" from ..utilities import ast_to_dict return ast_to_dict(self, locations) @@ -502,7 +506,7 @@ class FieldNode(SelectionNode): class NullabilityAssertionNode(Node): __slots__ = ("nullability_assertion",) - nullability_assertion: Optional["NullabilityAssertionNode"] + nullability_assertion: Optional[NullabilityAssertionNode] class ListNullabilityOperatorNode(NullabilityAssertionNode): diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index 296c0b18..e3b8511e 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -1,7 +1,8 @@ +"""Helpers for block strings""" + from sys import maxsize from typing import Collection, List - __all__ = [ "dedent_block_string_lines", "is_printable_as_block_string", diff --git a/src/graphql/language/character_classes.py b/src/graphql/language/character_classes.py index 0f8db7de..628bd60f 100644 --- a/src/graphql/language/character_classes.py +++ b/src/graphql/language/character_classes.py @@ -1,3 +1,5 @@ +"""Character classes""" + __all__ = ["is_digit", "is_letter", "is_name_start", "is_name_continue"] diff --git a/src/graphql/language/directive_locations.py b/src/graphql/language/directive_locations.py index 3d88382f..f251658e 100644 --- a/src/graphql/language/directive_locations.py +++ b/src/graphql/language/directive_locations.py @@ -1,5 +1,6 @@ -from enum import Enum +"""Directive locations""" +from enum import Enum __all__ = ["DirectiveLocation"] diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index 5e2914f7..5c54abbc 100644 --- a/src/graphql/language/lexer.py +++ 
b/src/graphql/language/lexer.py @@ -1,3 +1,5 @@ +"""GraphQL Lexer""" + from typing import List, NamedTuple, Optional from ..error import GraphQLSyntaxError @@ -7,7 +9,6 @@ from .source import Source from .token_kind import TokenKind - __all__ = ["Lexer", "is_punctuator_token_kind"] @@ -27,7 +28,7 @@ class Lexer: EOF token whenever called. """ - def __init__(self, source: Source): + def __init__(self, source: Source) -> None: """Given a Source object, initialize a Lexer for that source.""" self.source = source self.token = self.last_token = Token(TokenKind.SOF, 0, 0, 0, 0) @@ -107,12 +108,12 @@ def read_next_token(self, start: int) -> Token: if char in " \t,\ufeff": position += 1 continue - elif char == "\n": + if char == "\n": position += 1 self.line += 1 self.line_start = position continue - elif char == "\r": + if char == "\r": if body[position + 1 : position + 2] == "\n": position += 2 else: @@ -139,9 +140,8 @@ def read_next_token(self, start: int) -> Token: if is_name_start(char): return self.read_name(position) - if char == ".": - if body[position + 1 : position + 3] == "..": - return self.create_token(TokenKind.SPREAD, position, position + 3) + if char == "." 
and body[position + 1 : position + 3] == "..": + return self.create_token(TokenKind.SPREAD, position, position + 3) message = ( "Unexpected single quote character (')," @@ -314,6 +314,7 @@ def read_string(self, start: int) -> Token: raise GraphQLSyntaxError(self.source, position, "Unterminated string.") def read_escaped_unicode_variable_width(self, position: int) -> EscapeSequence: + """Read escaped unicode with variable width""" body = self.source.body point = 0 size = 3 @@ -341,6 +342,7 @@ def read_escaped_unicode_variable_width(self, position: int) -> EscapeSequence: ) def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: + """Read escaped unicode with fixed width""" body = self.source.body code = read_16_bit_hex_code(body, position + 2) @@ -349,16 +351,15 @@ def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: # GraphQL allows JSON-style surrogate pair escape sequences, but only when # a valid pair is formed. - if 0xD800 <= code <= 0xDBFF: - if body[position + 6 : position + 8] == "\\u": - trailing_code = read_16_bit_hex_code(body, position + 8) - if 0xDC00 <= trailing_code <= 0xDFFF: - return EscapeSequence( - (chr(code) + chr(trailing_code)) - .encode("utf-16", "surrogatepass") - .decode("utf-16"), - 12, - ) + if 0xD800 <= code <= 0xDBFF and body[position + 6 : position + 8] == "\\u": + trailing_code = read_16_bit_hex_code(body, position + 8) + if 0xDC00 <= trailing_code <= 0xDFFF: + return EscapeSequence( + (chr(code) + chr(trailing_code)) + .encode("utf-16", "surrogatepass") + .decode("utf-16"), + 12, + ) raise GraphQLSyntaxError( self.source, @@ -367,6 +368,7 @@ def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: ) def read_escaped_character(self, position: int) -> EscapeSequence: + """Read escaped character sequence""" body = self.source.body value = _ESCAPED_CHARS.get(body[position + 1]) if value: @@ -544,9 +546,9 @@ def read_hex_digit(char: str) -> int: """ if "0" <= char <= "9": 
return ord(char) - 48 - elif "A" <= char <= "F": + if "A" <= char <= "F": return ord(char) - 55 - elif "a" <= char <= "f": + if "a" <= char <= "f": return ord(char) - 87 return -1 @@ -562,8 +564,7 @@ def is_unicode_scalar_value(char: str) -> bool: def is_supplementary_code_point(body: str, location: int) -> bool: - """ - Check whether the current location is a supplementary code point. + """Check whether the current location is a supplementary code point. The GraphQL specification defines source text as a sequence of unicode scalar values (which Unicode defines to exclude surrogate code points). diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 529f2caf..6f191964 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -1,7 +1,8 @@ -from __future__ import annotations # Python < 3.10 +"""Source locations""" -from typing import TYPE_CHECKING, Any, NamedTuple +from __future__ import annotations # Python < 3.10 +from typing import TYPE_CHECKING, NamedTuple try: from typing import TypedDict @@ -9,7 +10,7 @@ from typing_extensions import TypedDict if TYPE_CHECKING: - from .source import Source # noqa: F401 + from .source import Source __all__ = ["get_location", "SourceLocation", "FormattedSourceLocation"] @@ -29,18 +30,19 @@ class SourceLocation(NamedTuple): @property def formatted(self) -> FormattedSourceLocation: - return dict(line=self.line, column=self.column) + """Get formatted source location.""" + return {"line": self.line, "column": self.column} - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): return self.formatted == other return tuple(self) == other - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other -def get_location(source: "Source", position: int) -> SourceLocation: +def get_location(source: Source, position: int) -> SourceLocation: """Get the line and column for a character 
position in the source. Takes a Source and a UTF-8 character offset, and returns the corresponding line and diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index e380152a..2c1e8c0e 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -1,5 +1,7 @@ +"""GraphQL parser""" + from functools import partial -from typing import Callable, Dict, List, Optional, TypeVar, Union, cast +from typing import Callable, List, Mapping, Optional, TypeVar, Union, cast from ..error import GraphQLError, GraphQLSyntaxError from .ast import ( @@ -67,7 +69,6 @@ from .source import Source, is_source from .token_kind import TokenKind - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -140,7 +141,7 @@ def parse( no_location=no_location, max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, - experimental_client_controlled_nullability=experimental_client_controlled_nullability, # noqa + experimental_client_controlled_nullability=experimental_client_controlled_nullability, ) return parser.parse_document() @@ -250,7 +251,7 @@ def __init__( max_tokens: Optional[int] = None, allow_legacy_fragment_variables: bool = False, experimental_client_controlled_nullability: bool = False, - ): + ) -> None: if not is_source(source): source = Source(cast(str, source)) @@ -278,7 +279,7 @@ def parse_document(self) -> DocumentNode: loc=self.loc(start), ) - _parse_type_system_definition_method_names: Dict[str, str] = { + _parse_type_system_definition_method_names: Mapping[str, str] = { "schema": "schema_definition", "scalar": "scalar_type_definition", "type": "object_type_definition", @@ -289,7 +290,7 @@ def parse_document(self) -> DocumentNode: "directive": "directive_definition", } - _parse_other_definition_method_names: Dict[str, str] = { + _parse_other_definition_method_names: Mapping[str, str] = { **dict.fromkeys(("query", "mutation", "subscription"), "operation_definition"), "fragment": 
"fragment_definition", "extend": "type_system_extension", @@ -367,8 +368,8 @@ def parse_operation_type(self) -> OperationType: operation_token = self.expect_token(TokenKind.NAME) try: return OperationType(operation_token.value) - except ValueError: - raise self.unexpected(operation_token) + except ValueError as error: + raise self.unexpected(operation_token) from error def parse_variable_definitions(self) -> List[VariableDefinitionNode]: """VariableDefinitions: (VariableDefinition+)""" @@ -546,7 +547,7 @@ def parse_type_condition(self) -> NamedTypeNode: # Implement the parsing rules in the Values section. - _parse_value_literal_method_names: Dict[TokenKind, str] = { + _parse_value_literal_method_names: Mapping[TokenKind, str] = { TokenKind.BRACKET_L: "list", TokenKind.BRACE_L: "object", TokenKind.INT: "int", @@ -685,7 +686,7 @@ def parse_named_type(self) -> NamedTypeNode: # Implement the parsing rules in the Type Definition section. - _parse_type_extension_method_names: Dict[str, str] = { + _parse_type_extension_method_names: Mapping[str, str] = { "schema": "schema_extension", "scalar": "scalar_type_extension", "type": "object_type_extension", diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index 3b132bb2..2b483ec9 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -1,3 +1,5 @@ +"""Predicates for GraphQL nodes""" + from typing import Union from .ast import ( @@ -17,7 +19,6 @@ VariableNode, ) - try: from typing import TypeGuard except ImportError: # Python < 3.10 diff --git a/src/graphql/language/print_location.py b/src/graphql/language/print_location.py index 6d13b1e1..e0ae5de5 100644 --- a/src/graphql/language/print_location.py +++ b/src/graphql/language/print_location.py @@ -1,3 +1,5 @@ +"""Print location in GraphQL source""" + import re from typing import Optional, Tuple, cast @@ -5,7 +7,6 @@ from .location import SourceLocation, get_location from .source import Source - __all__ = 
["print_location", "print_source_location"] diff --git a/src/graphql/language/print_string.py b/src/graphql/language/print_string.py index f390b9d0..c90c67c6 100644 --- a/src/graphql/language/print_string.py +++ b/src/graphql/language/print_string.py @@ -1,10 +1,12 @@ +"""Print a string as a GraphQL expression.""" + __all__ = ["print_string"] def print_string(s: str) -> str: - """Print a string as a GraphQL StringValue literal. + r"""Print a string as a GraphQL StringValue literal. - Replaces control characters and excluded characters (" U+0022 and \\ U+005C) + Replaces control characters and excluded characters (" U+0022 and \ U+005C) with escape sequences. """ if not isinstance(s, str): diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 56971058..7170ca5f 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -1,3 +1,5 @@ +"""Print AST""" + from typing import Any, Collection, Optional from ..language.ast import Node, OperationType @@ -5,7 +7,6 @@ from .print_string import print_string from .visitor import Visitor, visit - try: from typing import TypeAlias except ImportError: # Python < 3.10 diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index 10f0d05d..bd2c635d 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -1,14 +1,14 @@ +"""GraphQL source input""" + from typing import Any from .location import SourceLocation - try: from typing import TypeGuard except ImportError: # Python < 3.10 from typing_extensions import TypeGuard - __all__ = ["Source", "is_source"] DEFAULT_NAME = "GraphQL request" @@ -41,16 +41,15 @@ def __init__( if not isinstance(location_offset, SourceLocation): location_offset = SourceLocation._make(location_offset) if location_offset.line <= 0: - raise ValueError( - "line in location_offset is 1-indexed and must be positive." - ) + msg = "line in location_offset is 1-indexed and must be positive." 
+ raise ValueError(msg) if location_offset.column <= 0: - raise ValueError( - "column in location_offset is 1-indexed and must be positive." - ) + msg = "column in location_offset is 1-indexed and must be positive." + raise ValueError(msg) self.location_offset = location_offset def get_location(self, position: int) -> SourceLocation: + """Get source location.""" lines = self.body[:position].splitlines() if lines: line = len(lines) @@ -63,12 +62,12 @@ def get_location(self, position: int) -> SourceLocation: def __repr__(self) -> str: return f"<{self.__class__.__name__} name={self.name!r}>" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return (isinstance(other, Source) and other.body == self.body) or ( isinstance(other, str) and other == self.body ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other diff --git a/src/graphql/language/token_kind.py b/src/graphql/language/token_kind.py index 7f5a2607..f4eda1c5 100644 --- a/src/graphql/language/token_kind.py +++ b/src/graphql/language/token_kind.py @@ -1,5 +1,6 @@ -from enum import Enum +"""Token kinds""" +from enum import Enum __all__ = ["TokenKind"] diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index e6d4768d..a7dccaeb 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -1,3 +1,5 @@ +"""AST Visitor""" + from copy import copy from enum import Enum from typing import ( @@ -16,7 +18,6 @@ from . 
import ast from .ast import QUERY_DOCUMENT_KEYS, Node - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -123,7 +124,7 @@ def __init_subclass__(cls) -> None: if len(attr_kind) < 2: kind: Optional[str] = None else: - attr, kind = attr_kind + attr, kind = attr_kind # noqa: PLW2901 if attr in ("enter", "leave") and kind: name = snake_to_camel(kind) + "Node" node_cls = getattr(ast, name, None) @@ -132,7 +133,8 @@ def __init_subclass__(cls) -> None: or not isinstance(node_cls, type) or not issubclass(node_cls, Node) ): - raise TypeError(f"Invalid AST node kind: {kind}.") + msg = f"Invalid AST node kind: {kind}." + raise TypeError(msg) def __init__(self) -> None: self.enter_leave_map = {} @@ -185,9 +187,11 @@ def visit( dictionary visitor_keys mapping node kinds to node attributes. """ if not isinstance(root, Node): - raise TypeError(f"Not an AST Node: {inspect(root)}.") + msg = f"Not an AST Node: {inspect(root)}." + raise TypeError(msg) if not isinstance(visitor, Visitor): - raise TypeError(f"Not an AST Visitor: {inspect(visitor)}.") + msg = f"Not an AST Visitor: {inspect(visitor)}." + raise TypeError(msg) if visitor_keys is None: visitor_keys = QUERY_DOCUMENT_KEYS @@ -250,7 +254,8 @@ def visit( result = None else: if not isinstance(node, Node): - raise TypeError(f"Invalid AST Node: {inspect(node)}.") + msg = f"Invalid AST Node: {inspect(node)}." + raise TypeError(msg) enter_leave = visitor.get_enter_leave_for_kind(node.kind) visit_fn = enter_leave.leave if is_leaving else enter_leave.enter if visit_fn: @@ -308,7 +313,7 @@ class ParallelVisitor(Visitor): If a prior visitor edits a node, no following visitors will see that node. 
""" - def __init__(self, visitors: Collection[Visitor]): + def __init__(self, visitors: Collection[Visitor]) -> None: """Create a new visitor from the given list of parallel visitors.""" super().__init__() self.visitors = visitors @@ -334,15 +339,14 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: def enter(node: Node, *args: Any) -> Optional[VisitorAction]: skipping = self.skipping for i, fn in enumerate(enter_list): - if not skipping[i]: - if fn: - result = fn(node, *args) - if result is SKIP or result is False: - skipping[i] = node - elif result is BREAK or result is True: - skipping[i] = BREAK - elif result is not None: - return result + if not skipping[i] and fn: + result = fn(node, *args) + if result is SKIP or result is False: + skipping[i] = node + elif result is BREAK or result is True: + skipping[i] = BREAK + elif result is not None: + return result return None def leave(node: Node, *args: Any) -> Optional[VisitorAction]: diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py index 9755db80..2ffa3c82 100644 --- a/src/graphql/pyutils/async_reduce.py +++ b/src/graphql/pyutils/async_reduce.py @@ -1,9 +1,10 @@ +"""Reduce awaitable values""" + from typing import Any, Awaitable, Callable, Collection, TypeVar, cast from .awaitable_or_value import AwaitableOrValue from .is_awaitable import is_awaitable as default_is_awaitable - __all__ = ["async_reduce"] T = TypeVar("T") diff --git a/src/graphql/pyutils/awaitable_or_value.py b/src/graphql/pyutils/awaitable_or_value.py index dcd34ffc..c1b888d1 100644 --- a/src/graphql/pyutils/awaitable_or_value.py +++ b/src/graphql/pyutils/awaitable_or_value.py @@ -1,5 +1,6 @@ -from typing import Awaitable, TypeVar, Union +"""Awaitable or value type""" +from typing import Awaitable, TypeVar, Union try: from typing import TypeAlias diff --git a/src/graphql/pyutils/cached_property.py b/src/graphql/pyutils/cached_property.py index 4e34be22..d55e7427 100644 --- 
a/src/graphql/pyutils/cached_property.py +++ b/src/graphql/pyutils/cached_property.py @@ -1,5 +1,6 @@ -from typing import TYPE_CHECKING, Any, Callable +"""Cached properties""" +from typing import TYPE_CHECKING, Any, Callable if TYPE_CHECKING: standard_cached_property = None diff --git a/src/graphql/pyutils/convert_case.py b/src/graphql/pyutils/convert_case.py index ea09880e..4b211e27 100644 --- a/src/graphql/pyutils/convert_case.py +++ b/src/graphql/pyutils/convert_case.py @@ -1,8 +1,9 @@ +"""Conversion between camel and snake case""" + # uses code from https://github.com/daveoncode/python-string-utils import re - __all__ = ["camel_to_snake", "snake_to_camel"] _re_camel_to_snake = re.compile(r"([a-z]|[A-Z0-9]+)(?=[A-Z])") diff --git a/src/graphql/pyutils/description.py b/src/graphql/pyutils/description.py index c171979c..d7e9d37d 100644 --- a/src/graphql/pyutils/description.py +++ b/src/graphql/pyutils/description.py @@ -1,5 +1,6 @@ -from typing import Any, Tuple, Union +"""Human-readable descriptions""" +from typing import Any, Tuple, Union __all__ = [ "Description", @@ -10,7 +11,7 @@ class Description: - """Type checker for human readable descriptions. + """Type checker for human-readable descriptions. By default, only ordinary strings are accepted as descriptions, but you can register() other classes that will also be allowed, @@ -22,13 +23,15 @@ class Description: @classmethod def isinstance(cls, obj: Any) -> bool: + """Check whether this is an instance of a description.""" return isinstance(obj, cls.bases) @classmethod def register(cls, base: type) -> None: """Register a class that shall be accepted as a description.""" if not isinstance(base, type): - raise TypeError("Only types can be registered.") + msg = "Only types can be registered." 
+ raise TypeError(msg) if base is object: cls.bases = object elif cls.bases is object: @@ -43,7 +46,8 @@ def register(cls, base: type) -> None: def unregister(cls, base: type) -> None: """Unregister a class that shall no more be accepted as a description.""" if not isinstance(base, type): - raise TypeError("Only types can be unregistered.") + msg = "Only types can be unregistered." + raise TypeError(msg) if isinstance(cls.bases, tuple): if base in cls.bases: cls.bases = tuple(b for b in cls.bases if b is not base) diff --git a/src/graphql/pyutils/did_you_mean.py b/src/graphql/pyutils/did_you_mean.py index 0078b228..de29e9e2 100644 --- a/src/graphql/pyutils/did_you_mean.py +++ b/src/graphql/pyutils/did_you_mean.py @@ -1,8 +1,9 @@ +"""Generating suggestions""" + from typing import Optional, Sequence from .format_list import or_list - __all__ = ["did_you_mean"] MAX_LENGTH = 5 diff --git a/src/graphql/pyutils/format_list.py b/src/graphql/pyutils/format_list.py index ddaf642d..b564e592 100644 --- a/src/graphql/pyutils/format_list.py +++ b/src/graphql/pyutils/format_list.py @@ -1,5 +1,6 @@ -from typing import Sequence +"""List formatting""" +from typing import Sequence __all__ = ["or_list", "and_list"] @@ -17,7 +18,8 @@ def and_list(items: Sequence[str]) -> str: def format_list(conjunction: str, items: Sequence[str]) -> str: """Given [ A, B, C ] return 'A, B, (conjunction) C'""" if not items: - raise ValueError("Missing list items to be formatted.") + msg = "Missing list items to be formatted." 
+ raise ValueError(msg) n = len(items) if n == 1: diff --git a/src/graphql/pyutils/frozen_error.py b/src/graphql/pyutils/frozen_error.py index 01c02d15..76b359a9 100644 --- a/src/graphql/pyutils/frozen_error.py +++ b/src/graphql/pyutils/frozen_error.py @@ -1,3 +1,5 @@ +"""Error when changing immutable values""" + __all__ = ["FrozenError"] diff --git a/src/graphql/pyutils/group_by.py b/src/graphql/pyutils/group_by.py index 677aa1c1..d765d9e7 100644 --- a/src/graphql/pyutils/group_by.py +++ b/src/graphql/pyutils/group_by.py @@ -1,7 +1,8 @@ +"""Grouping function""" + from collections import defaultdict from typing import Callable, Collection, Dict, List, TypeVar - __all__ = ["group_by"] K = TypeVar("K") diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index 7fb6f86e..21c6ae28 100644 --- a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -1,8 +1,9 @@ +"""Identity function""" + from typing import Any, TypeVar, cast from .undefined import Undefined - __all__ = ["identity_func"] diff --git a/src/graphql/pyutils/inspect.py b/src/graphql/pyutils/inspect.py index 78c81e7d..305b697e 100644 --- a/src/graphql/pyutils/inspect.py +++ b/src/graphql/pyutils/inspect.py @@ -1,3 +1,5 @@ +"""Value inspection for error messages""" + from inspect import ( isasyncgen, isasyncgenfunction, @@ -13,7 +15,6 @@ from .undefined import Undefined - __all__ = ["inspect"] max_recursive_depth = 2 @@ -84,20 +85,18 @@ def inspect_recursive(value: Any, seen_values: List) -> str: if isinstance(value, frozenset): return f"frozenset({{{s}}})" return f"[{s}]" - else: - # handle collections that are nested too deep - if isinstance(value, (list, tuple, dict, set, frozenset)): - if not value: - return repr(value) - if isinstance(value, list): - return "[...]" - if isinstance(value, tuple): - return "(...)" - if isinstance(value, dict): - return "{...}" - if isinstance(value, set): - return "set(...)" - return "frozenset(...)" + elif 
isinstance(value, (list, tuple, dict, set, frozenset)): + if not value: + return repr(value) + if isinstance(value, list): + return "[...]" + if isinstance(value, tuple): + return "(...)" + if isinstance(value, dict): + return "{...}" + if isinstance(value, set): + return "set(...)" + return "frozenset(...)" if isinstance(value, Exception): type_ = "exception" value = type(value) @@ -141,7 +140,7 @@ def inspect_recursive(value: Any, seen_values: List) -> str: try: name = type(value).__name__ if not name or "<" in name or ">" in name: - raise AttributeError + raise AttributeError # noqa: TRY301 except AttributeError: return "" else: @@ -149,7 +148,7 @@ def inspect_recursive(value: Any, seen_values: List) -> str: try: name = value.__name__ if not name or "<" in name or ">" in name: - raise AttributeError + raise AttributeError # noqa: TRY301 except AttributeError: return f"<{type_}>" else: diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index d68a911d..3d450b82 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -1,8 +1,9 @@ +"""Check whether objects are awaitable""" + import inspect from types import CoroutineType, GeneratorType from typing import Any, Awaitable - try: from typing import TypeGuard except ImportError: # Python < 3.10 diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index f2f04bc9..8fb803be 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -1,6 +1,7 @@ -from array import array -from typing import Any, ByteString, Collection, Iterable, Mapping, Text, ValuesView +"""Check whether objects are iterable""" +from array import array +from typing import Any, ByteString, Collection, Iterable, Mapping, ValuesView try: from typing import TypeGuard @@ -19,7 +20,7 @@ collection_types[0] if len(collection_types) == 1 else tuple(collection_types) ) iterable_types: Any = Iterable -not_iterable_types: Any = 
(ByteString, Mapping, Text) +not_iterable_types: Any = (ByteString, Mapping, str) def is_collection(value: Any) -> TypeGuard[Collection]: diff --git a/src/graphql/pyutils/merge_kwargs.py b/src/graphql/pyutils/merge_kwargs.py index e557bddc..726d0dd6 100644 --- a/src/graphql/pyutils/merge_kwargs.py +++ b/src/graphql/pyutils/merge_kwargs.py @@ -1,5 +1,6 @@ -from typing import Any, Dict, TypeVar, cast +"""Merge arguments""" +from typing import Any, Dict, TypeVar, cast T = TypeVar("T") diff --git a/src/graphql/pyutils/natural_compare.py b/src/graphql/pyutils/natural_compare.py index 11525e84..1e8310e8 100644 --- a/src/graphql/pyutils/natural_compare.py +++ b/src/graphql/pyutils/natural_compare.py @@ -1,8 +1,9 @@ +"""Natural sort order""" + import re from itertools import cycle from typing import Tuple - __all__ = ["natural_comparison_key"] _re_digits = re.compile(r"(\d+)") diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py index 19dd79ba..f2212dd3 100644 --- a/src/graphql/pyutils/path.py +++ b/src/graphql/pyutils/path.py @@ -1,8 +1,9 @@ +"""Path of indices""" + from __future__ import annotations # Python < 3.10 from typing import Any, List, NamedTuple, Optional, Union - __all__ = ["Path"] diff --git a/src/graphql/pyutils/print_path_list.py b/src/graphql/pyutils/print_path_list.py index 125829b0..dadbfac9 100644 --- a/src/graphql/pyutils/print_path_list.py +++ b/src/graphql/pyutils/print_path_list.py @@ -1,3 +1,6 @@ +"""Path printing""" + + from typing import Collection, Union diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 8bd6c7f6..4b8b0795 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,3 +1,5 @@ +"""Simple publish-subscribe system""" + from __future__ import annotations # Python < 3.10 from asyncio import Future, Queue, create_task, get_running_loop, sleep @@ -5,7 +7,6 @@ from .is_awaitable import is_awaitable - __all__ = ["SimplePubSub", 
"SimplePubSubIterator"] @@ -27,16 +28,19 @@ def emit(self, event: Any) -> bool: for subscriber in self.subscribers: result = subscriber(event) if is_awaitable(result): - create_task(result) # type: ignore + create_task(result) # type: ignore # noqa: RUF006 return bool(self.subscribers) def get_subscriber( self, transform: Optional[Callable] = None ) -> SimplePubSubIterator: + """Return subscriber iterator""" return SimplePubSubIterator(self, transform) class SimplePubSubIterator(AsyncIterator): + """Async iterator used for subscriptions.""" + def __init__(self, pubsub: SimplePubSub, transform: Optional[Callable]) -> None: self.pubsub = pubsub self.transform = transform @@ -59,10 +63,12 @@ async def __anext__(self) -> Any: return future async def aclose(self) -> None: + """Close the iterator.""" if self.listening: await self.empty_queue() async def empty_queue(self) -> None: + """Empty the queue.""" self.listening = False self.pubsub.subscribers.remove(self.push_value) while not self.pull_queue.empty(): @@ -72,6 +78,7 @@ async def empty_queue(self) -> None: await self.push_queue.get() async def push_value(self, event: Any) -> None: + """Push a new value.""" value = event if self.transform is None else self.transform(event) if self.pull_queue.empty(): await self.push_queue.put(value) diff --git a/src/graphql/pyutils/suggestion_list.py b/src/graphql/pyutils/suggestion_list.py index 7270e3e4..16526b34 100644 --- a/src/graphql/pyutils/suggestion_list.py +++ b/src/graphql/pyutils/suggestion_list.py @@ -1,8 +1,9 @@ +"""List with suggestions""" + from typing import Collection, List, Optional from .natural_compare import natural_comparison_key - __all__ = ["suggestion_list"] @@ -46,7 +47,7 @@ class LexicalDistance: _input_list: List[int] _rows: List[List[int]] - def __init__(self, input_: str): + def __init__(self, input_: str) -> None: self._input = input_ self._input_lower_case = input_.lower() row_size = len(input_) + 1 diff --git a/src/graphql/pyutils/undefined.py 
b/src/graphql/pyutils/undefined.py index 1e54ac32..00382867 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -1,8 +1,9 @@ +"""The Undefined value""" + from __future__ import annotations # Python < 3.10 import warnings -from typing import Any, Optional - +from typing import Optional __all__ = ["Undefined", "UndefinedType"] @@ -13,6 +14,7 @@ class UndefinedType: _instance: Optional[UndefinedType] = None def __new__(cls) -> UndefinedType: + """Create the Undefined singleton.""" if cls._instance is None: cls._instance = super().__new__(cls) else: @@ -33,10 +35,10 @@ def __hash__(self) -> int: def __bool__(self) -> bool: return False - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return other is Undefined or other is None - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other diff --git a/src/graphql/type/assert_name.py b/src/graphql/type/assert_name.py index d1fe8dd6..b7e94e2d 100644 --- a/src/graphql/type/assert_name.py +++ b/src/graphql/type/assert_name.py @@ -1,24 +1,28 @@ +"""Assertions for naming conventions""" + from ..error import GraphQLError from ..language.character_classes import is_name_continue, is_name_start - __all__ = ["assert_name", "assert_enum_value_name"] def assert_name(name: str) -> str: """Uphold the spec rules about naming.""" if name is None: - raise TypeError("Must provide name.") + msg = "Must provide name." + raise TypeError(msg) if not isinstance(name, str): - raise TypeError("Expected name to be a string.") + msg = "Expected name to be a string." + raise TypeError(msg) if not name: - raise GraphQLError("Expected name to be a non-empty string.") + msg = "Expected name to be a non-empty string." + raise GraphQLError(msg) if not all(is_name_continue(char) for char in name[1:]): - raise GraphQLError( - f"Names must only contain [_a-zA-Z0-9] but {name!r} does not." 
- ) + msg = f"Names must only contain [_a-zA-Z0-9] but {name!r} does not." + raise GraphQLError(msg) if not is_name_start(name[0]): - raise GraphQLError(f"Names must start with [_a-zA-Z] but {name!r} does not.") + msg = f"Names must start with [_a-zA-Z] but {name!r} does not." + raise GraphQLError(msg) return name @@ -26,5 +30,6 @@ def assert_enum_value_name(name: str) -> str: """Uphold the spec rules about naming enum values.""" assert_name(name) if name in {"true", "false", "null"}: - raise GraphQLError(f"Enum values cannot be named: {name}.") + msg = f"Enum values cannot be named: {name}." + raise GraphQLError(msg) return name diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 73e34e08..2982ea4f 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1,3 +1,5 @@ +"""GraphQL type definitions.""" + from __future__ import annotations # Python < 3.10 from enum import Enum @@ -58,7 +60,6 @@ from ..utilities.value_from_ast_untyped import value_from_ast_untyped from .assert_name import assert_enum_value_name, assert_name - try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -69,8 +70,7 @@ from typing_extensions import TypeAlias, TypeGuard if TYPE_CHECKING: - from .schema import GraphQLSchema # noqa: F401 - + from .schema import GraphQLSchema __all__ = [ "is_type", @@ -178,18 +178,21 @@ class GraphQLType: def is_type(type_: Any) -> TypeGuard[GraphQLType]: + """Check whether this is a GraphQL type.""" return isinstance(type_, GraphQLType) def assert_type(type_: Any) -> GraphQLType: + """Assert that this is a GraphQL type.""" if not is_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL type.") + msg = f"Expected {type_} to be a GraphQL type." 
+ raise TypeError(msg) return type_ # These types wrap and modify other types -GT = TypeVar("GT", bound=GraphQLType, covariant=True) +GT = TypeVar("GT", bound=GraphQLType, covariant=True) # noqa: PLC0105 class GraphQLWrappingType(GraphQLType, Generic[GT]): @@ -205,16 +208,21 @@ def __repr__(self) -> str: def is_wrapping_type(type_: Any) -> TypeGuard[GraphQLWrappingType]: + """Check whether this is a GraphQL wrapping type.""" return isinstance(type_, GraphQLWrappingType) def assert_wrapping_type(type_: Any) -> GraphQLWrappingType: + """Assert that this is a GraphQL wrapping type.""" if not is_wrapping_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL wrapping type.") + msg = f"Expected {type_} to be a GraphQL wrapping type." + raise TypeError(msg) return type_ class GraphQLNamedTypeKwargs(TypedDict, total=False): + """Arguments for GraphQL named types""" + name: str description: Optional[str] extensions: Dict[str, Any] @@ -233,11 +241,13 @@ class GraphQLNamedType(GraphQLType): ast_node: Optional[TypeDefinitionNode] extension_ast_nodes: Tuple[TypeExtensionNode, ...] 
- reserved_types: Dict[str, GraphQLNamedType] = {} + reserved_types: Mapping[str, GraphQLNamedType] = {} def __new__(cls, name: str, *_args: Any, **_kwargs: Any) -> GraphQLNamedType: + """Create a GraphQL named type.""" if name in cls.reserved_types: - raise TypeError(f"Redefinition of reserved type {name!r}") + msg = f"Redefinition of reserved type {name!r}" + raise TypeError(msg) return super().__new__(cls) def __reduce__(self) -> Tuple[Callable, Tuple]: @@ -274,6 +284,7 @@ def __str__(self) -> str: return self.name def to_kwargs(self) -> GraphQLNamedTypeKwargs: + """Get corresponding arguments.""" return GraphQLNamedTypeKwargs( name=self.name, description=self.description, @@ -310,6 +321,8 @@ def resolve_thunk(thunk: Thunk[T]) -> T: class GraphQLScalarTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL scalar types""" + serialize: Optional[GraphQLScalarSerializer] parse_value: Optional[GraphQLScalarValueParser] parse_literal: Optional[GraphQLScalarLiteralParser] @@ -374,12 +387,12 @@ def __init__( self.parse_value = parse_value # type: ignore if parse_literal is not None: self.parse_literal = parse_literal # type: ignore - if parse_literal is not None: - if parse_value is None: - raise TypeError( - f"{name} must provide" - " both 'parse_value' and 'parse_literal' functions." - ) + if parse_literal is not None and parse_value is None: + msg = ( + f"{name} must provide" + " both 'parse_value' and 'parse_literal' functions." 
+ ) + raise TypeError(msg) self.specified_by_url = specified_by_url def __repr__(self) -> str: @@ -417,6 +430,7 @@ def parse_literal( return self.parse_value(value_from_ast_untyped(node, variables)) def to_kwargs(self) -> GraphQLScalarTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList return GraphQLScalarTypeKwargs( # type: ignore super().to_kwargs(), @@ -438,12 +452,15 @@ def __copy__(self) -> GraphQLScalarType: # pragma: no cover def is_scalar_type(type_: Any) -> TypeGuard[GraphQLScalarType]: + """Check whether this is a GraphQL scalar type.""" return isinstance(type_, GraphQLScalarType) def assert_scalar_type(type_: Any) -> GraphQLScalarType: + """Assert that this is a GraphQL scalar type.""" if not is_scalar_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Scalar type.") + msg = f"Expected {type_} to be a GraphQL Scalar type." + raise TypeError(msg) return type_ @@ -451,6 +468,8 @@ def assert_scalar_type(type_: Any) -> GraphQLScalarType: class GraphQLFieldKwargs(TypedDict, total=False): + """Arguments for GraphQL fields""" + type_: GraphQLOutputType args: Optional[GraphQLArgumentMap] resolve: Optional[GraphQLFieldResolver] @@ -508,7 +527,7 @@ def __repr__(self) -> str: def __str__(self) -> str: return f"Field: {self.type}" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLField) and self.type == other.type @@ -520,6 +539,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLFieldKwargs: + """Get corresponding arguments.""" return GraphQLFieldKwargs( type_=self.type, args=self.args.copy() if self.args else None, @@ -550,7 +570,7 @@ class GraphQLResolveInfo(NamedTuple): return_type: GraphQLOutputType parent_type: GraphQLObjectType path: Path - schema: "GraphQLSchema" + schema: GraphQLSchema fragments: Dict[str, FragmentDefinitionNode] root_value: Any operation: OperationDefinitionNode @@ -584,6 +604,8 @@ class 
GraphQLResolveInfo(NamedTuple): class GraphQLArgumentKwargs(TypedDict, total=False): + """Python arguments for GraphQL arguments""" + type_: GraphQLInputType default_value: Any description: Optional[str] @@ -622,7 +644,7 @@ def __init__( self.extensions = extensions or {} self.ast_node = ast_node - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLArgument) and self.type == other.type @@ -634,6 +656,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLArgumentKwargs: + """Get corresponding arguments.""" return GraphQLArgumentKwargs( type_=self.type, default_value=self.default_value, @@ -649,10 +672,13 @@ def __copy__(self) -> GraphQLArgument: # pragma: no cover def is_required_argument(arg: GraphQLArgument) -> bool: + """Check whether the argument is required.""" return is_non_null_type(arg.type) and arg.default_value is Undefined class GraphQLObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL object types""" + fields: GraphQLFieldMap interfaces: Tuple[GraphQLInterfaceType, ...] is_type_of: Optional[GraphQLIsTypeOfFn] @@ -713,6 +739,7 @@ def __init__( self.is_type_of = is_type_of def to_kwargs(self) -> GraphQLObjectTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList return GraphQLObjectTypeKwargs( # type: ignore super().to_kwargs(), @@ -729,9 +756,10 @@ def fields(self) -> GraphQLFieldMap: """Get provided fields, wrapping them as GraphQLFields if needed.""" try: fields = resolve_thunk(self._fields) - except Exception as error: + except Exception as error: # noqa: BLE001 cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} fields cannot be resolved. {error}") from error + msg = f"{self.name} fields cannot be resolved. 
{error}" + raise cls(msg) from error return { assert_name(name): value if isinstance(value, GraphQLField) @@ -746,23 +774,29 @@ def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) - except Exception as error: + except Exception as error: # noqa: BLE001 cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} interfaces cannot be resolved. {error}") from error + msg = f"{self.name} interfaces cannot be resolved. {error}" + raise cls(msg) from error return tuple(interfaces) if interfaces else () def is_object_type(type_: Any) -> TypeGuard[GraphQLObjectType]: + """Check whether this is a graphql object type""" return isinstance(type_, GraphQLObjectType) def assert_object_type(type_: Any) -> GraphQLObjectType: + """Assume that this is a graphql object type""" if not is_object_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Object type.") + msg = f"Expected {type_} to be a GraphQL Object type." + raise TypeError(msg) return type_ class GraphQLInterfaceTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL interface types""" + fields: GraphQLFieldMap interfaces: Tuple[GraphQLInterfaceType, ...] resolve_type: Optional[GraphQLTypeResolver] @@ -810,6 +844,7 @@ def __init__( self.resolve_type = resolve_type def to_kwargs(self) -> GraphQLInterfaceTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList return GraphQLInterfaceTypeKwargs( # type: ignore super().to_kwargs(), @@ -826,9 +861,10 @@ def fields(self) -> GraphQLFieldMap: """Get provided fields, wrapping them as GraphQLFields if needed.""" try: fields = resolve_thunk(self._fields) - except Exception as error: + except Exception as error: # noqa: BLE001 cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} fields cannot be resolved. 
{error}") from error + msg = f"{self.name} fields cannot be resolved. {error}" + raise cls(msg) from error return { assert_name(name): value if isinstance(value, GraphQLField) @@ -843,23 +879,29 @@ def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) - except Exception as error: + except Exception as error: # noqa: BLE001 cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} interfaces cannot be resolved. {error}") from error + msg = f"{self.name} interfaces cannot be resolved. {error}" + raise cls(msg) from error return tuple(interfaces) if interfaces else () def is_interface_type(type_: Any) -> TypeGuard[GraphQLInterfaceType]: + """Check whether this is a GraphQL interface type.""" return isinstance(type_, GraphQLInterfaceType) def assert_interface_type(type_: Any) -> GraphQLInterfaceType: + """Assert that this is a GraphQL interface type.""" if not is_interface_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Interface type.") + msg = f"Expected {type_} to be a GraphQL Interface type." + raise TypeError(msg) return type_ class GraphQLUnionTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL union types""" + types: Tuple[GraphQLObjectType, ...] 
resolve_type: Optional[GraphQLTypeResolver] @@ -907,6 +949,7 @@ def __init__( self.resolve_type = resolve_type def to_kwargs(self) -> GraphQLUnionTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList return GraphQLUnionTypeKwargs( # type: ignore super().to_kwargs(), types=self.types, resolve_type=self.resolve_type @@ -920,19 +963,23 @@ def types(self) -> Tuple[GraphQLObjectType, ...]: """Get provided types.""" try: types: Collection[GraphQLObjectType] = resolve_thunk(self._types) - except Exception as error: + except Exception as error: # noqa: BLE001 cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} types cannot be resolved. {error}") from error + msg = f"{self.name} types cannot be resolved. {error}" + raise cls(msg) from error return tuple(types) if types else () def is_union_type(type_: Any) -> TypeGuard[GraphQLUnionType]: + """Check whether this is a GraphQL union type.""" return isinstance(type_, GraphQLUnionType) def assert_union_type(type_: Any) -> GraphQLUnionType: + """Assert that this is a GraphQL union type.""" if not is_union_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Union type.") + msg = f"Expected {type_} to be a GraphQL Union type." + raise TypeError(msg) return type_ @@ -940,6 +987,8 @@ def assert_union_type(type_: Any) -> GraphQLUnionType: class GraphQLEnumTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL enum types""" + values: GraphQLEnumValueMap names_as_values: Optional[bool] @@ -1006,13 +1055,13 @@ def __init__( isinstance(name, str) for name in values ): try: - # noinspection PyTypeChecker values = dict(values) # type: ignore - except (TypeError, ValueError): - raise TypeError( + except (TypeError, ValueError) as error: + msg = ( f"{name} values must be an Enum or a mapping" " with value names as keys." 
) + raise TypeError(msg) from error values = cast(Dict[str, Any], values) else: values = cast(Dict[str, Enum], values) @@ -1029,6 +1078,7 @@ def __init__( self.values = values def to_kwargs(self) -> GraphQLEnumTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList return GraphQLEnumTypeKwargs( # type: ignore super().to_kwargs(), values=self.values.copy() @@ -1053,6 +1103,7 @@ def _value_lookup(self) -> Dict[Any, str]: return lookup def serialize(self, output_value: Any) -> str: + """Serialize an output value.""" try: return self._value_lookup[output_value] except KeyError: # hashable value not found @@ -1061,65 +1112,74 @@ def serialize(self, output_value: Any) -> str: for enum_name, enum_value in self.values.items(): if enum_value.value == output_value: return enum_name - raise GraphQLError( - f"Enum '{self.name}' cannot represent value: {inspect(output_value)}" - ) + msg = f"Enum '{self.name}' cannot represent value: {inspect(output_value)}" + raise GraphQLError(msg) def parse_value(self, input_value: str) -> Any: + """Parse an enum value.""" if isinstance(input_value, str): try: enum_value = self.values[input_value] - except KeyError: - raise GraphQLError( + except KeyError as error: + msg = ( f"Value '{input_value}' does not exist in '{self.name}' enum." + did_you_mean_enum_value(self, input_value) ) + raise GraphQLError(msg) from error return enum_value.value value_str = inspect(input_value) - raise GraphQLError( + msg = ( f"Enum '{self.name}' cannot represent non-string value: {value_str}." + did_you_mean_enum_value(self, value_str) ) + raise GraphQLError(msg) def parse_literal( self, value_node: ValueNode, _variables: Optional[Dict[str, Any]] = None ) -> Any: + """Parse literal value.""" # Note: variables will be resolved before calling this method. 
if isinstance(value_node, EnumValueNode): try: enum_value = self.values[value_node.value] - except KeyError: + except KeyError as error: value_str = print_ast(value_node) - raise GraphQLError( + msg = ( f"Value '{value_str}' does not exist in '{self.name}' enum." - + did_you_mean_enum_value(self, value_str), - value_node, + + did_you_mean_enum_value(self, value_str) ) + raise GraphQLError(msg, value_node) from error return enum_value.value value_str = print_ast(value_node) - raise GraphQLError( + msg = ( f"Enum '{self.name}' cannot represent non-enum value: {value_str}." - + did_you_mean_enum_value(self, value_str), - value_node, + + did_you_mean_enum_value(self, value_str) ) + raise GraphQLError(msg, value_node) def is_enum_type(type_: Any) -> TypeGuard[GraphQLEnumType]: + """Check whether this is a GraphQL enum type.""" return isinstance(type_, GraphQLEnumType) def assert_enum_type(type_: Any) -> GraphQLEnumType: + """Assert that this is a GraphQL enum type.""" if not is_enum_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Enum type.") + msg = f"Expected {type_} to be a GraphQL Enum type." 
+ raise TypeError(msg) return type_ def did_you_mean_enum_value(enum_type: GraphQLEnumType, unknown_value_str: str) -> str: + """Return suggestions for enum value.""" suggested_values = suggestion_list(unknown_value_str, enum_type.values) return did_you_mean(suggested_values, "the enum value") class GraphQLEnumValueKwargs(TypedDict, total=False): + """Arguments for GraphQL enum values""" + value: Any description: Optional[str] deprecation_reason: Optional[str] @@ -1128,6 +1188,8 @@ class GraphQLEnumValueKwargs(TypedDict, total=False): class GraphQLEnumValue: + """A GraphQL enum value.""" + value: Any description: Optional[str] deprecation_reason: Optional[str] @@ -1148,7 +1210,7 @@ def __init__( self.extensions = extensions or {} self.ast_node = ast_node - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLEnumValue) and self.value == other.value @@ -1158,6 +1220,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLEnumValueKwargs: + """Get corresponding arguments.""" return GraphQLEnumValueKwargs( value=self.value, description=self.description, @@ -1175,6 +1238,8 @@ def __copy__(self) -> GraphQLEnumValue: # pragma: no cover class GraphQLInputObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL input object types""" + fields: GraphQLInputFieldMap out_type: Optional[GraphQLInputFieldOutType] @@ -1237,6 +1302,7 @@ def out_type(value: Dict[str, Any]) -> Any: return value def to_kwargs(self) -> GraphQLInputObjectTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList return GraphQLInputObjectTypeKwargs( # type: ignore super().to_kwargs(), @@ -1254,9 +1320,10 @@ def fields(self) -> GraphQLInputFieldMap: """Get provided fields, wrap them as GraphQLInputField if needed.""" try: fields = resolve_thunk(self._fields) - except Exception as error: + except Exception as error: # noqa: BLE001 cls = GraphQLError if 
isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} fields cannot be resolved. {error}") from error + msg = f"{self.name} fields cannot be resolved. {error}" + raise cls(msg) from error return { assert_name(name): value if isinstance(value, GraphQLInputField) @@ -1266,16 +1333,21 @@ def fields(self) -> GraphQLInputFieldMap: def is_input_object_type(type_: Any) -> TypeGuard[GraphQLInputObjectType]: + """Check whether this is a GraphQL input type.""" return isinstance(type_, GraphQLInputObjectType) def assert_input_object_type(type_: Any) -> GraphQLInputObjectType: + """Assert that this is a GraphQL input type.""" if not is_input_object_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Input Object type.") + msg = f"Expected {type_} to be a GraphQL Input Object type." + raise TypeError(msg) return type_ class GraphQLInputFieldKwargs(TypedDict, total=False): + """Arguments for GraphQL input fields""" + type_: GraphQLInputType default_value: Any description: Optional[str] @@ -1314,7 +1386,7 @@ def __init__( self.extensions = extensions or {} self.ast_node = ast_node - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLInputField) and self.type == other.type @@ -1326,6 +1398,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLInputFieldKwargs: + """Get corresponding arguments.""" return GraphQLInputFieldKwargs( type_=self.type, default_value=self.default_value, @@ -1341,6 +1414,7 @@ def __copy__(self) -> GraphQLInputField: # pragma: no cover def is_required_input_field(field: GraphQLInputField) -> bool: + """Check whether this is input field is required.""" return is_non_null_type(field.type) and field.default_value is Undefined @@ -1374,16 +1448,19 @@ def __str__(self) -> str: def is_list_type(type_: Any) -> TypeGuard[GraphQLList]: + """Check whether this is a GraphQL list type.""" return isinstance(type_, GraphQLList) def 
assert_list_type(type_: Any) -> GraphQLList: + """Assert that this is a GraphQL list type.""" if not is_list_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL List type.") + msg = f"Expected {type_} to be a GraphQL List type." + raise TypeError(msg) return type_ -GNT = TypeVar("GNT", bound="GraphQLNullableType", covariant=True) +GNT = TypeVar("GNT", bound="GraphQLNullableType", covariant=True) # noqa: PLC0105 class GraphQLNonNull(GraphQLWrappingType[GNT]): @@ -1406,7 +1483,7 @@ class RowType(GraphQLObjectType): Note: the enforcement of non-nullability occurs within the executor. """ - def __init__(self, type_: GNT): + def __init__(self, type_: GNT) -> None: super().__init__(type_=type_) def __str__(self) -> str: @@ -1425,7 +1502,6 @@ def __str__(self) -> str: GraphQLList, ] - # These types may be used as input types for arguments and directives. GraphQLNullableInputType: TypeAlias = Union[ @@ -1440,7 +1516,6 @@ def __str__(self) -> str: GraphQLNullableInputType, GraphQLNonNull[GraphQLNullableInputType] ] - # These types may be used as output types as the result of fields. GraphQLNullableOutputType: TypeAlias = Union[ @@ -1462,18 +1537,22 @@ def __str__(self) -> str: def is_input_type(type_: Any) -> TypeGuard[GraphQLInputType]: + """Check whether this is a GraphQL input type.""" return isinstance( type_, (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) ) or (isinstance(type_, GraphQLWrappingType) and is_input_type(type_.of_type)) def assert_input_type(type_: Any) -> GraphQLInputType: + """Assert that this is a GraphQL input type.""" if not is_input_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL input type.") + msg = f"Expected {type_} to be a GraphQL input type." 
+ raise TypeError(msg) return type_ def is_output_type(type_: Any) -> TypeGuard[GraphQLOutputType]: + """Check whether this is a GraphQL output type.""" return isinstance( type_, ( @@ -1487,22 +1566,28 @@ def is_output_type(type_: Any) -> TypeGuard[GraphQLOutputType]: def assert_output_type(type_: Any) -> GraphQLOutputType: + """Assert that this is a GraphQL output type.""" if not is_output_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL output type.") + msg = f"Expected {type_} to be a GraphQL output type." + raise TypeError(msg) return type_ def is_non_null_type(type_: Any) -> TypeGuard[GraphQLNonNull]: + """Check whether this is a non-null GraphQL type.""" return isinstance(type_, GraphQLNonNull) def assert_non_null_type(type_: Any) -> GraphQLNonNull: + """Assert that this is a non-null GraphQL type.""" if not is_non_null_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Non-Null type.") + msg = f"Expected {type_} to be a GraphQL Non-Null type." + raise TypeError(msg) return type_ def is_nullable_type(type_: Any) -> TypeGuard[GraphQLNullableType]: + """Check whether this is a nullable GraphQL type.""" return isinstance( type_, ( @@ -1518,8 +1603,10 @@ def is_nullable_type(type_: Any) -> TypeGuard[GraphQLNullableType]: def assert_nullable_type(type_: Any) -> GraphQLNullableType: + """Assert that this is a nullable GraphQL type.""" if not is_nullable_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL nullable type.") + msg = f"Expected {type_} to be a GraphQL nullable type." 
+ raise TypeError(msg) return type_ @@ -1539,7 +1626,7 @@ def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: def get_nullable_type( - type_: Optional[Union[GraphQLNullableType, GraphQLNonNull]] + type_: Optional[Union[GraphQLNullableType, GraphQLNonNull]], ) -> Optional[GraphQLNullableType]: """Unwrap possible non-null type""" if is_non_null_type(type_): @@ -1563,12 +1650,15 @@ def get_nullable_type( def is_named_type(type_: Any) -> TypeGuard[GraphQLNamedType]: + """Check whether this is a named GraphQL type.""" return isinstance(type_, GraphQLNamedType) def assert_named_type(type_: Any) -> GraphQLNamedType: + """Assert that this is a named GraphQL type.""" if not is_named_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL named type.") + msg = f"Expected {type_} to be a GraphQL named type." + raise TypeError(msg) return type_ @@ -1598,12 +1688,15 @@ def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: def is_leaf_type(type_: Any) -> TypeGuard[GraphQLLeafType]: + """Check whether this is a GraphQL leaf type.""" return isinstance(type_, (GraphQLScalarType, GraphQLEnumType)) def assert_leaf_type(type_: Any) -> GraphQLLeafType: + """Assert that this is a GraphQL leaf type.""" if not is_leaf_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL leaf type.") + msg = f"Expected {type_} to be a GraphQL leaf type." 
+ raise TypeError(msg) return type_ @@ -1615,14 +1708,17 @@ def assert_leaf_type(type_: Any) -> GraphQLLeafType: def is_composite_type(type_: Any) -> TypeGuard[GraphQLCompositeType]: + """Check whether this is a GraphQL composite type.""" return isinstance( type_, (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) ) def assert_composite_type(type_: Any) -> GraphQLCompositeType: + """Assert that this is a GraphQL composite type.""" if not is_composite_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL composite type.") + msg = f"Expected {type_} to be a GraphQL composite type." + raise TypeError(msg) return type_ @@ -1632,10 +1728,13 @@ def assert_composite_type(type_: Any) -> GraphQLCompositeType: def is_abstract_type(type_: Any) -> TypeGuard[GraphQLAbstractType]: + """Check whether this is a GraphQL abstract type.""" return isinstance(type_, (GraphQLInterfaceType, GraphQLUnionType)) def assert_abstract_type(type_: Any) -> GraphQLAbstractType: + """Assert that this is a GraphQL abstract type.""" if not is_abstract_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL composite type.") + msg = f"Expected {type_} to be a GraphQL composite type." + raise TypeError(msg) return type_ diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 324c9dff..7966f377 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -1,3 +1,5 @@ +"""GraphQL directives""" + from __future__ import annotations # Python < 3.10 from typing import Any, Collection, Dict, Optional, Tuple, cast @@ -8,7 +10,6 @@ from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull from .scalars import GraphQLBoolean, GraphQLInt, GraphQLString - try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -37,6 +38,8 @@ class GraphQLDirectiveKwargs(TypedDict, total=False): + """Arguments for GraphQL directives""" + name: str locations: Tuple[DirectiveLocation, ...] 
args: Dict[str, GraphQLArgument] @@ -79,11 +82,12 @@ def __init__( else DirectiveLocation[cast(str, value)] for value in locations ) - except (KeyError, TypeError): - raise TypeError( + except (KeyError, TypeError) as error: + msg = ( f"{name} locations must be specified" " as a collection of DirectiveLocation enum values." ) + raise TypeError(msg) from error if args: args = { assert_name(name): value @@ -107,7 +111,7 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"<{self.__class__.__name__}({self})>" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLDirective) and self.name == other.name @@ -119,6 +123,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLDirectiveKwargs: + """Get corresponding arguments.""" return GraphQLDirectiveKwargs( name=self.name, locations=self.locations, @@ -134,13 +139,15 @@ def __copy__(self) -> GraphQLDirective: # pragma: no cover def is_directive(directive: Any) -> TypeGuard[GraphQLDirective]: - """Test if the given value is a GraphQL directive.""" + """Check whether this is a GraphQL directive.""" return isinstance(directive, GraphQLDirective) def assert_directive(directive: Any) -> GraphQLDirective: + """Assert that this is a GraphQL directive.""" if not is_directive(directive): - raise TypeError(f"Expected {inspect(directive)} to be a GraphQL directive.") + msg = f"Expected {inspect(directive)} to be a GraphQL directive." 
+ raise TypeError(msg) return directive @@ -161,7 +168,6 @@ def assert_directive(directive: Any) -> GraphQLDirective: " only when the `if` argument is true.", ) - # Used to conditionally skip (exclude) fields or fragments: GraphQLSkipDirective = GraphQLDirective( name="skip", @@ -216,7 +222,6 @@ def assert_directive(directive: Any) -> GraphQLDirective: }, ) - # Constant string used for default reason for a deprecation: DEFAULT_DEPRECATION_REASON = "No longer supported" @@ -256,7 +261,6 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Exposes a URL that specifies the behaviour of this scalar.", ) - specified_directives: Tuple[GraphQLDirective, ...] = ( GraphQLIncludeDirective, GraphQLSkipDirective, diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 17922d21..1edbdd9f 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -1,3 +1,5 @@ +"""GraphQL introspection""" + from enum import Enum from typing import Mapping @@ -25,7 +27,6 @@ ) from .scalars import GraphQLBoolean, GraphQLString - __all__ = [ "SchemaMetaFieldDef", "TypeKind", @@ -324,7 +325,8 @@ def kind(type_, _info): return TypeKind.NON_NULL # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") # pragma: no cover + msg = f"Unexpected type: {inspect(type_)}." 
# pragma: no cover + raise TypeError(msg) # pragma: no cover @staticmethod def name(type_, _info): @@ -341,45 +343,52 @@ def specified_by_url(type_, _info): # noinspection PyPep8Naming @staticmethod def fields(type_, _info, includeDeprecated=False): - if is_object_type(type_) or is_interface_type(type_): - items = type_.fields.items() - return ( - list(items) - if includeDeprecated - else [item for item in items if item[1].deprecation_reason is None] - ) + if not (is_object_type(type_) or is_interface_type(type_)): + return None + items = type_.fields.items() + return ( + list(items) + if includeDeprecated + else [item for item in items if item[1].deprecation_reason is None] + ) @staticmethod def interfaces(type_, _info): - if is_object_type(type_) or is_interface_type(type_): - return type_.interfaces + return ( + type_.interfaces + if is_object_type(type_) or is_interface_type(type_) + else None + ) @staticmethod def possible_types(type_, info): - if is_abstract_type(type_): - return info.schema.get_possible_types(type_) + return ( + info.schema.get_possible_types(type_) if is_abstract_type(type_) else None + ) # noinspection PyPep8Naming @staticmethod def enum_values(type_, _info, includeDeprecated=False): - if is_enum_type(type_): - items = type_.values.items() - return ( - items - if includeDeprecated - else [item for item in items if item[1].deprecation_reason is None] - ) + if not is_enum_type(type_): + return None + items = type_.values.items() + return ( + items + if includeDeprecated + else [item for item in items if item[1].deprecation_reason is None] + ) # noinspection PyPep8Naming @staticmethod def input_fields(type_, _info, includeDeprecated=False): - if is_input_object_type(type_): - items = type_.fields.items() - return ( - items - if includeDeprecated - else [item for item in items if item[1].deprecation_reason is None] - ) + if not is_input_object_type(type_): + return None + items = type_.fields.items() + return ( + items + if includeDeprecated + 
else [item for item in items if item[1].deprecation_reason is None] + ) @staticmethod def of_type(type_, _info): @@ -573,6 +582,8 @@ def deprecation_reason(item, _info): class TypeKind(Enum): + """Kinds of types""" + SCALAR = "scalar" OBJECT = "object" INTERFACE = "interface" @@ -687,4 +698,4 @@ def is_introspection_type(type_: GraphQLNamedType) -> bool: # register the introspection types to avoid redefinition -GraphQLNamedType.reserved_types.update(introspection_types) +GraphQLNamedType.reserved_types.update(introspection_types) # type: ignore diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 67f0b6b7..e9fbbdaa 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -1,3 +1,5 @@ +"""GraphQL scalar types""" + from math import isfinite from typing import Any, Mapping @@ -13,13 +15,11 @@ from ..pyutils import inspect from .definition import GraphQLNamedType, GraphQLScalarType - try: from typing import TypeGuard except ImportError: # Python < 3.10 from typing_extensions import TypeGuard - __all__ = [ "is_specified_scalar_type", "specified_scalar_types", @@ -55,21 +55,20 @@ def serialize_int(output_value: Any) -> int: elif isinstance(output_value, float): num = int(output_value) if num != output_value: - raise ValueError + raise ValueError # noqa: TRY301 elif not output_value and isinstance(output_value, str): output_value = "" - raise ValueError + raise ValueError # noqa: TRY301 else: num = int(output_value) # raises ValueError if not an integer - except (OverflowError, ValueError, TypeError): - raise GraphQLError( - "Int cannot represent non-integer value: " + inspect(output_value) - ) + except (OverflowError, ValueError, TypeError) as error: + msg = "Int cannot represent non-integer value: " + inspect(output_value) + raise GraphQLError(msg) from error if not GRAPHQL_MIN_INT <= num <= GRAPHQL_MAX_INT: - raise GraphQLError( - "Int cannot represent non 32-bit signed integer value: " - + inspect(output_value) + msg = "Int 
cannot represent non 32-bit signed integer value: " + inspect( + output_value ) + raise GraphQLError(msg) return num @@ -81,31 +80,27 @@ def coerce_int(input_value: Any) -> int: and isfinite(input_value) and int(input_value) == input_value ): - raise GraphQLError( - "Int cannot represent non-integer value: " + inspect(input_value) - ) + msg = "Int cannot represent non-integer value: " + inspect(input_value) + raise GraphQLError(msg) if not GRAPHQL_MIN_INT <= input_value <= GRAPHQL_MAX_INT: - raise GraphQLError( - "Int cannot represent non 32-bit signed integer value: " - + inspect(input_value) + msg = "Int cannot represent non 32-bit signed integer value: " + inspect( + input_value ) + raise GraphQLError(msg) return int(input_value) def parse_int_literal(value_node: ValueNode, _variables: Any = None) -> int: """Parse an integer value node in the AST.""" if not isinstance(value_node, IntValueNode): - raise GraphQLError( - "Int cannot represent non-integer value: " + print_ast(value_node), - value_node, - ) + msg = "Int cannot represent non-integer value: " + print_ast(value_node) + raise GraphQLError(msg, value_node) num = int(value_node.value) if not GRAPHQL_MIN_INT <= num <= GRAPHQL_MAX_INT: - raise GraphQLError( - "Int cannot represent non 32-bit signed integer value: " - + print_ast(value_node), - value_node, + msg = "Int cannot represent non 32-bit signed integer value: " + print_ast( + value_node ) + raise GraphQLError(msg, value_node) return num @@ -126,14 +121,13 @@ def serialize_float(output_value: Any) -> float: try: if not output_value and isinstance(output_value, str): output_value = "" - raise ValueError + raise ValueError # noqa: TRY301 num = output_value if isinstance(output_value, float) else float(output_value) if not isfinite(num): - raise ValueError - except (ValueError, TypeError): - raise GraphQLError( - "Float cannot represent non numeric value: " + inspect(output_value) - ) + raise ValueError # noqa: TRY301 + except (ValueError, TypeError) as 
error: + msg = "Float cannot represent non numeric value: " + inspect(output_value) + raise GraphQLError(msg) from error return num @@ -310,7 +304,6 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: parse_literal=parse_id_literal, ) - specified_scalar_types: Mapping[str, GraphQLScalarType] = { type_.name: type_ for type_ in ( @@ -329,4 +322,4 @@ def is_specified_scalar_type(type_: GraphQLNamedType) -> TypeGuard[GraphQLScalar # register the scalar types to avoid redefinition -GraphQLNamedType.reserved_types.update(specified_scalar_types) +GraphQLNamedType.reserved_types.update(specified_scalar_types) # type: ignore diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 386f10a5..4fa7d233 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -1,10 +1,25 @@ +"""GraphQL schemas""" + from __future__ import annotations # Python < 3.10 from copy import copy, deepcopy -from typing import Any, Collection, Dict, List, NamedTuple, Optional, Set, Tuple, cast +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + List, + NamedTuple, + Optional, + Set, + Tuple, + cast, +) + +if TYPE_CHECKING: + from ..error import GraphQLError + from ..language import OperationType, ast -from ..error import GraphQLError -from ..language import OperationType, ast from ..pyutils import inspect from .definition import ( GraphQLAbstractType, @@ -29,7 +44,6 @@ introspection_types, ) - try: from typing import TypedDict except ImportError: # Python < 3.8 @@ -39,10 +53,8 @@ except ImportError: # Python < 3.10 from typing_extensions import TypeAlias, TypeGuard - __all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "is_schema", "assert_schema"] - TypeMap: TypeAlias = Dict[str, GraphQLNamedType] @@ -52,6 +64,8 @@ class InterfaceImplementations(NamedTuple): class GraphQLSchemaKwargs(TypedDict, total=False): + """Arguments for GraphQL schemas""" + query: Optional[GraphQLObjectType] mutation: Optional[GraphQLObjectType] 
subscription: Optional[GraphQLObjectType] @@ -207,15 +221,17 @@ def __init__( type_name = getattr(named_type, "name", None) if not type_name: - raise TypeError( + msg = ( "One of the provided types for building the Schema" - " is missing a name.", + " is missing a name." ) + raise TypeError(msg) if type_name in type_map: - raise TypeError( + msg = ( "Schema must contain uniquely named types" f" but contains multiple types named '{type_name}'." ) + raise TypeError(msg) type_map[type_name] = named_type @@ -245,6 +261,7 @@ def __init__( implementations.objects.append(named_type) def to_kwargs(self) -> GraphQLSchemaKwargs: + """Get corresponding arguments.""" return GraphQLSchemaKwargs( query=self.query_type, mutation=self.mutation_type, @@ -296,9 +313,11 @@ def __deepcopy__(self, memo_: Dict) -> GraphQLSchema: ) def get_root_type(self, operation: OperationType) -> Optional[GraphQLObjectType]: + """Get the root type.""" return getattr(self, f"{operation.value}_type") def get_type(self, name: str) -> Optional[GraphQLNamedType]: + """Get the type with the given name.""" return self.type_map.get(name) def get_possible_types( @@ -316,6 +335,7 @@ def get_possible_types( def get_implementations( self, interface_type: GraphQLInterfaceType ) -> InterfaceImplementations: + """Get implementations for the given interface type.""" return self._implementations_map.get( interface_type.name, InterfaceImplementations(objects=[], interfaces=[]) ) @@ -345,6 +365,7 @@ def is_sub_type( return maybe_sub_type.name in types def get_directive(self, name: str) -> Optional[GraphQLDirective]: + """Get the directive with the given name.""" for directive in self.directives: if directive.name == name: return directive @@ -381,6 +402,7 @@ def get_field( @property def validation_errors(self) -> Optional[List[GraphQLError]]: + """Get validation errors.""" return self._validation_errors @@ -418,13 +440,15 @@ def collect_referenced_types(self, type_: GraphQLType) -> None: def is_schema(schema: Any) -> 
TypeGuard[GraphQLSchema]: - """Test if the given value is a GraphQL schema.""" + """Check whether this is a GraphQL schema.""" return isinstance(schema, GraphQLSchema) def assert_schema(schema: Any) -> GraphQLSchema: + """Assert that this is a GraphQL schema.""" if not is_schema(schema): - raise TypeError(f"Expected {inspect(schema)} to be a GraphQL schema.") + msg = f"Expected {inspect(schema)} to be a GraphQL schema." + raise TypeError(msg) return schema @@ -449,17 +473,17 @@ def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: ] fields = type_.fields for field_name, field in fields.items(): - field = copy(field) + field = copy(field) # noqa: PLW2901 field.type = remapped_type(field.type, type_map) args = field.args for arg_name, arg in args.items(): - arg = copy(arg) + arg = copy(arg) # noqa: PLW2901 arg.type = remapped_type(arg.type, type_map) args[arg_name] = arg fields[field_name] = field elif is_input_object_type(type_): fields = type_.fields for field_name, field in fields.items(): - field = copy(field) + field = copy(field) # noqa: PLW2901 field.type = remapped_type(field.type, type_map) fields[field_name] = field diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 3fdb2f09..505cebde 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -1,3 +1,5 @@ +"""Schema validation""" + from collections import defaultdict from operator import attrgetter, itemgetter from typing import Any, Collection, Dict, List, Optional, Set, Tuple, Union, cast @@ -37,7 +39,6 @@ from .introspection import is_introspection_type from .schema import GraphQLSchema, assert_schema - __all__ = ["validate_schema", "assert_valid_schema"] @@ -55,7 +56,7 @@ def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: # If this Schema has already been validated, return the previous results. 
# noinspection PyProtectedMember - errors = schema._validation_errors + errors = schema._validation_errors # noqa: SLF001 if errors is None: # Validate the schema, producing a list of errors. context = SchemaValidationContext(schema) @@ -66,7 +67,7 @@ def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: # Persist the results of validation before returning to ensure validation does # not run multiple times for this schema. errors = context.errors - schema._validation_errors = errors + schema._validation_errors = errors # noqa: SLF001 return errors @@ -87,7 +88,7 @@ class SchemaValidationContext: errors: List[GraphQLError] schema: GraphQLSchema - def __init__(self, schema: GraphQLSchema): + def __init__(self, schema: GraphQLSchema) -> None: self.errors = [] self.schema = schema @@ -499,7 +500,7 @@ def get_operation_type_node( class InputObjectCircularRefsValidator: """Modified copy of algorithm from validation.rules.NoFragmentCycles""" - def __init__(self, context: SchemaValidationContext): + def __init__(self, context: SchemaValidationContext) -> None: self.context = context # Tracks already visited types to maintain O(N) and to ensure that cycles # are not redundantly reported. diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index c8e75996..2c10b4e9 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -1,3 +1,5 @@ +"""GraphQL AST creation from Python""" + import re from math import isfinite from typing import Any, Mapping, Optional @@ -26,7 +28,6 @@ is_non_null_type, ) - __all__ = ["ast_from_value"] _re_integer_string = re.compile("^-?(?:0|[1-9][0-9]*)$") @@ -128,7 +129,9 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: return StringValueNode(value=serialized) - raise TypeError(f"Cannot convert value to AST: {inspect(serialized)}.") + msg = f"Cannot convert value to AST: {inspect(serialized)}." 
+ raise TypeError(msg) # Not reachable. All possible input types have been considered. - raise TypeError(f"Unexpected input type: {inspect(type_)}.") + msg = f"Unexpected input type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover diff --git a/src/graphql/utilities/ast_to_dict.py b/src/graphql/utilities/ast_to_dict.py index 9cacd8ab..a04e31a5 100644 --- a/src/graphql/utilities/ast_to_dict.py +++ b/src/graphql/utilities/ast_to_dict.py @@ -1,9 +1,10 @@ +"""Python dictionary creation from GraphQL AST""" + from typing import Any, Collection, Dict, List, Optional, overload from ..language import Node, OperationType from ..pyutils import is_iterable - __all__ = ["ast_to_dict"] @@ -39,7 +40,6 @@ def ast_to_dict( Set `location` to True in order to get the locations as well. """ - """Convert a node to a nested Python dictionary.""" if isinstance(node, Node): if cache is None: @@ -56,7 +56,7 @@ def ast_to_dict( if locations: loc = node.loc if loc: - res["loc"] = dict(start=loc.start, end=loc.end) + res["loc"] = {"start": loc.start, "end": loc.end} return res if is_iterable(node): return [ast_to_dict(sub_node, locations, cache) for sub_node in node] diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index acbef291..4ec86f02 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -1,3 +1,5 @@ +"""GraphQL Schema creation from GraphQL AST""" + from typing import Union, cast from ..language import DocumentNode, Source, parse @@ -9,7 +11,6 @@ ) from .extend_schema import ExtendSchemaImpl - __all__ = [ "build_ast_schema", "build_schema", @@ -73,7 +74,7 @@ def build_ast_schema( # If specified directives were not explicitly declared, add them. 
directives = schema_kwargs["directives"] - directive_names = set(directive.name for directive in directives) + directive_names = {directive.name for directive in directives} missing_directives = [] for directive in specified_directives: if directive.name not in directive_names: diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index 94c08a45..65e567a7 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -1,3 +1,5 @@ +"""GraphQL client schema creation""" + from itertools import chain from typing import Callable, Collection, Dict, List, Union, cast @@ -45,7 +47,6 @@ ) from .value_from_ast import value_from_ast - __all__ = ["build_client_schema"] @@ -68,11 +69,12 @@ def build_client_schema( if not isinstance(introspection, dict) or not isinstance( introspection.get("__schema"), dict ): - raise TypeError( + msg = ( "Invalid or incomplete introspection result. Ensure that you" " are passing the 'data' attribute of an introspection response" f" and no 'errors' were returned alongside: {inspect(introspection)}." ) + raise TypeError(msg) # Get the schema from the introspection result. schema_introspection = introspection["__schema"] @@ -84,13 +86,15 @@ def get_type(type_ref: IntrospectionTypeRef) -> GraphQLType: if kind == TypeKind.LIST.name: item_ref = type_ref.get("ofType") if not item_ref: - raise TypeError("Decorated type deeper than introspection query.") + msg = "Decorated type deeper than introspection query." + raise TypeError(msg) item_ref = cast(IntrospectionTypeRef, item_ref) return GraphQLList(get_type(item_ref)) if kind == TypeKind.NON_NULL.name: nullable_ref = type_ref.get("ofType") if not nullable_ref: - raise TypeError("Decorated type deeper than introspection query.") + msg = "Decorated type deeper than introspection query." 
+ raise TypeError(msg) nullable_ref = cast(IntrospectionTypeRef, nullable_ref) nullable_type = get_type(nullable_ref) return GraphQLNonNull(assert_nullable_type(nullable_type)) @@ -100,15 +104,17 @@ def get_type(type_ref: IntrospectionTypeRef) -> GraphQLType: def get_named_type(type_ref: IntrospectionType) -> GraphQLNamedType: type_name = type_ref.get("name") if not type_name: - raise TypeError(f"Unknown type reference: {inspect(type_ref)}.") + msg = f"Unknown type reference: {inspect(type_ref)}." + raise TypeError(msg) type_ = type_map.get(type_name) if not type_: - raise TypeError( + msg = ( f"Invalid or incomplete schema, unknown type: {type_name}." " Ensure that a full introspection query is used in order" " to build a client schema." ) + raise TypeError(msg) return type_ def get_object_type(type_ref: IntrospectionObjectType) -> GraphQLObjectType: @@ -125,11 +131,12 @@ def build_type(type_: IntrospectionType) -> GraphQLNamedType: builder = type_builders.get(type_["kind"]) if builder: # pragma: no cover else return builder(type_) - raise TypeError( + msg = ( "Invalid or incomplete introspection result." " Ensure that a full introspection query is used in order" f" to build a client schema: {inspect(type_)}." ) + raise TypeError(msg) def build_scalar_def( scalar_introspection: IntrospectionScalarType, @@ -155,10 +162,11 @@ def build_implementations_list( # 'interfaces' on interface types if implementing_introspection["kind"] == TypeKind.INTERFACE.name: return [] - raise TypeError( + msg = ( "Introspection result missing interfaces:" f" {inspect(implementing_introspection)}." 
) + raise TypeError(msg) interfaces = cast(Collection[IntrospectionInterfaceType], maybe_interfaces) return [get_interface_type(interface) for interface in interfaces] @@ -191,10 +199,11 @@ def build_union_def( ) -> GraphQLUnionType: maybe_possible_types = union_introspection.get("possibleTypes") if maybe_possible_types is None: - raise TypeError( + msg = ( "Introspection result missing possibleTypes:" f" {inspect(union_introspection)}." ) + raise TypeError(msg) possible_types = cast(Collection[IntrospectionObjectType], maybe_possible_types) return GraphQLUnionType( name=union_introspection["name"], @@ -204,10 +213,11 @@ def build_union_def( def build_enum_def(enum_introspection: IntrospectionEnumType) -> GraphQLEnumType: if enum_introspection.get("enumValues") is None: - raise TypeError( + msg = ( "Introspection result missing enumValues:" f" {inspect(enum_introspection)}." ) + raise TypeError(msg) name = enum_introspection["name"] try: return cast(GraphQLEnumType, GraphQLEnumType.reserved_types[name]) @@ -229,10 +239,11 @@ def build_input_object_def( input_object_introspection: IntrospectionInputObjectType, ) -> GraphQLInputObjectType: if input_object_introspection.get("inputFields") is None: - raise TypeError( + msg = ( "Introspection result missing inputFields:" f" {inspect(input_object_introspection)}." ) + raise TypeError(msg) return GraphQLInputObjectType( name=input_object_introspection["name"], description=input_object_introspection.get("description"), @@ -254,9 +265,9 @@ def build_field_def_map( type_introspection: Union[IntrospectionObjectType, IntrospectionInterfaceType], ) -> Dict[str, GraphQLField]: if type_introspection.get("fields") is None: - raise TypeError( - f"Introspection result missing fields: {type_introspection}." - ) + msg = f"Introspection result missing fields: {type_introspection}." 
+ + raise TypeError(msg) return { field_introspection["name"]: build_field(field_introspection) for field_introspection in type_introspection["fields"] @@ -266,17 +277,19 @@ def build_field(field_introspection: IntrospectionField) -> GraphQLField: type_introspection = cast(IntrospectionType, field_introspection["type"]) type_ = get_type(type_introspection) if not is_output_type(type_): - raise TypeError( + msg = ( "Introspection must provide output type for fields," f" but received: {inspect(type_)}." ) + raise TypeError(msg) args_introspection = field_introspection.get("args") if args_introspection is None: - raise TypeError( + msg = ( "Introspection result missing field args:" f" {inspect(field_introspection)}." ) + raise TypeError(msg) return GraphQLField( type_, @@ -299,10 +312,11 @@ def build_argument( type_introspection = cast(IntrospectionType, argument_introspection["type"]) type_ = get_type(type_introspection) if not is_input_type(type_): - raise TypeError( + msg = ( "Introspection must provide input type for arguments," f" but received: {inspect(type_)}." ) + raise TypeError(msg) default_value_introspection = argument_introspection.get("defaultValue") default_value = ( @@ -333,10 +347,11 @@ def build_input_value( type_introspection = cast(IntrospectionType, input_value_introspection["type"]) type_ = get_type(type_introspection) if not is_input_type(type_): - raise TypeError( + msg = ( "Introspection must provide input type for input fields," f" but received: {inspect(type_)}." ) + raise TypeError(msg) default_value_introspection = input_value_introspection.get("defaultValue") default_value = ( @@ -355,15 +370,17 @@ def build_directive( directive_introspection: IntrospectionDirective, ) -> GraphQLDirective: if directive_introspection.get("args") is None: - raise TypeError( + msg = ( "Introspection result missing directive args:" f" {inspect(directive_introspection)}." 
) + raise TypeError(msg) if directive_introspection.get("locations") is None: - raise TypeError( + msg = ( "Introspection result missing directive locations:" f" {inspect(directive_introspection)}." ) + raise TypeError(msg) return GraphQLDirective( name=directive_introspection["name"], description=directive_introspection.get("description"), diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index ac2d87ff..23883285 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -1,3 +1,5 @@ +"""Input value coercion""" + from typing import Any, Callable, Dict, List, Optional, Union, cast from ..error import GraphQLError @@ -19,7 +21,6 @@ is_non_null_type, ) - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -139,7 +140,7 @@ def coerce_input_value( except GraphQLError as error: on_error(path.as_list() if path else [], input_value, error) return Undefined - except Exception as error: + except Exception as error: # noqa: BLE001 on_error( path.as_list() if path else [], input_value, @@ -157,4 +158,5 @@ def coerce_input_value( return parse_result # Not reachable. All possible input types have been considered. - raise TypeError(f"Unexpected input type: {inspect(type_)}.") + msg = f"Unexpected input type: {inspect(type_)}." 
# pragma: no cover + raise TypeError(msg) # pragma: no cover diff --git a/src/graphql/utilities/concat_ast.py b/src/graphql/utilities/concat_ast.py index 6aca4b18..901d985e 100644 --- a/src/graphql/utilities/concat_ast.py +++ b/src/graphql/utilities/concat_ast.py @@ -1,9 +1,10 @@ +"""AST concatenation""" + from itertools import chain from typing import Collection from ..language.ast import DocumentNode - __all__ = ["concat_ast"] diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 17858d5f..906383e7 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -1,3 +1,5 @@ +"""GraphQL schema extension""" + from collections import defaultdict from functools import partial from typing import ( @@ -88,7 +90,6 @@ ) from .value_from_ast import value_from_ast - __all__ = [ "extend_schema", "ExtendSchemaImpl", @@ -178,7 +179,7 @@ class ExtendSchemaImpl: type_map: Dict[str, GraphQLNamedType] type_extensions: TypeExtensionsMap - def __init__(self, type_extensions: TypeExtensionsMap): + def __init__(self, type_extensions: TypeExtensionsMap) -> None: self.type_map = {} self.type_extensions = type_extensions @@ -270,6 +271,7 @@ def extend_schema_args( # noinspection PyTypeChecker,PyUnresolvedReferences def replace_type(self, type_: GraphQLType) -> GraphQLType: + """Replace a GraphQL type.""" if is_list_type(type_): return GraphQLList(self.replace_type(type_.of_type)) if is_non_null_type(type_): @@ -277,6 +279,7 @@ def replace_type(self, type_: GraphQLType) -> GraphQLType: return self.replace_named_type(type_) # type: ignore def replace_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: + """Replace a named GraphQL type.""" # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system # validation with validate_schema() will produce more actionable results. 
@@ -284,6 +287,7 @@ def replace_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: # noinspection PyShadowingNames def replace_directive(self, directive: GraphQLDirective) -> GraphQLDirective: + """Replace a GraphQL directive.""" if is_specified_directive(directive): # Builtin directives are not extended. return directive @@ -299,6 +303,7 @@ def replace_directive(self, directive: GraphQLDirective) -> GraphQLDirective: ) def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: + """Extend a named GraphQL type.""" if is_introspection_type(type_) or is_specified_scalar_type(type_): # Builtin types are not extended. return type_ @@ -316,11 +321,13 @@ def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: return self.extend_input_object_type(type_) # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") # pragma: no cover + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover def extend_input_object_type_fields( self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
) -> GraphQLInputFieldMap: + """Extend GraphQL input object type fields.""" return { **{ name: GraphQLInputField( @@ -339,6 +346,7 @@ def extend_input_object_type( self, type_: GraphQLInputObjectType, ) -> GraphQLInputObjectType: + """Extend a GraphQL input object type.""" kwargs = type_.to_kwargs() extensions = tuple(self.type_extensions.input_object[kwargs["name"]]) @@ -353,6 +361,7 @@ def extend_input_object_type( ) def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType: + """Extend a GraphQL enum type.""" kwargs = type_.to_kwargs() extensions = tuple(self.type_extensions.enum[kwargs["name"]]) @@ -365,6 +374,7 @@ def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType: ) def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: + """Extend a GraphQL scalar type.""" kwargs = type_.to_kwargs() extensions = tuple(self.type_extensions.scalar[kwargs["name"]]) @@ -383,6 +393,7 @@ def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: def extend_object_type_interfaces( self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] ) -> List[GraphQLInterfaceType]: + """Extend a GraphQL object type interface.""" return [ cast(GraphQLInterfaceType, self.replace_named_type(interface)) for interface in kwargs["interfaces"] @@ -391,6 +402,7 @@ def extend_object_type_interfaces( def extend_object_type_fields( self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
) -> GraphQLFieldMap: + """Extend GraphQL object type fields.""" return { **{ name: self.extend_field(field) @@ -401,6 +413,7 @@ def extend_object_type_fields( # noinspection PyShadowingNames def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: + """Extend a GraphQL object type.""" kwargs = type_.to_kwargs() extensions = tuple(self.type_extensions.object[kwargs["name"]]) @@ -418,6 +431,7 @@ def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: def extend_interface_type_interfaces( self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] ) -> List[GraphQLInterfaceType]: + """Extend GraphQL interface type interfaces.""" return [ cast(GraphQLInterfaceType, self.replace_named_type(interface)) for interface in kwargs["interfaces"] @@ -426,6 +440,7 @@ def extend_interface_type_interfaces( def extend_interface_type_fields( self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] ) -> GraphQLFieldMap: + """Extend GraphQL interface type fields.""" return { **{ name: self.extend_field(field) @@ -438,6 +453,7 @@ def extend_interface_type_fields( def extend_interface_type( self, type_: GraphQLInterfaceType ) -> GraphQLInterfaceType: + """Extend a GraphQL interface type.""" kwargs = type_.to_kwargs() extensions = tuple(self.type_extensions.interface[kwargs["name"]]) @@ -455,12 +471,14 @@ def extend_interface_type( def extend_union_type_types( self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
) -> List[GraphQLObjectType]: + """Extend types of a GraphQL union type.""" return [ cast(GraphQLObjectType, self.replace_named_type(member_type)) for member_type in kwargs["types"] ] + self.build_union_types(extensions) def extend_union_type(self, type_: GraphQLUnionType) -> GraphQLUnionType: + """Extend a GraphQL union type.""" kwargs = type_.to_kwargs() extensions = tuple(self.type_extensions.union[kwargs["name"]]) @@ -474,6 +492,7 @@ def extend_union_type(self, type_: GraphQLUnionType) -> GraphQLUnionType: # noinspection PyShadowingNames def extend_field(self, field: GraphQLField) -> GraphQLField: + """Extend a GraphQL field.""" return GraphQLField( **merge_kwargs( field.to_kwargs(), @@ -483,6 +502,7 @@ def extend_field(self, field: GraphQLField) -> GraphQLField: ) def extend_arg(self, arg: GraphQLArgument) -> GraphQLArgument: + """Extend a GraphQL argument.""" return GraphQLArgument( **merge_kwargs( arg.to_kwargs(), @@ -494,6 +514,7 @@ def extend_arg(self, arg: GraphQLArgument) -> GraphQLArgument: def get_operation_types( self, nodes: Collection[Union[SchemaDefinitionNode, SchemaExtensionNode]] ) -> Dict[OperationType, GraphQLNamedType]: + """Extend GraphQL operation types.""" # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system # validation with validate_schema() will produce more actionable results. @@ -505,14 +526,17 @@ def get_operation_types( # noinspection PyShadowingNames def get_named_type(self, node: NamedTypeNode) -> GraphQLNamedType: + """Get name GraphQL type for a given named type node.""" name = node.name.value type_ = std_type_map.get(name) or self.type_map.get(name) if not type_: - raise TypeError(f"Unknown type: '{name}'.") + msg = f"Unknown type: '{name}'." 
+ raise TypeError(msg) return type_ def get_wrapped_type(self, node: TypeNode) -> GraphQLType: + """Get wrapped GraphQL type for a given type node.""" if isinstance(node, ListTypeNode): return GraphQLList(self.get_wrapped_type(node.type)) if isinstance(node, NonNullTypeNode): @@ -522,6 +546,7 @@ def get_wrapped_type(self, node: TypeNode) -> GraphQLType: return self.get_named_type(cast(NamedTypeNode, node)) def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective: + """Build a GraphQL directive for a given directive definition node.""" locations = [DirectiveLocation[node.value] for node in node.locations] return GraphQLDirective( @@ -544,6 +569,7 @@ def build_field_map( ] ], ) -> GraphQLFieldMap: + """Build a GraphQL field map.""" field_map: GraphQLFieldMap = {} for node in nodes: for field in node.fields or []: @@ -563,6 +589,7 @@ def build_argument_map( self, args: Optional[Collection[InputValueDefinitionNode]], ) -> GraphQLArgumentMap: + """Build a GraphQL argument map.""" arg_map: GraphQLArgumentMap = {} for arg in args or []: # Note: While this could make assertions to get the correctly typed @@ -584,6 +611,7 @@ def build_input_field_map( Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] ], ) -> GraphQLInputFieldMap: + """Build a GraphQL input field map.""" input_field_map: GraphQLInputFieldMap = {} for node in nodes: for field in node.fields or []: @@ -602,8 +630,9 @@ def build_input_field_map( @staticmethod def build_enum_value_map( - nodes: Collection[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] + nodes: Collection[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]], ) -> GraphQLEnumValueMap: + """Build a GraphQL enum value map.""" enum_value_map: GraphQLEnumValueMap = {} for node in nodes: for value in node.values or []: @@ -630,6 +659,7 @@ def build_interfaces( ] ], ) -> List[GraphQLInterfaceType]: + """Build GraphQL interface types for the given nodes.""" # Note: While this could make assertions to get the 
correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. @@ -643,6 +673,7 @@ def build_union_types( self, nodes: Collection[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]], ) -> List[GraphQLObjectType]: + """Build GraphQL object types for the given union type nodes.""" # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. @@ -655,6 +686,7 @@ def build_union_types( def build_object_type( self, ast_node: ObjectTypeDefinitionNode ) -> GraphQLObjectType: + """Build a GraphQL object type for the given object type definition node.""" extension_nodes = self.type_extensions.object[ast_node.name.value] all_nodes: List[Union[ObjectTypeDefinitionNode, ObjectTypeExtensionNode]] = [ ast_node, @@ -673,6 +705,7 @@ def build_interface_type( self, ast_node: InterfaceTypeDefinitionNode, ) -> GraphQLInterfaceType: + """Build a GraphQL interface type for the given type definition nodes.""" extension_nodes = self.type_extensions.interface[ast_node.name.value] all_nodes: List[ Union[InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode] @@ -687,6 +720,7 @@ def build_interface_type( ) def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: + """Build a GraphQL enum type for the given enum type definition nodes.""" extension_nodes = self.type_extensions.enum[ast_node.name.value] all_nodes: List[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] = [ ast_node, @@ -701,6 +735,7 @@ def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: ) def build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: + """Build a GraphQL union type for the given union type definition nodes.""" extension_nodes = self.type_extensions.union[ast_node.name.value] all_nodes: 
List[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]] = [ ast_node, @@ -717,6 +752,7 @@ def build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionTyp def build_scalar_type( self, ast_node: ScalarTypeDefinitionNode ) -> GraphQLScalarType: + """Build a GraphQL scalar type for the given scalar type definition node.""" extension_nodes = self.type_extensions.scalar[ast_node.name.value] return GraphQLScalarType( name=ast_node.name.value, @@ -730,6 +766,7 @@ def build_input_object_type( self, ast_node: InputObjectTypeDefinitionNode, ) -> GraphQLInputObjectType: + """Build a GraphQL input object type for the given node.""" extension_nodes = self.type_extensions.input_object[ast_node.name.value] all_nodes: List[ Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] @@ -743,6 +780,7 @@ def build_input_object_type( ) def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: + """Build a named GraphQL type for the given type definition node.""" kind = ast_node.kind try: kind = kind.removesuffix("_definition") @@ -751,11 +789,12 @@ def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: kind = kind[:-11] try: build = getattr(self, f"build_{kind}") - except AttributeError: # pragma: no cover + except AttributeError as error: # pragma: no cover # Not reachable. All possible type definition nodes have been considered. - raise TypeError( # pragma: no cover + msg = ( # pragma: no cover f"Unexpected type definition node: {inspect(ast_node)}." 
) + raise TypeError(msg) from error # pragma: no cover return build(ast_node) @@ -766,7 +805,7 @@ def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: def get_deprecation_reason( - node: Union[EnumValueDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode] + node: Union[EnumValueDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode], ) -> Optional[str]: """Given a field or enum value node, get deprecation reason as string.""" from ..execution import get_directive_values @@ -776,7 +815,7 @@ def get_deprecation_reason( def get_specified_by_url( - node: Union[ScalarTypeDefinitionNode, ScalarTypeExtensionNode] + node: Union[ScalarTypeDefinitionNode, ScalarTypeExtensionNode], ) -> Optional[str]: """Given a scalar node, return the string value for the specifiedByURL.""" from ..execution import get_directive_values diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index c205d78a..c4899f7b 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -1,3 +1,5 @@ +"""Find breaking changes between GraphQL schemas""" + from enum import Enum from typing import Any, Collection, Dict, List, NamedTuple, Union @@ -30,7 +32,6 @@ from ..utilities.sort_value_node import sort_value_node from .ast_from_value import ast_from_value - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -48,6 +49,8 @@ class BreakingChangeType(Enum): + """Types of breaking changes""" + TYPE_REMOVED = 10 TYPE_CHANGED_KIND = 11 TYPE_REMOVED_FROM_UNION = 20 @@ -67,6 +70,8 @@ class BreakingChangeType(Enum): class DangerousChangeType(Enum): + """Types of dangerous changes""" + VALUE_ADDED_TO_ENUM = 60 TYPE_ADDED_TO_UNION = 61 OPTIONAL_INPUT_FIELD_ADDED = 62 @@ -76,11 +81,15 @@ class DangerousChangeType(Enum): class BreakingChange(NamedTuple): + """Type and description of a breaking change""" + type: BreakingChangeType description: str class 
DangerousChange(NamedTuple): + """Type and description of a dangerous change""" + type: DangerousChangeType description: str @@ -205,12 +214,12 @@ def find_type_changes( schema_changes.extend(find_union_type_changes(old_type, new_type)) elif is_input_object_type(old_type) and is_input_object_type(new_type): schema_changes.extend(find_input_object_type_changes(old_type, new_type)) - elif is_object_type(old_type) and is_object_type(new_type): - schema_changes.extend(find_field_changes(old_type, new_type)) - schema_changes.extend( - find_implemented_interfaces_changes(old_type, new_type) - ) - elif is_interface_type(old_type) and is_interface_type(new_type): + elif ( + is_object_type(old_type) + and is_object_type(new_type) + or is_interface_type(old_type) + and is_interface_type(new_type) + ): schema_changes.extend(find_field_changes(old_type, new_type)) schema_changes.extend( find_implemented_interfaces_changes(old_type, new_type) @@ -490,8 +499,7 @@ def is_change_safe_for_object_or_interface_field( if is_named_type(old_type): return ( # if they're both named types, see if their names are equivalent - is_named_type(new_type) - and old_type.name == new_type.name + is_named_type(new_type) and old_type.name == new_type.name ) or ( # moving from nullable to non-null of same underlying type is safe is_non_null_type(new_type) @@ -499,7 +507,8 @@ def is_change_safe_for_object_or_interface_field( ) # Not reachable. All possible output types have been considered. 
- raise TypeError(f"Unexpected type {inspect(old_type)}") + msg = f"Unexpected type {inspect(old_type)}" # pragma: no cover + raise TypeError(msg) # pragma: no cover def is_change_safe_for_input_object_field_or_field_arg( @@ -531,12 +540,12 @@ def is_change_safe_for_input_object_field_or_field_arg( if is_named_type(old_type): return ( # if they're both named types, see if their names are equivalent - is_named_type(new_type) - and old_type.name == new_type.name + is_named_type(new_type) and old_type.name == new_type.name ) # Not reachable. All possible output types have been considered. - raise TypeError(f"Unexpected type {inspect(old_type)}") + msg = f"Unexpected type {inspect(old_type)}" # pragma: no cover + raise TypeError(msg) # pragma: no cover def type_kind_name(type_: GraphQLNamedType) -> str: @@ -554,13 +563,15 @@ def type_kind_name(type_: GraphQLNamedType) -> str: return "an Input type" # Not reachable. All possible output types have been considered. - raise TypeError(f"Unexpected type {inspect(type_)}") + msg = f"Unexpected type {inspect(type_)}" # pragma: no cover + raise TypeError(msg) # pragma: no cover def stringify_value(value: Any, type_: GraphQLInputType) -> str: ast = ast_from_value(value, type_) if ast is None: # pragma: no cover - raise TypeError(f"Invalid value: {inspect(value)}") + msg = f"Invalid value: {inspect(value)}" + raise TypeError(msg) return print_ast(sort_value_node(ast)) diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 93478dc2..67feb598 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -1,9 +1,10 @@ +"""Get introspection query""" + from textwrap import dedent from typing import Any, Dict, List, Optional, Union from ..language import DirectiveLocation - try: from typing import Literal, TypedDict except ImportError: # Python < 3.8 diff --git a/src/graphql/utilities/get_operation_ast.py 
b/src/graphql/utilities/get_operation_ast.py index 08f8bb9a..8a211f3d 100644 --- a/src/graphql/utilities/get_operation_ast.py +++ b/src/graphql/utilities/get_operation_ast.py @@ -1,8 +1,9 @@ +"""Get operation AST node""" + from typing import Optional from ..language import DocumentNode, OperationDefinitionNode - __all__ = ["get_operation_ast"] diff --git a/src/graphql/utilities/introspection_from_schema.py b/src/graphql/utilities/introspection_from_schema.py index 4fb0a65f..4b67fb8f 100644 --- a/src/graphql/utilities/introspection_from_schema.py +++ b/src/graphql/utilities/introspection_from_schema.py @@ -1,3 +1,5 @@ +"""Building introspection queries from GraphQL schemas""" + from typing import cast from ..error import GraphQLError @@ -5,7 +7,6 @@ from ..type import GraphQLSchema from .get_introspection_query import IntrospectionQuery, get_introspection_query - __all__ = ["introspection_from_schema"] @@ -39,9 +40,11 @@ def introspection_from_schema( result = execute_sync(schema, document) if not isinstance(result, ExecutionResult): # pragma: no cover - raise RuntimeError("Introspection cannot be executed") + msg = "Introspection cannot be executed" + raise RuntimeError(msg) # noqa: TRY004 if result.errors: # pragma: no cover raise result.errors[0] if not result.data: # pragma: no cover - raise GraphQLError("Introspection did not return a result") + msg = "Introspection did not return a result" + raise GraphQLError(msg) return cast(IntrospectionQuery, result.data) diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index b6f6cc8f..810717de 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -1,3 +1,5 @@ +"""Sorting GraphQL schemas""" + from typing import Collection, Dict, Optional, Tuple, Union, cast from ..language import DirectiveLocation @@ -29,7 +31,6 @@ is_union_type, ) - __all__ = ["lexicographic_sort_schema"] @@ -40,7 
+41,7 @@ def lexicographic_sort_schema(schema: GraphQLSchema) -> GraphQLSchema: """ def replace_type( - type_: Union[GraphQLList, GraphQLNonNull, GraphQLNamedType] + type_: Union[GraphQLList, GraphQLNonNull, GraphQLNamedType], ) -> Union[GraphQLList, GraphQLNonNull, GraphQLNamedType]: if is_list_type(type_): return GraphQLList(replace_type(type_.of_type)) @@ -89,7 +90,7 @@ def sort_fields(fields_map: Dict[str, GraphQLField]) -> Dict[str, GraphQLField]: return fields def sort_input_fields( - fields_map: Dict[str, GraphQLInputField] + fields_map: Dict[str, GraphQLInputField], ) -> Dict[str, GraphQLInputField]: return { name: GraphQLInputField( @@ -155,7 +156,8 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover type_map: Dict[str, GraphQLNamedType] = { type_.name: sort_named_type(type_) @@ -180,6 +182,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: def sort_by_name_key( - type_: Union[GraphQLNamedType, GraphQLDirective, DirectiveLocation] + type_: Union[GraphQLNamedType, GraphQLDirective, DirectiveLocation], ) -> Tuple: return natural_comparison_key(type_.name) diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 00faad69..b3a5ba23 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -1,3 +1,5 @@ +"""Printing GraphQL Schemas in SDL format""" + from typing import Any, Callable, Dict, List, Optional, Union from ..language import StringValueNode, print_ast @@ -28,21 +30,23 @@ ) from .ast_from_value import ast_from_value - __all__ = ["print_schema", "print_introspection_schema", "print_type", "print_value"] def print_schema(schema: GraphQLSchema) -> str: + """Print the given GraphQL schema in SDL format.""" return 
print_filtered_schema( schema, lambda n: not is_specified_directive(n), is_defined_type ) def print_introspection_schema(schema: GraphQLSchema) -> str: + """Print the built-in introspection schema in SDL format.""" return print_filtered_schema(schema, is_specified_directive, is_introspection_type) def is_defined_type(type_: GraphQLNamedType) -> bool: + """Check if the given named GraphQL type is a defined type.""" return type_.name not in GraphQLNamedType.reserved_types @@ -51,6 +55,7 @@ def print_filtered_schema( directive_filter: Callable[[GraphQLDirective], bool], type_filter: Callable[[GraphQLNamedType], bool], ) -> str: + """Print a GraphQL schema filtered by the specified directives and types.""" directives = filter(directive_filter, schema.directives) types = filter(type_filter, schema.type_map.values()) @@ -64,6 +69,7 @@ def print_filtered_schema( def print_schema_definition(schema: GraphQLSchema) -> Optional[str]: + """Print GraphQL schema definitions.""" if schema.description is None and is_schema_of_common_names(schema): return None @@ -112,6 +118,7 @@ def is_schema_of_common_names(schema: GraphQLSchema) -> bool: def print_type(type_: GraphQLNamedType) -> str: + """Print a named GraphQL type.""" if is_scalar_type(type_): return print_scalar(type_) if is_object_type(type_): @@ -126,10 +133,12 @@ def print_type(type_: GraphQLNamedType) -> str: return print_input_object(type_) # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") + msg = f"Unexpected type: {inspect(type_)}." 
# pragma: no cover + raise TypeError(msg) # pragma: no cover def print_scalar(type_: GraphQLScalarType) -> str: + """Print a GraphQL scalar type.""" return ( print_description(type_) + f"scalar {type_.name}" @@ -138,13 +147,15 @@ def print_scalar(type_: GraphQLScalarType) -> str: def print_implemented_interfaces( - type_: Union[GraphQLObjectType, GraphQLInterfaceType] + type_: Union[GraphQLObjectType, GraphQLInterfaceType], ) -> str: + """Print the interfaces implemented by a GraphQL object or interface type.""" interfaces = type_.interfaces return " implements " + " & ".join(i.name for i in interfaces) if interfaces else "" def print_object(type_: GraphQLObjectType) -> str: + """Print a GraphQL object type.""" return ( print_description(type_) + f"type {type_.name}" @@ -154,6 +165,7 @@ def print_object(type_: GraphQLObjectType) -> str: def print_interface(type_: GraphQLInterfaceType) -> str: + """Print a GraphQL interface type.""" return ( print_description(type_) + f"interface {type_.name}" @@ -163,12 +175,14 @@ def print_interface(type_: GraphQLInterfaceType) -> str: def print_union(type_: GraphQLUnionType) -> str: + """Print a GraphQL union type.""" types = type_.types possible_types = " = " + " | ".join(t.name for t in types) if types else "" return print_description(type_) + f"union {type_.name}" + possible_types def print_enum(type_: GraphQLEnumType) -> str: + """Print a GraphQL enum type.""" values = [ print_description(value, " ", not i) + f" {name}" @@ -179,6 +193,7 @@ def print_enum(type_: GraphQLEnumType) -> str: def print_input_object(type_: GraphQLInputObjectType) -> str: + """Print a GraphQL input object type.""" fields = [ print_description(field, " ", not i) + " " + print_input_value(name, field) for i, (name, field) in enumerate(type_.fields.items()) @@ -187,6 +202,7 @@ def print_input_object(type_: GraphQLInputObjectType) -> str: def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: + """Print the fields of a GraphQL 
object or interface type.""" fields = [ print_description(field, " ", not i) + f" {name}" @@ -199,10 +215,12 @@ def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: def print_block(items: List[str]) -> str: + """Print a block with the given items.""" return " {\n" + "\n".join(items) + "\n}" if items else "" def print_args(args: Dict[str, GraphQLArgument], indentation: str = "") -> str: + """Print the given GraphQL arguments.""" if not args: return "" @@ -227,6 +245,7 @@ def print_args(args: Dict[str, GraphQLArgument], indentation: str = "") -> str: def print_input_value(name: str, arg: GraphQLArgument) -> str: + """Print an input value.""" default_ast = ast_from_value(arg.default_value, arg.type) arg_decl = f"{name}: {arg.type}" if default_ast: @@ -235,6 +254,7 @@ def print_input_value(name: str, arg: GraphQLArgument) -> str: def print_directive(directive: GraphQLDirective) -> str: + """Print a GraphQL directive.""" return ( print_description(directive) + f"directive @{directive.name}" @@ -246,6 +266,7 @@ def print_directive(directive: GraphQLDirective) -> str: def print_deprecated(reason: Optional[str]) -> str: + """Print a deprecation reason.""" if reason is None: return "" if reason != DEFAULT_DEPRECATION_REASON: @@ -255,6 +276,7 @@ def print_deprecated(reason: Optional[str]) -> str: def print_specified_by_url(scalar: GraphQLScalarType) -> str: + """Print a specification URL.""" if scalar.specified_by_url is None: return "" ast_value = print_ast(StringValueNode(value=scalar.specified_by_url)) @@ -272,6 +294,7 @@ def print_description( indentation: str = "", first_in_block: bool = True, ) -> str: + """Print a description.""" description = def_.description if description is None: return "" diff --git a/src/graphql/utilities/separate_operations.py b/src/graphql/utilities/separate_operations.py index bc3ac7d2..864b0f4e 100644 --- a/src/graphql/utilities/separate_operations.py +++ b/src/graphql/utilities/separate_operations.py @@ -1,3 +1,5 
@@ +"""Separation of GraphQL operations""" + from typing import Any, Dict, List, Set from ..language import ( @@ -10,7 +12,6 @@ visit, ) - try: from typing import TypeAlias except ImportError: # Python < 3.10 diff --git a/src/graphql/utilities/sort_value_node.py b/src/graphql/utilities/sort_value_node.py index 5edd0069..8a0c7935 100644 --- a/src/graphql/utilities/sort_value_node.py +++ b/src/graphql/utilities/sort_value_node.py @@ -1,10 +1,11 @@ +"""Sorting value nodes""" + from copy import copy from typing import Tuple from ..language import ListValueNode, ObjectFieldNode, ObjectValueNode, ValueNode from ..pyutils import natural_comparison_key - __all__ = ["sort_value_node"] diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index ce21678d..3e2c1658 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -1,3 +1,5 @@ +"""Removal of insignificant characters""" + from typing import Union, cast from ..language import Lexer, TokenKind @@ -5,7 +7,6 @@ from ..language.lexer import is_punctuator_token_kind from ..language.source import Source, is_source - __all__ = ["strip_ignored_characters"] @@ -31,7 +32,6 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: Warning: It is guaranteed that this function will always produce stable results. However, it's not guaranteed that it will stay the same between different releases due to bugfixes or changes in the GraphQL specification. 
- """ ''' Query example:: @@ -51,20 +51,20 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: SDL example:: - """ + \"\"\" Type description - """ + \"\"\" type Foo { - """ + \"\"\" Field description - """ + \"\"\" bar: String } Becomes:: - """Type description""" type Foo{"""Field description""" bar:String} - ''' + \"\"\"Type description\"\"\" type Foo{\"\"\"Field description\"\"\" bar:String} + """ if not is_source(source): source = Source(cast(str, source)) diff --git a/src/graphql/utilities/type_comparators.py b/src/graphql/utilities/type_comparators.py index c40a7e70..3ab50dc5 100644 --- a/src/graphql/utilities/type_comparators.py +++ b/src/graphql/utilities/type_comparators.py @@ -1,3 +1,5 @@ +"""GraphQL type comparators""" + from ..type import ( GraphQLCompositeType, GraphQLSchema, @@ -9,14 +11,14 @@ is_object_type, ) - __all__ = ["is_equal_type", "is_type_sub_type_of", "do_types_overlap"] def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool: """Check whether two types are equal. - Provided two types, return true if the types are equal (invariant).""" + Provided two types, return true if the types are equal (invariant). + """ # Equivalent types are equal. if type_a is type_b: return True @@ -54,7 +56,7 @@ def is_type_sub_type_of( schema, maybe_subtype.of_type, super_type.of_type ) return False - elif is_non_null_type(maybe_subtype): + if is_non_null_type(maybe_subtype): # If super_type is nullable, maybe_subtype may be non-null or nullable. return is_type_sub_type_of(schema, maybe_subtype.of_type, super_type) @@ -65,7 +67,7 @@ def is_type_sub_type_of( schema, maybe_subtype.of_type, super_type.of_type ) return False - elif is_list_type(maybe_subtype): + if is_list_type(maybe_subtype): # If super_type is not a list, maybe_subtype must also be not a list. 
return False diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index 749cb5bb..a978ffad 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -1,3 +1,5 @@ +"""Generating GraphQL types from AST nodes""" + from typing import Optional, cast, overload from ..language import ListTypeNode, NamedTypeNode, NonNullTypeNode, TypeNode @@ -11,7 +13,6 @@ GraphQLType, ) - __all__ = ["type_from_ast"] @@ -65,4 +66,5 @@ def type_from_ast( return schema.get_type(type_node.name.value) # Not reachable. All possible type nodes have been considered. - raise TypeError(f"Unexpected type node: {inspect(type_node)}.") + msg = f"Unexpected type node: {inspect(type_node)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index eeba22f9..2057c87f 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,3 +1,5 @@ +"""Managing type information""" + from __future__ import annotations # Python < 3.10 from typing import Any, Callable, List, Optional @@ -39,7 +41,6 @@ ) from .type_from_ast import type_from_ast - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -144,7 +145,7 @@ def leave(self, node: Node) -> None: method() # noinspection PyUnusedLocal - def enter_selection_set(self, node: SelectionSetNode) -> None: + def enter_selection_set(self, _node: SelectionSetNode) -> None: named_type = get_named_type(self.get_type()) self._parent_type_stack.append( named_type if is_composite_type(named_type) else None @@ -196,7 +197,7 @@ def enter_argument(self, node: ArgumentNode) -> None: self._input_type_stack.append(arg_type if is_input_type(arg_type) else None) # noinspection PyUnusedLocal - def enter_list_value(self, node: ListValueNode) -> None: + def enter_list_value(self, _node: ListValueNode) -> None: list_type = get_nullable_type(self.get_input_type()) 
item_type = list_type.of_type if is_list_type(list_type) else list_type # List positions never have a default value. @@ -268,7 +269,7 @@ def get_field_def( class TypeInfoVisitor(Visitor): """A visitor which maintains a provided TypeInfo.""" - def __init__(self, type_info: TypeInfo, visitor: Visitor): + def __init__(self, type_info: TypeInfo, visitor: Visitor) -> None: super().__init__() self.type_info = type_info self.visitor = visitor @@ -276,13 +277,14 @@ def __init__(self, type_info: TypeInfo, visitor: Visitor): def enter(self, node: Node, *args: Any) -> Any: self.type_info.enter(node) fn = self.visitor.get_enter_leave_for_kind(node.kind).enter - if fn: - result = fn(node, *args) - if result is not None: - self.type_info.leave(node) - if isinstance(result, Node): - self.type_info.enter(result) - return result + if not fn: + return None + result = fn(node, *args) + if result is not None: + self.type_info.leave(node) + if isinstance(result, Node): + self.type_info.enter(result) + return result def leave(self, node: Node, *args: Any) -> Any: fn = self.visitor.get_enter_leave_for_kind(node.kind).leave diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index c5c3224a..51d64c73 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -1,3 +1,5 @@ +"""Conversion from GraphQL value AST to Python values.""" + from typing import Any, Dict, List, Optional, cast from ..language import ( @@ -17,7 +19,6 @@ is_non_null_type, ) - __all__ = ["value_from_ast"] @@ -127,12 +128,13 @@ def value_from_ast( result = type_.parse_literal(value_node, variables) else: result = type_.parse_literal(value_node) - except Exception: + except Exception: # noqa: BLE001 return Undefined return result # Not reachable. All possible input types have been considered. - raise TypeError(f"Unexpected input type: {inspect(type_)}.") + msg = f"Unexpected input type: {inspect(type_)}." 
# pragma: no cover + raise TypeError(msg) # pragma: no cover def is_missing_variable( diff --git a/src/graphql/utilities/value_from_ast_untyped.py b/src/graphql/utilities/value_from_ast_untyped.py index d5ba819c..26c1bfb7 100644 --- a/src/graphql/utilities/value_from_ast_untyped.py +++ b/src/graphql/utilities/value_from_ast_untyped.py @@ -1,3 +1,5 @@ +"""Conversion from GraphQL value AST to Python values without type.""" + from math import nan from typing import Any, Callable, Dict, Optional, Union @@ -15,7 +17,6 @@ ) from ..pyutils import Undefined, inspect - __all__ = ["value_from_ast_untyped"] @@ -44,9 +45,8 @@ def value_from_ast_untyped( return func(value_node, variables) # Not reachable. All possible value nodes have been considered. - raise TypeError( # pragma: no cover - f"Unexpected value node: {inspect(value_node)}." - ) + msg = f"Unexpected value node: {inspect(value_node)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover def value_from_null(_value_node: NullValueNode, _variables: Any) -> Any: diff --git a/src/graphql/validation/rules/__init__.py b/src/graphql/validation/rules/__init__.py index 1b0c5d57..2ea665e4 100644 --- a/src/graphql/validation/rules/__init__.py +++ b/src/graphql/validation/rules/__init__.py @@ -16,11 +16,12 @@ class ASTValidationRule(Visitor): context: ASTValidationContext - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__() self.context = context def report_error(self, error: GraphQLError) -> None: + """Report a GraphQL error.""" self.context.report_error(error) diff --git a/src/graphql/validation/rules/custom/no_deprecated.py b/src/graphql/validation/rules/custom/no_deprecated.py index 94d76221..238e8fa0 100644 --- a/src/graphql/validation/rules/custom/no_deprecated.py +++ b/src/graphql/validation/rules/custom/no_deprecated.py @@ -1,3 +1,5 @@ +"""No deprecated rule""" + from typing import Any from ....error import GraphQLError @@ -5,7 +7,6 
@@ from ....type import get_named_type, is_input_object_type from .. import ValidationRule - __all__ = ["NoDeprecatedCustomRule"] diff --git a/src/graphql/validation/rules/custom/no_schema_introspection.py b/src/graphql/validation/rules/custom/no_schema_introspection.py index 97fb4346..1a16d169 100644 --- a/src/graphql/validation/rules/custom/no_schema_introspection.py +++ b/src/graphql/validation/rules/custom/no_schema_introspection.py @@ -1,3 +1,5 @@ +"""No schema introspection rule""" + from typing import Any from ....error import GraphQLError @@ -5,7 +7,6 @@ from ....type import get_named_type, is_introspection_type from .. import ValidationRule - __all__ = ["NoSchemaIntrospectionCustomRule"] diff --git a/src/graphql/validation/rules/defer_stream_directive_label.py b/src/graphql/validation/rules/defer_stream_directive_label.py index 9703ff82..6b688133 100644 --- a/src/graphql/validation/rules/defer_stream_directive_label.py +++ b/src/graphql/validation/rules/defer_stream_directive_label.py @@ -1,3 +1,5 @@ +"""Defer stream directive label rule""" + from typing import Any, Dict, List from ...error import GraphQLError @@ -5,7 +7,6 @@ from ...type import GraphQLDeferDirective, GraphQLStreamDirective from . import ASTValidationRule, ValidationContext - __all__ = ["DeferStreamDirectiveLabel"] @@ -16,7 +17,7 @@ class DeferStreamDirectiveLabel(ASTValidationRule): is static and unique. 
""" - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) self.known_labels: Dict[str, Node] = {} diff --git a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py index 707ee9f3..dbb274b3 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py @@ -1,3 +1,5 @@ +"""Defer stream directive on root field rule""" + from typing import Any, List, cast from ...error import GraphQLError @@ -5,7 +7,6 @@ from ...type import GraphQLDeferDirective, GraphQLStreamDirective from . import ASTValidationRule, ValidationContext - __all__ = ["DeferStreamDirectiveOnRootField"] diff --git a/src/graphql/validation/rules/executable_definitions.py b/src/graphql/validation/rules/executable_definitions.py index 01c38fd6..5c8f5f67 100644 --- a/src/graphql/validation/rules/executable_definitions.py +++ b/src/graphql/validation/rules/executable_definitions.py @@ -1,3 +1,5 @@ +"""Executable definitions rule""" + from typing import Any, Union, cast from ...error import GraphQLError @@ -13,7 +15,6 @@ ) from . import ASTValidationRule - __all__ = ["ExecutableDefinitionsRule"] diff --git a/src/graphql/validation/rules/fields_on_correct_type.py b/src/graphql/validation/rules/fields_on_correct_type.py index 3af8b4ca..3eef26ea 100644 --- a/src/graphql/validation/rules/fields_on_correct_type.py +++ b/src/graphql/validation/rules/fields_on_correct_type.py @@ -1,3 +1,5 @@ +"""Fields on correct type rule""" + from collections import defaultdict from functools import cmp_to_key from typing import Any, Dict, List, Union @@ -16,7 +18,6 @@ ) from . 
import ValidationRule - __all__ = ["FieldsOnCorrectTypeRule"] @@ -62,8 +63,7 @@ def enter_field(self, node: FieldNode, *_args: Any) -> None: def get_suggested_type_names( schema: GraphQLSchema, type_: GraphQLOutputType, field_name: str ) -> List[str]: - """ - Get a list of suggested type names. + """Get a list of suggested type names. Go through all of the implementations of type, as well as the interfaces that they implement. If any of those types include the provided field, diff --git a/src/graphql/validation/rules/fragments_on_composite_types.py b/src/graphql/validation/rules/fragments_on_composite_types.py index 06737f4f..c679b59d 100644 --- a/src/graphql/validation/rules/fragments_on_composite_types.py +++ b/src/graphql/validation/rules/fragments_on_composite_types.py @@ -1,3 +1,5 @@ +"""Fragments on composite type rule""" + from typing import Any from ...error import GraphQLError @@ -6,7 +8,6 @@ from ...utilities import type_from_ast from . import ValidationRule - __all__ = ["FragmentsOnCompositeTypesRule"] diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index e66cc078..da6b7481 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -1,3 +1,5 @@ +"""Known argument names on directives rule""" + from typing import Any, Dict, List, Union, cast from ...error import GraphQLError @@ -12,7 +14,6 @@ from ...type import specified_directives from . 
import ASTValidationRule, SDLValidationContext, ValidationContext - __all__ = ["KnownArgumentNamesRule", "KnownArgumentNamesOnDirectivesRule"] @@ -26,7 +27,7 @@ class KnownArgumentNamesOnDirectivesRule(ASTValidationRule): context: Union[ValidationContext, SDLValidationContext] - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: super().__init__(context) directive_args: Dict[str, List[str]] = {} @@ -76,7 +77,7 @@ class KnownArgumentNamesRule(KnownArgumentNamesOnDirectivesRule): context: ValidationContext - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) def enter_argument(self, arg_node: ArgumentNode, *args: Any) -> None: diff --git a/src/graphql/validation/rules/known_directives.py b/src/graphql/validation/rules/known_directives.py index f0cad1d8..b7504542 100644 --- a/src/graphql/validation/rules/known_directives.py +++ b/src/graphql/validation/rules/known_directives.py @@ -1,3 +1,5 @@ +"""Known directives rule""" + from typing import Any, Dict, List, Optional, Tuple, Union, cast from ...error import GraphQLError @@ -11,7 +13,6 @@ from ...type import specified_directives from . 
import ASTValidationRule, SDLValidationContext, ValidationContext - __all__ = ["KnownDirectivesRule"] @@ -26,7 +27,7 @@ class KnownDirectivesRule(ASTValidationRule): context: Union[ValidationContext, SDLValidationContext] - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: super().__init__(context) locations_map: Dict[str, Tuple[DirectiveLocation, ...]] = {} @@ -104,17 +105,17 @@ def get_directive_location_for_ast_path( ) -> Optional[DirectiveLocation]: applied_to = ancestors[-1] if not isinstance(applied_to, Node): # pragma: no cover - raise TypeError("Unexpected error in directive.") + msg = "Unexpected error in directive." + raise TypeError(msg) kind = applied_to.kind if kind == "operation_definition": applied_to = cast(OperationDefinitionNode, applied_to) return _operation_location[applied_to.operation.value] - elif kind == "input_value_definition": + if kind == "input_value_definition": parent_node = ancestors[-3] return ( DirectiveLocation.INPUT_FIELD_DEFINITION if parent_node.kind == "input_object_type_definition" else DirectiveLocation.ARGUMENT_DEFINITION ) - else: - return _directive_location.get(kind) + return _directive_location.get(kind) diff --git a/src/graphql/validation/rules/known_fragment_names.py b/src/graphql/validation/rules/known_fragment_names.py index 40e5173e..990436ed 100644 --- a/src/graphql/validation/rules/known_fragment_names.py +++ b/src/graphql/validation/rules/known_fragment_names.py @@ -1,10 +1,11 @@ +"""Known fragment names rule""" + from typing import Any from ...error import GraphQLError from ...language import FragmentSpreadNode from . 
import ValidationRule - __all__ = ["KnownFragmentNamesRule"] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index ead8161a..f914e409 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -1,3 +1,5 @@ +"""Known type names rule""" + from typing import Any, Collection, List, Union, cast from ...error import GraphQLError @@ -14,7 +16,6 @@ from ...type import introspection_types, specified_scalar_types from . import ASTValidationRule, SDLValidationContext, ValidationContext - try: from typing import TypeGuard except ImportError: # Python < 3.10 @@ -33,7 +34,7 @@ class KnownTypeNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-Spread-Type-Existence """ - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: super().__init__(context) schema = context.schema self.existing_types_map = schema.type_map if schema else {} @@ -85,7 +86,7 @@ def enter_named_type( def is_sdl_node( - value: Union[Node, Collection[Node], None] + value: Union[Node, Collection[Node], None], ) -> TypeGuard[Union[TypeSystemDefinitionNode, TypeSystemExtensionNode]]: return ( value is not None diff --git a/src/graphql/validation/rules/lone_anonymous_operation.py b/src/graphql/validation/rules/lone_anonymous_operation.py index aa9e9052..dedde5ca 100644 --- a/src/graphql/validation/rules/lone_anonymous_operation.py +++ b/src/graphql/validation/rules/lone_anonymous_operation.py @@ -1,10 +1,11 @@ +"""Lone anonymous operation rule""" + from typing import Any from ...error import GraphQLError from ...language import DocumentNode, OperationDefinitionNode from . 
import ASTValidationContext, ASTValidationRule - __all__ = ["LoneAnonymousOperationRule"] @@ -17,7 +18,7 @@ class LoneAnonymousOperationRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Lone-Anonymous-Operation """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) self.operation_count = 0 diff --git a/src/graphql/validation/rules/lone_schema_definition.py b/src/graphql/validation/rules/lone_schema_definition.py index cee76405..0e732c47 100644 --- a/src/graphql/validation/rules/lone_schema_definition.py +++ b/src/graphql/validation/rules/lone_schema_definition.py @@ -1,10 +1,11 @@ +"""Lone Schema definition rule""" + from typing import Any from ...error import GraphQLError from ...language import SchemaDefinitionNode from . import SDLValidationContext, SDLValidationRule - __all__ = ["LoneSchemaDefinitionRule"] @@ -14,7 +15,7 @@ class LoneSchemaDefinitionRule(SDLValidationRule): A GraphQL document is only valid if it contains only one schema definition. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) old_schema = context.schema self.already_defined = old_schema and ( diff --git a/src/graphql/validation/rules/no_fragment_cycles.py b/src/graphql/validation/rules/no_fragment_cycles.py index 971d42ee..5f1a0955 100644 --- a/src/graphql/validation/rules/no_fragment_cycles.py +++ b/src/graphql/validation/rules/no_fragment_cycles.py @@ -1,10 +1,11 @@ +"""No fragment cycles rule""" + from typing import Any, Dict, List, Set from ...error import GraphQLError from ...language import SKIP, FragmentDefinitionNode, FragmentSpreadNode, VisitorAction from . 
import ASTValidationContext, ASTValidationRule - __all__ = ["NoFragmentCyclesRule"] @@ -18,7 +19,7 @@ class NoFragmentCyclesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-spreads-must-not-form-cycles """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) # Tracks already visited fragments to maintain O(N) and to ensure that # cycles are not redundantly reported. diff --git a/src/graphql/validation/rules/no_undefined_variables.py b/src/graphql/validation/rules/no_undefined_variables.py index 73dec8b3..33ff1be8 100644 --- a/src/graphql/validation/rules/no_undefined_variables.py +++ b/src/graphql/validation/rules/no_undefined_variables.py @@ -1,10 +1,11 @@ +"""No undefined variables rule""" + from typing import Any, Set from ...error import GraphQLError from ...language import OperationDefinitionNode, VariableDefinitionNode from . import ValidationContext, ValidationRule - __all__ = ["NoUndefinedVariablesRule"] @@ -17,7 +18,7 @@ class NoUndefinedVariablesRule(ValidationRule): See https://spec.graphql.org/draft/#sec-All-Variable-Uses-Defined """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) self.defined_variable_names: Set[str] = set() diff --git a/src/graphql/validation/rules/no_unused_fragments.py b/src/graphql/validation/rules/no_unused_fragments.py index a5edb2d8..d13da572 100644 --- a/src/graphql/validation/rules/no_unused_fragments.py +++ b/src/graphql/validation/rules/no_unused_fragments.py @@ -1,3 +1,5 @@ +"""No unused fragments rule""" + from typing import Any, List from ...error import GraphQLError @@ -9,7 +11,6 @@ ) from . 
import ASTValidationContext, ASTValidationRule - __all__ = ["NoUnusedFragmentsRule"] @@ -22,7 +23,7 @@ class NoUnusedFragmentsRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragments-Must-Be-Used """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) self.operation_defs: List[OperationDefinitionNode] = [] self.fragment_defs: List[FragmentDefinitionNode] = [] diff --git a/src/graphql/validation/rules/no_unused_variables.py b/src/graphql/validation/rules/no_unused_variables.py index d402b7b6..8e714e83 100644 --- a/src/graphql/validation/rules/no_unused_variables.py +++ b/src/graphql/validation/rules/no_unused_variables.py @@ -1,10 +1,11 @@ +"""No unused variables rule""" + from typing import Any, List, Set from ...error import GraphQLError from ...language import OperationDefinitionNode, VariableDefinitionNode from . import ValidationContext, ValidationRule - __all__ = ["NoUnusedVariablesRule"] @@ -17,7 +18,7 @@ class NoUnusedVariablesRule(ValidationRule): See https://spec.graphql.org/draft/#sec-All-Variables-Used """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) self.variable_defs: List[VariableDefinitionNode] = [] diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 11ab44fa..fe0a6adb 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -1,3 +1,5 @@ +"""Overlapping fields can be merged rule""" + from itertools import chain from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast @@ -29,7 +31,6 @@ from ...utilities.sort_value_node import sort_value_node from . 
import ValidationContext, ValidationRule - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -60,7 +61,7 @@ class OverlappingFieldsCanBeMergedRule(ValidationRule): See https://spec.graphql.org/draft/#sec-Field-Selection-Merging """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) # A memoization for when two fragments are compared "between" each other for # conflicts. Two fragments may be compared many times, so memoizing this can @@ -236,7 +237,7 @@ def collect_conflicts_between_fields_and_fragment( """ fragment = context.get_fragment(fragment_name) if not fragment: - return None + return field_map2, referenced_fragment_names = get_referenced_fields_and_fragment_names( context, cached_fields_and_fragment_names, fragment @@ -309,7 +310,7 @@ def collect_conflicts_between_fragments( fragment1 = context.get_fragment(fragment_name1) fragment2 = context.get_fragment(fragment_name2) if not fragment1 or not fragment2: - return None + return field_map1, referenced_fragment_names1 = get_referenced_fields_and_fragment_names( context, cached_fields_and_fragment_names, fragment1 diff --git a/src/graphql/validation/rules/possible_fragment_spreads.py b/src/graphql/validation/rules/possible_fragment_spreads.py index d59f0716..d2a39c2e 100644 --- a/src/graphql/validation/rules/possible_fragment_spreads.py +++ b/src/graphql/validation/rules/possible_fragment_spreads.py @@ -1,3 +1,5 @@ +"""Possible fragment spread rule""" + from typing import Any, Optional from ...error import GraphQLError @@ -6,7 +8,6 @@ from ...utilities import do_types_overlap, type_from_ast from . 
import ValidationRule - __all__ = ["PossibleFragmentSpreadsRule"] diff --git a/src/graphql/validation/rules/possible_type_extensions.py b/src/graphql/validation/rules/possible_type_extensions.py index 6ad06723..8eab7111 100644 --- a/src/graphql/validation/rules/possible_type_extensions.py +++ b/src/graphql/validation/rules/possible_type_extensions.py @@ -1,3 +1,5 @@ +"""Possible type extension rule""" + import re from functools import partial from typing import Any, Optional @@ -15,7 +17,6 @@ ) from . import SDLValidationContext, SDLValidationRule - __all__ = ["PossibleTypeExtensionsRule"] @@ -25,7 +26,7 @@ class PossibleTypeExtensionsRule(SDLValidationRule): A type extension is only valid if the type is defined and has the same kind. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) self.schema = context.schema self.defined_types = { @@ -93,7 +94,8 @@ def type_to_ext_kind(type_: Any) -> str: return "input_object_type_extension" # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover _type_names_for_extension_kinds = { diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index 08ae7a5c..9da2395f 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -1,3 +1,5 @@ +"""Provided required arguments on directives rule""" + from typing import Any, Dict, List, Union, cast from ...error import GraphQLError @@ -15,7 +17,6 @@ from ...type import GraphQLArgument, is_required_argument, is_type, specified_directives from . 
import ASTValidationRule, SDLValidationContext, ValidationContext - __all__ = ["ProvidedRequiredArgumentsRule", "ProvidedRequiredArgumentsOnDirectivesRule"] @@ -30,7 +31,7 @@ class ProvidedRequiredArgumentsOnDirectivesRule(ASTValidationRule): context: Union[ValidationContext, SDLValidationContext] - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: super().__init__(context) required_args_map: Dict[ str, Dict[str, Union[GraphQLArgument, InputValueDefinitionNode]] @@ -89,7 +90,7 @@ class ProvidedRequiredArgumentsRule(ProvidedRequiredArgumentsOnDirectivesRule): context: ValidationContext - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) def leave_field(self, field_node: FieldNode, *_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/scalar_leafs.py b/src/graphql/validation/rules/scalar_leafs.py index 9a6c07c4..31ba0550 100644 --- a/src/graphql/validation/rules/scalar_leafs.py +++ b/src/graphql/validation/rules/scalar_leafs.py @@ -1,3 +1,5 @@ +"""Scalar leafs rule""" + from typing import Any from ...error import GraphQLError @@ -5,7 +7,6 @@ from ...type import get_named_type, is_leaf_type from . import ValidationRule - __all__ = ["ScalarLeafsRule"] diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 968cda48..40d37eb2 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -1,3 +1,5 @@ +"""Single field subscriptions rule""" + from typing import Any, Dict, cast from ...error import GraphQLError @@ -10,7 +12,6 @@ ) from . 
import ValidationRule - __all__ = ["SingleFieldSubscriptionsRule"] diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py index 41ba6066..0006915c 100644 --- a/src/graphql/validation/rules/stream_directive_on_list_field.py +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py @@ -1,3 +1,5 @@ +"""Stream directive on list field rule""" + from typing import Any, List, cast from ...error import GraphQLError @@ -5,7 +7,6 @@ from ...type import GraphQLStreamDirective, is_list_type, is_wrapping_type from . import ASTValidationRule, ValidationContext - __all__ = ["StreamDirectiveOnListField"] diff --git a/src/graphql/validation/rules/unique_argument_definition_names.py b/src/graphql/validation/rules/unique_argument_definition_names.py index 6664373b..24afa4db 100644 --- a/src/graphql/validation/rules/unique_argument_definition_names.py +++ b/src/graphql/validation/rules/unique_argument_definition_names.py @@ -1,3 +1,5 @@ +"""Unique argument definition names rule""" + from operator import attrgetter from typing import Any, Collection @@ -17,7 +19,6 @@ from ...pyutils import group_by from . import SDLValidationRule - __all__ = ["UniqueArgumentDefinitionNamesRule"] diff --git a/src/graphql/validation/rules/unique_argument_names.py b/src/graphql/validation/rules/unique_argument_names.py index 027b0788..bf226592 100644 --- a/src/graphql/validation/rules/unique_argument_names.py +++ b/src/graphql/validation/rules/unique_argument_names.py @@ -1,3 +1,5 @@ +"""Unique argument names rule""" + from operator import attrgetter from typing import Any, Collection @@ -6,7 +8,6 @@ from ...pyutils import group_by from . 
import ASTValidationRule - __all__ = ["UniqueArgumentNamesRule"] @@ -22,7 +23,7 @@ class UniqueArgumentNamesRule(ASTValidationRule): def enter_field(self, node: FieldNode, *_args: Any) -> None: self.check_arg_uniqueness(node.arguments) - def enter_directive(self, node: DirectiveNode, *args: Any) -> None: + def enter_directive(self, node: DirectiveNode, *_args: Any) -> None: self.check_arg_uniqueness(node.arguments) def check_arg_uniqueness(self, argument_nodes: Collection[ArgumentNode]) -> None: diff --git a/src/graphql/validation/rules/unique_directive_names.py b/src/graphql/validation/rules/unique_directive_names.py index 989ed366..039b1b48 100644 --- a/src/graphql/validation/rules/unique_directive_names.py +++ b/src/graphql/validation/rules/unique_directive_names.py @@ -1,10 +1,11 @@ +"""Unique directive names rule""" + from typing import Any, Dict from ...error import GraphQLError from ...language import SKIP, DirectiveDefinitionNode, NameNode, VisitorAction from . import SDLValidationContext, SDLValidationRule - __all__ = ["UniqueDirectiveNamesRule"] @@ -14,7 +15,7 @@ class UniqueDirectiveNamesRule(SDLValidationRule): A GraphQL document is only valid if all defined directives have unique names. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) self.known_directive_names: Dict[str, NameNode] = {} self.schema = context.schema diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index 2f7ba6ec..040c148f 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -1,3 +1,5 @@ +"""Unique directive names per location rule""" + from collections import defaultdict from typing import Any, Dict, List, Union, cast @@ -14,7 +16,6 @@ from ...type import specified_directives from . 
import ASTValidationRule, SDLValidationContext, ValidationContext - __all__ = ["UniqueDirectivesPerLocationRule"] @@ -29,7 +30,7 @@ class UniqueDirectivesPerLocationRule(ASTValidationRule): context: Union[ValidationContext, SDLValidationContext] - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: super().__init__(context) unique_directive_map: Dict[str, bool] = {} diff --git a/src/graphql/validation/rules/unique_enum_value_names.py b/src/graphql/validation/rules/unique_enum_value_names.py index e680ce2c..ef50ca2c 100644 --- a/src/graphql/validation/rules/unique_enum_value_names.py +++ b/src/graphql/validation/rules/unique_enum_value_names.py @@ -1,3 +1,5 @@ +"""Unique enum value names rule""" + from collections import defaultdict from typing import Any, Dict @@ -6,7 +8,6 @@ from ...type import is_enum_type from . import SDLValidationContext, SDLValidationRule - __all__ = ["UniqueEnumValueNamesRule"] @@ -16,7 +17,7 @@ class UniqueEnumValueNamesRule(SDLValidationRule): A GraphQL enum type is only valid if all its values are uniquely named. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_type_map = schema.type_map if schema else {} diff --git a/src/graphql/validation/rules/unique_field_definition_names.py b/src/graphql/validation/rules/unique_field_definition_names.py index 28f16711..8c7ca9af 100644 --- a/src/graphql/validation/rules/unique_field_definition_names.py +++ b/src/graphql/validation/rules/unique_field_definition_names.py @@ -1,3 +1,5 @@ +"""Unique field definition names rule""" + from collections import defaultdict from typing import Any, Dict @@ -6,7 +8,6 @@ from ...type import is_input_object_type, is_interface_type, is_object_type from . 
import SDLValidationContext, SDLValidationRule - __all__ = ["UniqueFieldDefinitionNamesRule"] @@ -16,7 +17,7 @@ class UniqueFieldDefinitionNamesRule(SDLValidationRule): A GraphQL complex type is only valid if all its fields are uniquely named. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_type_map = schema.type_map if schema else {} diff --git a/src/graphql/validation/rules/unique_fragment_names.py b/src/graphql/validation/rules/unique_fragment_names.py index 82e1bf60..40433944 100644 --- a/src/graphql/validation/rules/unique_fragment_names.py +++ b/src/graphql/validation/rules/unique_fragment_names.py @@ -1,10 +1,11 @@ +"""Unique fragment names rule""" + from typing import Any, Dict from ...error import GraphQLError from ...language import SKIP, FragmentDefinitionNode, NameNode, VisitorAction from . import ASTValidationContext, ASTValidationRule - __all__ = ["UniqueFragmentNamesRule"] @@ -16,7 +17,7 @@ class UniqueFragmentNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-Name-Uniqueness """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) self.known_fragment_names: Dict[str, NameNode] = {} diff --git a/src/graphql/validation/rules/unique_input_field_names.py b/src/graphql/validation/rules/unique_input_field_names.py index 0b7d49d2..a76efcd1 100644 --- a/src/graphql/validation/rules/unique_input_field_names.py +++ b/src/graphql/validation/rules/unique_input_field_names.py @@ -1,10 +1,11 @@ +"""Unique input field names rule""" + from typing import Any, Dict, List from ...error import GraphQLError from ...language import NameNode, ObjectFieldNode from . 
import ASTValidationContext, ASTValidationRule - __all__ = ["UniqueInputFieldNamesRule"] @@ -17,7 +18,7 @@ class UniqueInputFieldNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Input-Object-Field-Uniqueness """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) self.known_names_stack: List[Dict[str, NameNode]] = [] self.known_names: Dict[str, NameNode] = {} diff --git a/src/graphql/validation/rules/unique_operation_names.py b/src/graphql/validation/rules/unique_operation_names.py index f151d0d8..4752d23f 100644 --- a/src/graphql/validation/rules/unique_operation_names.py +++ b/src/graphql/validation/rules/unique_operation_names.py @@ -1,10 +1,11 @@ +"""Unique operation names rule""" + from typing import Any, Dict from ...error import GraphQLError from ...language import SKIP, NameNode, OperationDefinitionNode, VisitorAction from . import ASTValidationContext, ASTValidationRule - __all__ = ["UniqueOperationNamesRule"] @@ -16,7 +17,7 @@ class UniqueOperationNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Operation-Name-Uniqueness """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) self.known_operation_names: Dict[str, NameNode] = {} diff --git a/src/graphql/validation/rules/unique_operation_types.py b/src/graphql/validation/rules/unique_operation_types.py index f8170fc7..ca00f6fa 100644 --- a/src/graphql/validation/rules/unique_operation_types.py +++ b/src/graphql/validation/rules/unique_operation_types.py @@ -1,4 +1,6 @@ -from typing import Any, Dict, Optional, Union +"""Unique operation types rule""" + +from typing import TYPE_CHECKING, Any, Dict, Optional, Union from ...error import GraphQLError from ...language import ( @@ -9,9 +11,10 @@ SchemaExtensionNode, VisitorAction, ) -from ...type import GraphQLObjectType -from . 
import SDLValidationContext, SDLValidationRule +if TYPE_CHECKING: + from ...type import GraphQLObjectType +from . import SDLValidationContext, SDLValidationRule __all__ = ["UniqueOperationTypesRule"] @@ -22,7 +25,7 @@ class UniqueOperationTypesRule(SDLValidationRule): A GraphQL document is only valid if it has only one type per operation. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.defined_operation_types: Dict[ diff --git a/src/graphql/validation/rules/unique_type_names.py b/src/graphql/validation/rules/unique_type_names.py index 723fa7df..41e0767d 100644 --- a/src/graphql/validation/rules/unique_type_names.py +++ b/src/graphql/validation/rules/unique_type_names.py @@ -1,10 +1,11 @@ +"""Unique type names rule""" + from typing import Any, Dict from ...error import GraphQLError from ...language import SKIP, NameNode, TypeDefinitionNode, VisitorAction from . import SDLValidationContext, SDLValidationRule - __all__ = ["UniqueTypeNamesRule"] @@ -14,7 +15,7 @@ class UniqueTypeNamesRule(SDLValidationRule): A GraphQL document is only valid if all defined types have unique names. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) self.known_type_names: Dict[str, NameNode] = {} self.schema = context.schema diff --git a/src/graphql/validation/rules/unique_variable_names.py b/src/graphql/validation/rules/unique_variable_names.py index 8e547382..2e8a40ac 100644 --- a/src/graphql/validation/rules/unique_variable_names.py +++ b/src/graphql/validation/rules/unique_variable_names.py @@ -1,3 +1,5 @@ +"""Unique variable names rule""" + from operator import attrgetter from typing import Any @@ -6,7 +8,6 @@ from ...pyutils import group_by from . 
import ASTValidationRule - __all__ = ["UniqueVariableNamesRule"] diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index bc390f9d..0d5cc8da 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -1,3 +1,5 @@ +"""Value literals of correct type rule""" + from typing import Any, cast from ...error import GraphQLError @@ -29,7 +31,6 @@ ) from . import ValidationRule - __all__ = ["ValuesOfCorrectTypeRule"] @@ -147,7 +148,7 @@ def is_valid_value_node(self, node: ValueNode) -> None: ) except GraphQLError as error: self.report_error(error) - except Exception as error: + except Exception as error: # noqa: BLE001 self.report_error( GraphQLError( f"Expected value of type '{location_type}'," diff --git a/src/graphql/validation/rules/variables_are_input_types.py b/src/graphql/validation/rules/variables_are_input_types.py index 30ef713c..e135b667 100644 --- a/src/graphql/validation/rules/variables_are_input_types.py +++ b/src/graphql/validation/rules/variables_are_input_types.py @@ -1,3 +1,5 @@ +"""Variables are input types rule""" + from typing import Any from ...error import GraphQLError @@ -6,7 +8,6 @@ from ...utilities import type_from_ast from . import ValidationRule - __all__ = ["VariablesAreInputTypesRule"] diff --git a/src/graphql/validation/rules/variables_in_allowed_position.py b/src/graphql/validation/rules/variables_in_allowed_position.py index 312a11bc..ef9beccf 100644 --- a/src/graphql/validation/rules/variables_in_allowed_position.py +++ b/src/graphql/validation/rules/variables_in_allowed_position.py @@ -1,3 +1,5 @@ +"""Variables in allowed position rule""" + from typing import Any, Dict, Optional from ...error import GraphQLError @@ -12,7 +14,6 @@ from ...utilities import is_type_sub_type_of, type_from_ast from . 
import ValidationContext, ValidationRule - __all__ = ["VariablesInAllowedPositionRule"] @@ -24,7 +25,7 @@ class VariablesInAllowedPositionRule(ValidationRule): See https://spec.graphql.org/draft/#sec-All-Variable-Usages-are-Allowed """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) self.var_def_map: Dict[str, Any] = {} diff --git a/src/graphql/validation/specified_rules.py b/src/graphql/validation/specified_rules.py index e831c25f..d8c225d8 100644 --- a/src/graphql/validation/specified_rules.py +++ b/src/graphql/validation/specified_rules.py @@ -1,3 +1,5 @@ +"""Specified rules""" + from typing import Tuple, Type from .rules import ASTValidationRule @@ -105,7 +107,6 @@ # Spec Section: "All Variable Usages Are Allowed" from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule - __all__ = ["specified_rules", "specified_sdl_rules"] diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index b410ebbb..13c75d89 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -1,3 +1,5 @@ +"""Validation""" + from typing import Collection, List, Optional, Type from ..error import GraphQLError @@ -8,7 +10,6 @@ from .specified_rules import specified_rules, specified_sdl_rules from .validation_context import SDLValidationContext, ValidationContext - __all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] @@ -100,7 +101,6 @@ def assert_valid_sdl(document_ast: DocumentNode) -> None: Utility function which asserts a SDL document is valid by throwing an error if it is invalid. """ - errors = validate_sdl(document_ast) if errors: raise TypeError("\n\n".join(error.message for error in errors)) @@ -114,7 +114,6 @@ def assert_valid_sdl_extension( Utility function which asserts a SDL document is valid by throwing an error if it is invalid. 
""" - errors = validate_sdl(document_ast, schema) if errors: raise TypeError("\n\n".join(error.message for error in errors)) diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index c9930188..b7be4bca 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -1,3 +1,5 @@ +"""Validation context""" + from typing import Any, Callable, Dict, List, NamedTuple, Optional, Set, Union, cast from ..error import GraphQLError @@ -24,7 +26,6 @@ ) from ..utilities import TypeInfo, TypeInfoVisitor - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -43,6 +44,8 @@ class VariableUsage(NamedTuple): + """Variable usage""" + node: VariableNode type: Optional[GraphQLInputType] default_value: Any @@ -53,7 +56,7 @@ class VariableUsageVisitor(Visitor): usages: List[VariableUsage] - def __init__(self, type_info: TypeInfo): + def __init__(self, type_info: TypeInfo) -> None: super().__init__() self.usages = [] self._append_usage = self.usages.append diff --git a/src/graphql/version.py b/src/graphql/version.py index bbe3b1fe..544d59f5 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -1,9 +1,10 @@ +"""GraphQL-core version number""" + from __future__ import annotations # Python < 3.10 import re from typing import NamedTuple - __all__ = ["version", "version_info", "version_js", "version_info_js"] diff --git a/tests/benchmarks/test_async_iterable.py b/tests/benchmarks/test_async_iterable.py index 2be53bf7..d4a01166 100644 --- a/tests/benchmarks/test_async_iterable.py +++ b/tests/benchmarks/test_async_iterable.py @@ -3,7 +3,6 @@ from graphql import ExecutionResult, build_schema, execute, parse from graphql.pyutils import is_awaitable - schema = build_schema("type Query { listField: [String] }") document = parse("{ listField }") @@ -11,7 +10,7 @@ class Data: # noinspection PyPep8Naming @staticmethod - async def listField(info_): + async def 
listField(_info): for index in range(1000): yield index diff --git a/tests/benchmarks/test_build_client_schema.py b/tests/benchmarks/test_build_client_schema.py index 6c4b6e40..e56ca7a1 100644 --- a/tests/benchmarks/test_build_client_schema.py +++ b/tests/benchmarks/test_build_client_schema.py @@ -4,7 +4,8 @@ def test_build_schema_from_introspection( - benchmark, big_schema_introspection_result # noqa: F811 + benchmark, + big_schema_introspection_result, # noqa: F811 ): schema: GraphQLSchema = benchmark( lambda: build_client_schema( diff --git a/tests/benchmarks/test_execution_async.py b/tests/benchmarks/test_execution_async.py index 1db3a157..70de8729 100644 --- a/tests/benchmarks/test_execution_async.py +++ b/tests/benchmarks/test_execution_async.py @@ -8,7 +8,6 @@ graphql, ) - user = GraphQLObjectType( name="User", fields={ @@ -18,7 +17,7 @@ ) -async def resolve_user(obj, info): +async def resolve_user(_obj, _info): return { "id": "1", "name": "Sarah", diff --git a/tests/benchmarks/test_execution_sync.py b/tests/benchmarks/test_execution_sync.py index 5d7d9135..7ae78ea9 100644 --- a/tests/benchmarks/test_execution_sync.py +++ b/tests/benchmarks/test_execution_sync.py @@ -6,7 +6,6 @@ graphql_sync, ) - user = GraphQLObjectType( name="User", fields={ @@ -16,7 +15,7 @@ ) -def resolve_user(obj, info): +def resolve_user(_obj, _info): return { "id": "1", "name": "Sarah", diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index 58b019c1..121c5c3e 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -11,7 +11,6 @@ from ..utils import dedent - source = Source( dedent( """ @@ -25,7 +24,8 @@ ast = parse(source) operation_node = ast.definitions[0] operation_node = cast(OperationDefinitionNode, operation_node) -assert operation_node and operation_node.kind == "operation_definition" +assert operation_node +assert operation_node.kind == "operation_definition" field_node = 
operation_node.selection_set.selections[0] assert field_node @@ -247,18 +247,19 @@ def always_stores_path_as_list(): def is_comparable(): e1 = GraphQLError("msg,", path=["field", 1]) - assert e1 == e1 + assert e1 == e1 # noqa: PLR0124 + assert e1 == e1.formatted + assert e1 == e1 # noqa: PLR0124 assert e1 == e1.formatted - assert not e1 != e1 - assert not e1 != e1.formatted e2 = GraphQLError("msg,", path=["field", 1]) assert e1 == e2 - assert not e1 != e2 - assert e2.path and e2.path[1] == 1 + assert e1 == e2 + assert e2.path + assert e2.path[1] == 1 e2.path[1] = 2 - assert not e1 == e2 assert e1 != e2 - assert not e1 == e2.formatted + assert e1 != e2 + assert e1 != e2.formatted assert e1 != e2.formatted def is_hashable(): @@ -297,7 +298,9 @@ def prints_an_error_with_nodes_from_different_sources(): ) op_a = doc_a.definitions[0] op_a = cast(ObjectTypeDefinitionNode, op_a) - assert op_a and op_a.kind == "object_type_definition" and op_a.fields + assert op_a + assert op_a.kind == "object_type_definition" + assert op_a.fields field_a = op_a.fields[0] doc_b = parse( Source( @@ -313,7 +316,9 @@ def prints_an_error_with_nodes_from_different_sources(): ) op_b = doc_b.definitions[0] op_b = cast(ObjectTypeDefinitionNode, op_b) - assert op_b and op_b.kind == "object_type_definition" and op_b.fields + assert op_b + assert op_b.kind == "object_type_definition" + assert op_b.fields field_b = op_b.fields[0] error = GraphQLError( @@ -386,14 +391,14 @@ def includes_extension_fields(): } def can_be_created_from_dict(): - args = dict( - nodes=[operation_node], - source=source, - positions=[6], - path=["path", 2, "a"], - original_error=Exception("I like turtles"), - extensions=dict(hee="I like turtles"), - ) + args = { + "nodes": [operation_node], + "source": source, + "positions": [6], + "path": ["path", 2, "a"], + "original_error": Exception("I like turtles"), + "extensions": {"hee": "I like turtles"}, + } error = GraphQLError("msg", **args) # type: ignore assert error.formatted == 
{ "message": "msg", diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index d8282176..becca13d 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -1,7 +1,6 @@ from typing import Any, NamedTuple, Optional -from pytest import mark - +import pytest from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable @@ -20,15 +19,15 @@ def sync_and_async(spec): """Decorator for running a test synchronously and asynchronously.""" - return mark.asyncio( - mark.parametrize("sync", (True, False), ids=("sync", "async"))(spec) + return pytest.mark.asyncio( + pytest.mark.parametrize("sync", (True, False), ids=("sync", "async"))(spec) ) def access_variants(spec): """Decorator for tests with dict and object access, including inheritance.""" - return mark.asyncio( - mark.parametrize("access", ("dict", "object", "inheritance"))(spec) + return pytest.mark.asyncio( + pytest.mark.parametrize("access", ("dict", "object", "inheritance"))(spec) ) @@ -40,9 +39,7 @@ async def execute_query( assert isinstance(schema, GraphQLSchema) assert isinstance(query, str) document = parse(query) - result = (execute_sync if sync else execute)( - schema, document, root_value - ) # type: ignore + result = (execute_sync if sync else execute)(schema, document, root_value) # type: ignore if not sync and is_awaitable(result): result = await result assert isinstance(result, ExecutionResult) diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 5b839fc8..1eca78eb 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,17 +1,15 @@ from inspect import isasyncgen -from pytest import mark - +import pytest from graphql.execution import ExecutionContext, execute, subscribe from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString - try: - 
anext + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): + async def anext(iterator): # noqa: A001 """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -59,7 +57,7 @@ def execute_field( def describe_customize_subscription(): - @mark.asyncio + @pytest.mark.asyncio() async def uses_a_custom_subscribe_field_resolver(): schema = GraphQLSchema( query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), @@ -88,7 +86,7 @@ async def custom_foo(): await subscription.aclose() - @mark.asyncio + @pytest.mark.asyncio() async def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): def build_resolve_info(self, *args, **kwargs): diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 241a53b3..32d205f0 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -1,8 +1,7 @@ from asyncio import sleep from typing import Any, Dict, List, NamedTuple -from pytest import mark, raises - +import pytest from graphql.error import GraphQLError from graphql.execution import ( ExecutionContext, @@ -27,7 +26,6 @@ GraphQLString, ) - friend_type = GraphQLObjectType( "Friend", {"id": GraphQLField(GraphQLID), "name": GraphQLField(GraphQLString)} ) @@ -54,12 +52,12 @@ async def resolve_bad(_obj, _info) -> str: def resolve_null_sync(_obj, _info) -> None: """Simulate a resolver returning a null value synchronously.""" - return None + return async def resolve_null_async(_obj, _info) -> None: """Simulate a resolver returning a null value asynchronously.""" - return None + return hero_type = GraphQLObjectType( @@ -331,7 +329,7 @@ def can_print_deferred_fragment_record(): "path=['bar'], label='foo', parent_context, data)" ) - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_fragments_containing_scalar_types(): document = parse( """ @@ -359,7 +357,7 @@ async def 
can_defer_fragments_containing_scalar_types(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_disable_defer_using_if_argument(): document = parse( """ @@ -385,7 +383,7 @@ async def can_disable_defer_using_if_argument(): }, } - @mark.asyncio + @pytest.mark.asyncio() async def does_not_disable_defer_with_null_if_argument(): document = parse( """ @@ -410,7 +408,7 @@ async def does_not_disable_defer_with_null_if_argument(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def throws_an_error_for_defer_directive_with_non_string_label(): document = parse( """ @@ -431,7 +429,7 @@ async def throws_an_error_for_defer_directive_with_non_string_label(): ], } - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_fragments_on_the_top_level_query_field(): document = parse( """ @@ -457,7 +455,7 @@ async def can_defer_fragments_on_the_top_level_query_field(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_fragments_with_errors_on_the_top_level_query_field(): document = parse( """ @@ -494,7 +492,7 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_a_fragment_within_an_already_deferred_fragment(): document = parse( """ @@ -542,7 +540,7 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): document = parse( """ @@ -574,7 +572,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first(): document = parse( """ @@ -606,7 +604,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_an_inline_fragment(): document = parse( """ @@ -636,7 +634,7 @@ async def 
can_defer_an_inline_fragment(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -673,7 +671,7 @@ async def handles_errors_thrown_in_deferred_fragments(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_non_nullable_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -711,7 +709,7 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): document = parse( """ @@ -740,7 +738,7 @@ async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): ], } - @mark.asyncio + @pytest.mark.asyncio() async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -778,7 +776,7 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def returns_payloads_in_correct_order(): document = parse( """ @@ -831,7 +829,7 @@ async def returns_payloads_in_correct_order(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def returns_payloads_from_synchronous_data_in_correct_order(): document = parse( """ @@ -884,7 +882,7 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): document = parse( """ @@ -894,7 +892,7 @@ async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): """ ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: await execute(schema, document, {}) # type: ignore assert str(exc_info.value) == ( @@ -902,7 +900,7 @@ async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): " multiple payloads (due to @defer or @stream directive)" ) - @mark.asyncio + @pytest.mark.asyncio() async def 
original_execute_function_throws_error_if_deferred_and_not_all_is_sync(): document = parse( """ diff --git a/tests/execution/test_directives.py b/tests/execution/test_directives.py index 80abb8ff..d7f45dd6 100644 --- a/tests/execution/test_directives.py +++ b/tests/execution/test_directives.py @@ -2,7 +2,6 @@ from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString - schema = GraphQLSchema( GraphQLObjectType( "TestType", {"a": GraphQLField(GraphQLString), "b": GraphQLField(GraphQLString)} diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index c6b13d40..28ba17af 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -1,5 +1,4 @@ -from pytest import raises - +import pytest from graphql.error import GraphQLError from graphql.execution import ExecutionResult @@ -107,9 +106,9 @@ def compares_to_another_execution_result(): def unpacks_as_two_tuple(): res = ExecutionResult(data, errors) - res_data, res_errors = res # type: ignore - assert res_data == data # type: ignore - assert res_errors == errors # type: ignore - with raises(ValueError): - res = ExecutionResult(data, errors, extensions) - _res_data, _res_errors, _res_extensions = res # type: ignore + res_data, res_errors = res + assert res_data == data + assert res_errors == errors + res = ExecutionResult(data, errors, extensions) + with pytest.raises(ValueError, match="not enough values to unpack"): + _res_data, _res_errors, _res_extensions = res diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index e197ab27..f0c1477d 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -1,8 +1,7 @@ import asyncio from typing import Any, Awaitable, Optional, cast -from pytest import mark - +import pytest from graphql.error import GraphQLError from graphql.execution import execute, execute_sync from 
graphql.language import FieldNode, OperationDefinitionNode, parse @@ -30,7 +29,7 @@ def accepts_positional_arguments(): schema = GraphQLSchema( GraphQLObjectType( "Type", - {"a": GraphQLField(GraphQLString, resolve=lambda obj, *args: obj)}, + {"a": GraphQLField(GraphQLString, resolve=lambda obj, *_args: obj)}, ) ) @@ -38,7 +37,7 @@ def accepts_positional_arguments(): assert result == ({"a": "rootValue"}, None) - @mark.asyncio + @pytest.mark.asyncio() async def executes_arbitrary_code(): # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic class Data: @@ -242,7 +241,8 @@ def resolve(_obj, info): assert len(resolved_infos) == 1 operation = cast(OperationDefinitionNode, document.definitions[0]) - assert operation and operation.kind == "operation_definition" + assert operation + assert operation.kind == "operation_definition" field = cast(FieldNode, operation.selection_set.selections[0]) assert resolved_infos[0] == GraphQLResolveInfo( @@ -369,7 +369,7 @@ def resolve(_obj, _info, **args): assert len(resolved_args) == 1 assert resolved_args[0] == {"numArg": 123, "stringArg": "foo"} - @mark.asyncio + @pytest.mark.asyncio() async def nulls_out_error_subtrees(): document = parse( """ @@ -779,7 +779,7 @@ def resolves_to_an_error_if_schema_does_not_support_operation(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def correct_field_ordering_despite_execution_order(): schema = GraphQLSchema( GraphQLObjectType( @@ -895,7 +895,7 @@ def does_not_include_arguments_that_were_not_set(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def fails_when_is_type_of_check_is_not_met(): class Special: value: str @@ -955,7 +955,8 @@ async def async_is_special(): def fails_when_serialize_of_custom_scalar_does_not_return_a_value(): custom_scalar = GraphQLScalarType( - "CustomScalar", serialize=lambda _value: Undefined # returns nothing + "CustomScalar", + serialize=lambda _value: Undefined, # returns nothing ) schema = GraphQLSchema( GraphQLObjectType( diff --git 
a/tests/execution/test_flatten_async_iterable.py b/tests/execution/test_flatten_async_iterable.py index f98c16d0..74b8f9c6 100644 --- a/tests/execution/test_flatten_async_iterable.py +++ b/tests/execution/test_flatten_async_iterable.py @@ -1,21 +1,20 @@ +from contextlib import suppress from typing import AsyncGenerator -from pytest import mark, raises - +import pytest from graphql.execution import flatten_async_iterable - try: # pragma: no cover - anext + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): + async def anext(iterator): # noqa: A001 """Return the next item from an async iterator.""" return await iterator.__anext__() def describe_flatten_async_iterable(): - @mark.asyncio + @pytest.mark.asyncio() async def flattens_nested_async_generators(): async def source(): async def nested1() -> AsyncGenerator[float, None]: @@ -35,7 +34,7 @@ async def nested2() -> AsyncGenerator[float, None]: assert result == [1.1, 1.2, 2.1, 2.2] - @mark.asyncio + @pytest.mark.asyncio() async def allows_returning_early_from_a_nested_async_generator(): async def source(): async def nested1() -> AsyncGenerator[float, None]: @@ -63,18 +62,16 @@ async def nested3() -> AsyncGenerator[float, None]: assert await anext(doubles) == 2.1 # early return - try: + with suppress(RuntimeError): # suppress error for Python < 3.8 await doubles.aclose() - except RuntimeError: # Python < 3.8 - pass # subsequent anext calls - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(doubles) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def allows_throwing_errors_from_a_nested_async_generator(): async def source(): async def nested1() -> AsyncGenerator[float, None]: @@ -102,10 +99,10 @@ async def nested3() -> AsyncGenerator[float, None]: assert await anext(doubles) == 2.1 # throw 
error - with raises(RuntimeError, match="ouch"): + with pytest.raises(RuntimeError, match="ouch"): await doubles.athrow(RuntimeError, "ouch") - @mark.asyncio + @pytest.mark.asyncio() async def completely_yields_sub_iterables_even_when_anext_called_in_parallel(): async def source(): async def nested1() -> AsyncGenerator[float, None]: @@ -127,10 +124,10 @@ async def nested2() -> AsyncGenerator[float, None]: assert await anext2 == 1.2 assert await anext(doubles) == 2.1 assert await anext(doubles) == 2.2 - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def closes_nested_async_iterators(): closed = [] @@ -177,7 +174,7 @@ async def aclose(self): assert closed == [1.2, 2.2, 2] - @mark.asyncio + @pytest.mark.asyncio() async def works_with_nested_async_iterators_that_have_no_close_method(): class Source: def __init__(self): diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 2558b719..91e1bb3f 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,7 +1,6 @@ from typing import Any, AsyncGenerator -from pytest import mark - +import pytest from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable @@ -143,7 +142,7 @@ async def _complete_object_lists( resolve: GraphQLFieldResolver, count=3 ) -> ExecutionResult: async def _list_field( - obj_: Any, info_: GraphQLResolveInfo + _obj: Any, _info: GraphQLResolveInfo ) -> AsyncGenerator[_IndexData, None]: for index in range(count): yield _IndexData(index) @@ -172,7 +171,7 @@ async def _list_field( assert is_awaitable(result) return await result - @mark.asyncio + @pytest.mark.asyncio() async def accepts_an_async_generator_as_a_list_value(): async def list_field(): yield "two" @@ -184,7 +183,7 @@ async def list_field(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def 
accepts_a_custom_async_iterable_as_a_list_value(): class ListField: def __aiter__(self): @@ -203,7 +202,7 @@ async def __anext__(self): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_an_async_generator_that_throws(): async def list_field(): yield "two" @@ -215,7 +214,7 @@ async def list_field(): [{"message": "bad", "locations": [(1, 3)], "path": ["listField", 2]}], ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_an_async_generator_where_intermediate_value_triggers_an_error(): async def list_field(): yield "two" @@ -233,7 +232,7 @@ async def list_field(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_errors_from_complete_value_in_async_iterables(): async def list_field(): yield "two" @@ -250,9 +249,9 @@ async def list_field(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_async_functions_from_complete_value_in_async_iterables(): - async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: + async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: return data.index assert await _complete_object_lists(resolve) == ( @@ -260,9 +259,9 @@ async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: None, ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_single_async_functions_from_complete_value_in_async_iterables(): - async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: + async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: return data.index assert await _complete_object_lists(resolve, 1) == ( @@ -270,9 +269,9 @@ async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: None, ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_async_errors_from_complete_value_in_async_iterables(): - async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: + async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: index = data.index if index == 2: raise RuntimeError("bad") @@ -289,7 +288,7 
@@ async def resolve(data: _IndexData, info_: GraphQLResolveInfo) -> int: ], ) - @mark.asyncio + @pytest.mark.asyncio() async def handles_nulls_yielded_by_async_generator(): async def list_field(): yield 1 @@ -323,7 +322,7 @@ def execute_query(list_value: Any) -> Any: return result - @mark.asyncio + @pytest.mark.asyncio() async def contains_values(): list_field = [1, 2] assert await _complete(list_field, "[Int]") == ({"listField": [1, 2]}, None) @@ -331,7 +330,7 @@ async def contains_values(): assert await _complete(list_field, "[Int!]") == ({"listField": [1, 2]}, None) assert await _complete(list_field, "[Int!]!") == ({"listField": [1, 2]}, None) - @mark.asyncio + @pytest.mark.asyncio() async def contains_null(): list_field = [1, None, 2] errors = [ @@ -352,7 +351,7 @@ async def contains_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, errors) assert await _complete(list_field, "[Int!]!") == (None, errors) - @mark.asyncio + @pytest.mark.asyncio() async def returns_null(): list_field = None errors = [ @@ -367,7 +366,7 @@ async def returns_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, None) assert await _complete(list_field, "[Int!]!") == (None, errors) - @mark.asyncio + @pytest.mark.asyncio() async def contains_error(): list_field = [1, RuntimeError("bad"), 2] errors = [ @@ -394,7 +393,7 @@ async def contains_error(): errors, ) - @mark.asyncio + @pytest.mark.asyncio() async def results_in_errors(): list_field = RuntimeError("bad") errors = [ diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index e5d2312d..dd4aa3a8 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -1,13 +1,11 @@ -from pytest import mark, raises - +import pytest from graphql.execution import map_async_iterable - try: # pragma: no cover - anext + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection 
PyShadowingBuiltins - async def anext(iterator): + async def anext(iterator): # noqa: A001 """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -23,7 +21,7 @@ async def throw(_x: int) -> int: def describe_map_async_iterable(): - @mark.asyncio + @pytest.mark.asyncio() async def maps_over_async_generator(): async def source(): yield 1 @@ -35,10 +33,10 @@ async def source(): assert await anext(doubles) == 2 assert await anext(doubles) == 4 assert await anext(doubles) == 6 - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def maps_over_async_iterable(): items = [1, 2, 3] @@ -59,7 +57,7 @@ async def __anext__(self): assert not items assert values == [2, 4, 6] - @mark.asyncio + @pytest.mark.asyncio() async def compatible_with_async_for(): async def source(): yield 1 @@ -72,7 +70,7 @@ async def source(): assert values == [2, 4, 6] - @mark.asyncio + @pytest.mark.asyncio() async def allows_returning_early_from_mapped_async_generator(): async def source(): yield 1 @@ -88,12 +86,12 @@ async def source(): await doubles.aclose() # Subsequent next calls - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def allows_returning_early_from_mapped_async_iterable(): items = [1, 2, 3] @@ -116,12 +114,12 @@ async def __anext__(self): await doubles.aclose() # Subsequent next calls - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def allows_throwing_errors_through_async_iterable(): items = [1, 2, 3] @@ -142,17 +140,17 @@ async def __anext__(self): # Throw error message = 
"allows throwing errors when mapping async iterable" - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await doubles.athrow(RuntimeError(message)) assert str(exc_info.value) == message - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def allows_throwing_errors_with_values_through_async_iterables(): class Iterable: def __aiter__(self): @@ -169,16 +167,16 @@ async def __anext__(self): try: raise RuntimeError("Ouch") except RuntimeError as error: - with raises(RuntimeError, match="Ouch") as exc_info: + with pytest.raises(RuntimeError, match="Ouch") as exc_info: await one.athrow(error.__class__, error) - assert exc_info.value is error - assert exc_info.tb is error.__traceback__ + assert exc_info.value is error # noqa: PT017 + assert exc_info.tb is error.__traceback__ # noqa: PT017 - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(one) - @mark.asyncio + @pytest.mark.asyncio() async def allows_throwing_errors_with_traceback_through_async_iterables(): class Iterable: def __aiter__(self): @@ -195,16 +193,17 @@ async def __anext__(self): try: raise RuntimeError("Ouch") except RuntimeError as error: - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await one.athrow(error.__class__, None, error.__traceback__) - assert exc_info.tb and error.__traceback__ - assert exc_info.tb.tb_frame is error.__traceback__.tb_frame + assert exc_info.tb + assert error.__traceback__ # noqa: PT017 + assert exc_info.tb.tb_frame is error.__traceback__.tb_frame # noqa: PT017 - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(one) - @mark.asyncio + @pytest.mark.asyncio() async def does_not_map_over_thrown_errors(): async def source(): yield 1 @@ -214,12 
+213,12 @@ async def source(): assert await anext(doubles) == 2 - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await anext(doubles) assert str(exc_info.value) == "Goodbye" - @mark.asyncio + @pytest.mark.asyncio() async def does_not_map_over_externally_thrown_errors(): async def source(): yield 1 @@ -228,12 +227,12 @@ async def source(): assert await anext(doubles) == 2 - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await doubles.athrow(RuntimeError("Goodbye")) assert str(exc_info.value) == "Goodbye" - @mark.asyncio + @pytest.mark.asyncio() async def iterable_is_closed_when_mapped_iterable_is_closed(): class Iterable: def __init__(self): @@ -254,10 +253,10 @@ async def aclose(self): assert not iterable.closed await doubles.aclose() assert iterable.closed - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def iterable_is_closed_on_callback_error(): class Iterable: def __init__(self): @@ -274,13 +273,13 @@ async def aclose(self): iterable = Iterable() doubles = map_async_iterable(iterable, throw) - with raises(RuntimeError, match="Ouch"): + with pytest.raises(RuntimeError, match="Ouch"): await anext(doubles) assert iterable.closed - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def iterable_exits_on_callback_error(): exited = False @@ -293,13 +292,13 @@ async def iterable(): exited = True doubles = map_async_iterable(iterable(), throw) - with raises(RuntimeError, match="Ouch"): + with pytest.raises(RuntimeError, match="Ouch"): await anext(doubles) assert exited - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def mapped_iterable_is_closed_when_iterable_cannot_be_closed(): class Iterable: def 
__aiter__(self): @@ -311,10 +310,10 @@ async def __anext__(self): doubles = map_async_iterable(Iterable(), double) assert await anext(doubles) == 2 await doubles.aclose() - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) - @mark.asyncio + @pytest.mark.asyncio() async def ignores_that_iterable_cannot_be_closed_on_callback_error(): class Iterable: def __aiter__(self): @@ -324,7 +323,7 @@ async def __anext__(self): return 1 doubles = map_async_iterable(Iterable(), throw) - with raises(RuntimeError, match="Ouch"): + with pytest.raises(RuntimeError, match="Ouch"): await anext(doubles) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(doubles) diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 0d19e08e..4927b52f 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -1,7 +1,6 @@ from typing import Awaitable, cast -from pytest import mark, raises - +import pytest from graphql.execution import Middleware, MiddlewareManager, execute from graphql.language.parser import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -90,7 +89,7 @@ def capitalize_middleware(next_, *args, **kwargs): assert result.data == {"first": "Eno", "second": "Owt"} # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def single_async_function(): doc = parse("{ first second }") @@ -200,7 +199,7 @@ def resolve(self, next_, *args, **kwargs): ) assert result.data == {"field": "devloseR"} # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def with_async_function_and_object(): doc = parse("{ field }") @@ -277,7 +276,7 @@ def bad_middleware_object(): "TestType", {"field": GraphQLField(GraphQLString)} ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker execute( GraphQLSchema(test_type), diff --git 
a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 022b4900..9f8d6b06 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -1,8 +1,7 @@ from asyncio import sleep from typing import Any, Awaitable, List -from pytest import mark - +import pytest from graphql.execution import ( ExperimentalIncrementalExecutionResults, execute, @@ -105,7 +104,7 @@ async def promise_to_get_the_number(holder: NumberHolder, _info) -> int: def describe_execute_handles_mutation_execution_ordering(): - @mark.asyncio + @pytest.mark.asyncio() async def evaluates_mutations_serially(): document = parse( """ @@ -153,7 +152,7 @@ def does_not_include_illegal_mutation_fields_in_output(): result = execute_sync(schema=schema, document=document) assert result == ({}, None) - @mark.asyncio + @pytest.mark.asyncio() async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): document = parse( """ @@ -210,7 +209,7 @@ async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def mutation_fields_with_defer_do_not_block_next_mutation(): document = parse( """ @@ -255,7 +254,7 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def mutation_inside_of_a_fragment(): document = parse( """ @@ -281,7 +280,7 @@ async def mutation_inside_of_a_fragment(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def mutation_with_defer_is_not_executed_serially(): document = parse( """ diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index fe3dacee..053009a9 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -2,8 +2,7 @@ import re from typing import Any, Awaitable, cast -from pytest import mark - +import pytest from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import 
AwaitableOrValue @@ -17,7 +16,6 @@ ) from graphql.utilities import build_schema - sync_error = RuntimeError("sync") sync_non_null_error = RuntimeError("syncNonNull") promise_error = RuntimeError("promise") @@ -127,12 +125,12 @@ def describe_nulls_a_nullable_field(): } """ - @mark.asyncio + @pytest.mark.asyncio() async def returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ({"sync": None}, None) - @mark.asyncio + @pytest.mark.asyncio() async def throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -155,7 +153,7 @@ def describe_nulls_a_returned_object_that_contains_a_non_null_field(): } """ - @mark.asyncio + @pytest.mark.asyncio() async def that_returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ( @@ -170,7 +168,7 @@ async def that_returns_null(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def that_throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -216,14 +214,14 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): }, } - @mark.asyncio + @pytest.mark.asyncio() async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) ) assert result == (data, None) - @mark.asyncio + @pytest.mark.asyncio() async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -350,7 +348,7 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): "anotherPromiseNest": None, } - @mark.asyncio + @pytest.mark.asyncio() async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) @@ -413,7 +411,7 @@ async def returns_null(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -479,7 +477,7 @@ def 
describe_nulls_the_top_level_if_non_nullable_field(): } """ - @mark.asyncio + @pytest.mark.asyncio() async def returns_null(): result = await execute_sync_and_async(query, NullingData()) await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 @@ -495,7 +493,7 @@ async def returns_null(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def throws(): result = await execute_sync_and_async(query, ThrowingData()) await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index 9d28c3c3..faacd0c4 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -1,8 +1,7 @@ import asyncio from typing import Awaitable -from pytest import mark - +import pytest from graphql.execution import execute from graphql.language import parse from graphql.type import ( @@ -32,7 +31,7 @@ async def wait(self) -> bool: def describe_parallel_execution(): - @mark.asyncio + @pytest.mark.asyncio() async def resolve_single_field(): # make sure that the special case of resolving a single field works async def resolve(*_args): @@ -53,7 +52,7 @@ async def resolve(*_args): assert result == ({"foo": True}, None) - @mark.asyncio + @pytest.mark.asyncio() async def resolve_fields_in_parallel(): barrier = Barrier(2) @@ -79,7 +78,7 @@ async def resolve(*_args): assert result == ({"foo": True, "bar": True}, None) - @mark.asyncio + @pytest.mark.asyncio() async def resolve_single_element_list(): # make sure that the special case of resolving a single element list works async def resolve(*_args): @@ -98,7 +97,7 @@ async def resolve(*_args): assert result == ({"foo": [True]}, None) - @mark.asyncio + @pytest.mark.asyncio() async def resolve_list_in_parallel(): barrier = Barrier(2) @@ -128,7 +127,7 @@ async def resolve_list(*args): assert result == ({"foo": [True, True]}, None) - @mark.asyncio + @pytest.mark.asyncio() async def resolve_is_type_of_in_parallel(): FooType = 
GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_resolve.py b/tests/execution/test_resolve.py index cccb6105..1c77af8b 100644 --- a/tests/execution/test_resolve.py +++ b/tests/execution/test_resolve.py @@ -54,7 +54,7 @@ def default_function_accesses_keys_of_chain_map(): def default_function_calls_methods(): class RootValue: - _secret = "secretValue" + _secret = "secretValue" # noqa: S105 def test(self, _info): return self._secret @@ -108,7 +108,7 @@ def uses_provided_resolve_function(): "aStr": GraphQLArgument(GraphQLString), "aInt": GraphQLArgument(GraphQLInt), }, - resolve=lambda source, info, **args: repr([source, args]), + resolve=lambda source, _info, **args: repr([source, args]), ) ) @@ -146,7 +146,7 @@ def transforms_arguments_using_out_names(): "aStr": GraphQLArgument(GraphQLString, out_name="a_str"), "aInt": GraphQLArgument(GraphQLInt, out_name="a_int"), }, - resolve=lambda source, info, **args: repr([source, args]), + resolve=lambda source, _info, **args: repr([source, args]), ) ) @@ -187,7 +187,7 @@ def transforms_arguments_with_inputs_using_out_names(): GraphQLField( GraphQLString, args={"aInput": GraphQLArgument(TestInputObject, out_name="a_input")}, - resolve=lambda source, info, **args: repr([source, args]), + resolve=lambda source, _info, **args: repr([source, args]), ) ) diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index 69e4f973..de93e1de 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -20,7 +20,7 @@ def describe_execute_handles_execution_with_a_complex_schema(): def executes_using_a_schema(): class Article: # noinspection PyShadowingBuiltins - def __init__(self, id: int): + def __init__(self, id: int): # noqa: A002 self.id = id self.isPublished = True self.author = JohnSmith() @@ -78,7 +78,7 @@ def __init__(self, id: int): "article": GraphQLField( BlogArticle, args={"id": GraphQLArgument(GraphQLID)}, - resolve=lambda _obj, _info, 
id: Article(id), + resolve=lambda _obj, _info, id: Article(id), # noqa: A002 ), "feed": GraphQLField( GraphQLList(BlogArticle), @@ -91,7 +91,7 @@ def __init__(self, id: int): # noinspection PyPep8Naming,PyMethodMayBeStatic class Author: - def pic(self, info_, width: int, height: int) -> Pic: + def pic(self, _info, width: int, height: int) -> Pic: return Pic(123, width, height) @property diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 7edb3845..67385b3b 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1,8 +1,7 @@ from asyncio import Event, Lock, gather, sleep from typing import Any, Awaitable, Dict, List, NamedTuple -from pytest import mark, raises - +import pytest from graphql.error import GraphQLError from graphql.execution import ( ExecutionContext, @@ -24,12 +23,11 @@ GraphQLString, ) - try: # pragma: no cover - anext + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): + async def anext(iterator): # noqa: A001 """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -219,7 +217,7 @@ def can_compare_incremental_stream_result(): assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) assert result != {**args, "label": "baz"} - @mark.asyncio + @pytest.mark.asyncio() async def can_stream_a_list_field(): document = parse("{ scalarList @stream(initialCount: 1) }") result = await complete( @@ -242,7 +240,7 @@ async def can_stream_a_list_field(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_use_default_value_of_initial_count(): document = parse("{ scalarList @stream }") result = await complete( @@ -269,7 +267,7 @@ async def can_use_default_value_of_initial_count(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def negative_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: -2) }") result = await 
complete( @@ -288,7 +286,7 @@ async def negative_values_of_initial_count_throw_field_errors(): ], } - @mark.asyncio + @pytest.mark.asyncio() async def non_integer_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: 1.5) }") result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) @@ -305,7 +303,7 @@ async def non_integer_values_of_initial_count_throw_field_errors(): ], } - @mark.asyncio + @pytest.mark.asyncio() async def returns_label_from_stream_directive(): document = parse( '{ scalarList @stream(initialCount: 1, label: "scalar-stream") }' @@ -342,7 +340,7 @@ async def returns_label_from_stream_directive(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def throws_an_error_for_stream_directive_with_non_string_label(): document = parse("{ scalarList @stream(initialCount: 1, label: 42) }") result = await complete(document, {"scalarList": ["some apples"]}) @@ -362,7 +360,7 @@ async def throws_an_error_for_stream_directive_with_non_string_label(): ], } - @mark.asyncio + @pytest.mark.asyncio() async def can_disable_stream_using_if_argument(): document = parse("{ scalarList @stream(initialCount: 0, if: false) }") result = await complete( @@ -374,7 +372,7 @@ async def can_disable_stream_using_if_argument(): }, } - @mark.asyncio + @pytest.mark.asyncio() async def does_not_disable_stream_with_null_if_argument(): document = parse( "query ($shouldStream: Boolean)" @@ -401,7 +399,7 @@ async def does_not_disable_stream_with_null_if_argument(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_stream_multi_dimensional_lists(): document = parse("{ scalarListList @stream(initialCount: 1) }") result = await complete( @@ -441,7 +439,7 @@ async def can_stream_multi_dimensional_lists(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_stream_a_field_that_returns_a_list_of_awaitables(): document = parse( """ @@ -483,7 +481,7 @@ async def await_friend(f): }, ] - @mark.asyncio + 
@pytest.mark.asyncio() async def can_stream_in_correct_order_with_list_of_awaitables(): document = parse( """ @@ -538,7 +536,7 @@ async def await_friend(f): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_error_in_list_of_awaitables_before_initial_count_reached(): document = parse( """ @@ -588,7 +586,7 @@ async def await_friend(f, i): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_error_in_list_of_awaitables_after_initial_count_reached(): document = parse( """ @@ -647,7 +645,7 @@ async def await_friend(f, i): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_stream_a_field_that_returns_an_async_iterable(): document = parse( """ @@ -700,7 +698,7 @@ async def friend_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count(): document = parse( """ @@ -740,7 +738,7 @@ async def friend_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def negative_initial_count_throw_error_on_field_returning_async_iterable(): document = parse( """ @@ -768,7 +766,7 @@ async def friend_list(_info): "data": {"friendList": None}, } - @mark.asyncio + @pytest.mark.asyncio() async def can_handle_concurrent_calls_to_next_without_waiting(): document = parse( """ @@ -816,7 +814,7 @@ async def friend_list(_info): {"done": True, "value": None}, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_error_in_async_iterable_before_initial_count_is_reached(): document = parse( """ @@ -847,7 +845,7 @@ async def friend_list(_info): "data": {"friendList": [{"name": "Luke", "id": "1"}, None]}, } - @mark.asyncio + @pytest.mark.asyncio() async def handles_error_in_async_iterable_after_initial_count_is_reached(): document = parse( """ @@ -892,7 +890,7 @@ async def friend_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): document = parse( """ @@ -933,7 +931,7 @@ async def 
handles_null_for_non_null_list_items_after_initial_count_is_reached(): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_null_for_non_null_async_items_after_initial_count_is_reached(): document = parse( """ @@ -981,7 +979,7 @@ async def friend_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_error_thrown_in_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1020,7 +1018,7 @@ async def scalar_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_async_error_in_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1073,7 +1071,7 @@ def get_friends(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def handles_async_error_after_initial_count_reached_from_async_iterable(): document = parse( """ @@ -1137,7 +1135,7 @@ async def get_friends(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def filters_payloads_that_are_nulled(): document = parse( """ @@ -1154,7 +1152,6 @@ async def filters_payloads_that_are_nulled(): async def resolve_null(_info): await sleep(0) - return None async def friend_list(_info): await sleep(0) @@ -1189,7 +1186,7 @@ async def friend_list(_info): }, } - @mark.asyncio + @pytest.mark.asyncio() async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( """ @@ -1256,7 +1253,7 @@ async def friend_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): document = parse( """ @@ -1277,7 +1274,6 @@ async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): async def resolve_null(_info): await sleep(0) - return None async def friend_list(_info): await sleep(0) @@ -1327,7 +1323,7 @@ async def friend_list(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): document = parse( """ @@ -1344,7 +1340,6 @@ async def 
filters_defer_payloads_that_are_nulled_in_a_stream_response(): async def resolve_null(_info): await sleep(0) - return None async def friend(): await sleep(0) @@ -1385,14 +1380,13 @@ async def friend_list(_info): }, ] - @mark.timeout(1) - @mark.asyncio + @pytest.mark.timeout(1) + @pytest.mark.asyncio() async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered(): finished = False async def resolve_null(_info): await sleep(0) - return None async def iterable(_info): nonlocal finished @@ -1461,12 +1455,12 @@ async def iterable(_info): "hasNext": False, } - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(iterator) assert not finished # running iterator cannot be canceled - @mark.asyncio + @pytest.mark.asyncio() async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1526,7 +1520,7 @@ async def get_friends(_info): }, ] - @mark.asyncio + @pytest.mark.asyncio() async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): resolve_slow_field = Event() @@ -1604,11 +1598,11 @@ async def get_friends(_info): "hasNext": False, } - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(iterator) - @mark.timeout(1) - @mark.asyncio + @pytest.mark.timeout(1) + @pytest.mark.asyncio() async def can_defer_fields_that_are_resolved_after_async_iterable_is_complete(): resolve_slow_field = Event() resolve_iterable = Event() @@ -1683,10 +1677,10 @@ async def get_friends(_info): "hasNext": False, } - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(iterator) - @mark.asyncio + @pytest.mark.asyncio() async def can_defer_fields_that_are_resolved_before_async_iterable_is_complete(): resolve_slow_field = Event() resolve_iterable = Event() @@ -1767,10 +1761,10 @@ async def get_friends(_info): "hasNext": False, } - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await 
anext(iterator) - @mark.asyncio + @pytest.mark.asyncio() async def finishes_async_iterable_when_returned_generator_is_closed(): finished = False @@ -1804,12 +1798,12 @@ async def iterable(_info): assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} await iterator.aclose() - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(iterator) assert finished - @mark.asyncio + @pytest.mark.asyncio() async def finishes_async_iterable_when_underlying_iterator_has_no_close_method(): class Iterable: def __init__(self): @@ -1853,12 +1847,12 @@ async def __anext__(self): } await iterator.aclose() - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(iterator) assert iterable.index == 4 - @mark.asyncio + @pytest.mark.asyncio() async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): finished = False @@ -1891,10 +1885,10 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} - with raises(RuntimeError, match="bad"): + with pytest.raises(RuntimeError, match="bad"): await iterator.athrow(RuntimeError("bad")) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(iterator) assert finished diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 3d14c260..1db123e4 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,4 +1,5 @@ import asyncio +from contextlib import suppress from typing import ( Any, AsyncIterable, @@ -11,8 +12,7 @@ Union, ) -from pytest import mark, raises - +import pytest from graphql.execution import ( ExecutionResult, create_source_event_stream, @@ -36,17 +36,16 @@ from ..fixtures import cleanup from ..utils.assert_equal_awaitables_or_values import assert_equal_awaitables_or_values - try: from typing import TypedDict except ImportError: # Python < 3.8 from 
typing_extensions import TypedDict try: - anext + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): + async def anext(iterator): # noqa: A001 """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -164,9 +163,7 @@ def transform(new_email): "importantEmail": pubsub.get_subscriber(transform), } - return ( - subscribe if original_subscribe else experimental_subscribe_incrementally - )( # type: ignore + return (subscribe if original_subscribe else experimental_subscribe_incrementally)( # type: ignore email_schema, document, data, variable_values=variable_values ) @@ -201,7 +198,7 @@ def subscribe_with_bad_args( # Check all error cases when initializing the subscription. def describe_subscription_initialization_phase(): - @mark.asyncio + @pytest.mark.asyncio() async def accepts_positional_arguments(): document = parse( """ @@ -217,11 +214,11 @@ async def empty_async_iterable(_info): ai = subscribe(email_schema, document, {"importantEmail": empty_async_iterable}) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(ai) await ai.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def accepts_multiple_subscription_fields_defined_in_schema(): schema = GraphQLSchema( query=DummyQueryType, @@ -246,7 +243,7 @@ async def foo_generator(_info): await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def accepts_type_definition_with_sync_subscribe_function(): async def foo_generator(_obj, _info): yield {"foo": "FooValue"} @@ -266,7 +263,7 @@ async def foo_generator(_obj, _info): await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def accepts_type_definition_with_async_subscribe_function(): async def foo_generator(_obj, _info): await asyncio.sleep(0) @@ -294,7 +291,7 @@ async def subscribe_fn(obj, info): await subscription.aclose() # type: 
ignore - @mark.asyncio + @pytest.mark.asyncio() async def should_only_resolve_the_first_field_of_invalid_multi_field(): did_resolve = {"foo": False, "bar": False} @@ -329,7 +326,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") @@ -347,7 +344,7 @@ async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def resolves_to_an_error_for_unknown_subscription_field(): schema = GraphQLSchema( query=DummyQueryType, @@ -368,7 +365,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def should_pass_through_unexpected_errors_thrown_in_subscribe(): schema = GraphQLSchema( query=DummyQueryType, @@ -376,11 +373,11 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): "Subscription", {"foo": GraphQLField(GraphQLString)} ), ) - with raises(AttributeError): + with pytest.raises(AttributeError): subscribe_with_bad_args(schema=schema, document={}) # type: ignore - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): expected_result = ( None, @@ -409,7 +406,7 @@ async def async_fn(obj, info): del result cleanup() - @mark.asyncio + @pytest.mark.asyncio() async def resolves_to_an_error_for_subscription_resolver_errors(): expected_result = ( None, @@ -451,7 +448,7 @@ async def reject_with_error(*args): assert is_awaitable(result) assert await result == expected_result - @mark.asyncio + @pytest.mark.asyncio() async def resolves_to_an_error_if_variables_were_wrong_type(): 
schema = GraphQLSchema( query=DummyQueryType, @@ -496,7 +493,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # Once a subscription returns a valid AsyncIterator, it can still yield errors. def describe_subscription_publish_phase(): - @mark.asyncio + @pytest.mark.asyncio() async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() @@ -531,7 +528,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): assert await payload1 == (expected_payload, None) assert await payload2 == (expected_payload, None) - @mark.asyncio + @pytest.mark.asyncio() async def produces_a_payload_when_queried_fields_are_async(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"asyncResolver": True}) @@ -563,14 +560,12 @@ async def produces_a_payload_when_queried_fields_are_async(): None, ) - try: + with suppress(RuntimeError): # suppress error for Python < 3.8 await subscription.aclose() # type: ignore - except RuntimeError: # Python < 3.8 - pass - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(subscription) - @mark.asyncio + @pytest.mark.asyncio() async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -629,10 +624,8 @@ async def produces_a_payload_per_subscription_event(): # The client decides to disconnect. # noinspection PyUnresolvedReferences - try: + with suppress(RuntimeError): # suppress error for Python < 3.8 await subscription.aclose() # type: ignore - except RuntimeError: # Python < 3.8 - pass # Which may result in disconnecting upstream services as well. assert ( @@ -648,10 +641,10 @@ async def produces_a_payload_per_subscription_event(): ) # No more listeners. # Awaiting subscription after closing it results in completed results. 
- with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @mark.asyncio + @pytest.mark.asyncio() async def produces_additional_payloads_for_subscriptions_with_defer(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldDefer": True}) @@ -768,16 +761,14 @@ async def produces_additional_payloads_for_subscriptions_with_defer(): } # The client disconnects before the deferred payload is consumed. - try: + with suppress(RuntimeError): # suppress error for Python < 3.8 await subscription.aclose() # type: ignore - except RuntimeError: # Python < 3.8 - pass # Awaiting a subscription after closing it results in completed results. - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @mark.asyncio + @pytest.mark.asyncio() async def original_subscribe_function_returns_errors_with_defer(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldDefer": True}, True) @@ -840,10 +831,10 @@ async def original_subscribe_function_returns_errors_with_defer(): await subscription.aclose() # type: ignore # Awaiting a subscription after closing it results in completed results. 
- with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @mark.asyncio + @pytest.mark.asyncio() async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -899,7 +890,7 @@ async def produces_a_payload_when_there_are_multiple_events(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -931,10 +922,8 @@ async def should_not_trigger_when_subscription_is_already_done(): ) payload = anext(subscription) - try: + with suppress(RuntimeError): # suppress error for Python < 3.8 await subscription.aclose() # type: ignore - except RuntimeError: # Python < 3.8 - pass # A new email arrives! assert ( @@ -949,10 +938,10 @@ async def should_not_trigger_when_subscription_is_already_done(): is False ) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await payload - @mark.asyncio + @pytest.mark.asyncio() async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -986,14 +975,14 @@ async def should_not_trigger_when_subscription_is_thrown(): payload = anext(subscription) # Throw error - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await subscription.athrow(RuntimeError("ouch")) # type: ignore assert str(exc_info.value) == "ouch" - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await payload - @mark.asyncio + @pytest.mark.asyncio() async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -1049,7 +1038,7 @@ async def event_order_is_correct_for_multiple_publishes(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def should_handle_error_during_execution_of_source_event(): async def 
generate_messages(_obj, _info): yield "Hello" @@ -1097,7 +1086,7 @@ def resolve_message(message, _info): # Subsequent events are still executed. assert await anext(subscription) == ({"newMessage": "Bonjour"}, None) - @mark.asyncio + @pytest.mark.asyncio() async def should_pass_through_error_thrown_in_source_event_stream(): async def generate_messages(_obj, _info): yield "Hello" @@ -1126,15 +1115,15 @@ def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await anext(subscription) assert str(exc_info.value) == "test error" - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(subscription) - @mark.asyncio + @pytest.mark.asyncio() async def should_work_with_sync_resolve_function(): async def generate_messages(_obj, _info): yield "Hello" @@ -1162,7 +1151,7 @@ def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - @mark.asyncio + @pytest.mark.asyncio() async def should_work_with_async_resolve_function(): async def generate_messages(_obj, _info): await asyncio.sleep(0) @@ -1192,7 +1181,7 @@ async def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - @mark.asyncio + @pytest.mark.asyncio() async def should_work_with_custom_async_iterator(): class MessageGenerator: resolved: List[str] = [] @@ -1210,7 +1199,7 @@ async def __anext__(self): return self.values.pop(0) @classmethod - async def resolve(cls, message, _info): + async def resolve(cls, message, _info) -> str: await asyncio.sleep(0) cls.resolved.append(message) return message + "!" 
@@ -1242,7 +1231,7 @@ async def resolve(cls, message, _info): await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio() async def should_close_custom_async_iterator(): class MessageGenerator: closed: bool = False @@ -1261,13 +1250,13 @@ async def __anext__(self): return self.values.pop(0) @classmethod - async def resolve(cls, message, _info): + async def resolve(cls, message, _info) -> str: await asyncio.sleep(0) cls.resolved.append(message) return message + "!" @classmethod - async def aclose(cls): + async def aclose(cls) -> None: cls.closed = True schema = GraphQLSchema( diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index 300eded1..36f8c9a5 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -1,5 +1,4 @@ -from pytest import mark, raises - +import pytest from graphql import graphql_sync from graphql.execution import execute, execute_sync from graphql.language import parse @@ -52,7 +51,7 @@ def does_not_return_an_awaitable_if_mutation_fields_are_all_synchronous(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def returns_an_awaitable_if_any_field_is_asynchronous(): doc = "query Example { syncField, asyncField }" result = execute(schema, parse(doc), "rootValue") @@ -81,11 +80,11 @@ def does_not_throw_if_not_encountering_async_execution_with_check_sync(): None, ) - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_execution_with_check_sync(): doc = "query Example { syncField, asyncField }" - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: execute_sync( schema, document=parse(doc), root_value="rootValue", check_sync=True ) @@ -94,8 +93,8 @@ async def throws_if_encountering_async_execution_with_check_sync(): del exc_info cleanup() - @mark.asyncio - 
@mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" result = execute_sync(schema, document=parse(doc), root_value="rootValue") @@ -113,8 +112,8 @@ async def throws_if_encountering_async_operation_without_check_sync(): del result cleanup() - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_iterable_execution_with_check_sync(): doc = """ query Example { @@ -124,7 +123,7 @@ async def throws_if_encountering_async_iterable_execution_with_check_sync(): syncField } """ - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: execute_sync( schema, document=parse(doc), root_value="rootValue", check_sync=True ) @@ -133,8 +132,8 @@ async def throws_if_encountering_async_iterable_execution_with_check_sync(): del exc_info cleanup() - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_iterable_execution_without_check_sync(): doc = """ query Example { @@ -144,7 +143,7 @@ async def throws_if_encountering_async_iterable_execution_without_check_sync(): syncField } """ - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: execute_sync(schema, document=parse(doc), root_value="rootValue") msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." 
@@ -189,19 +188,19 @@ def does_not_throw_if_not_encountering_async_operation_with_check_sync(): None, ) - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_with_check_sync(): doc = "query Example { syncField, asyncField }" - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: graphql_sync(schema, doc, "rootValue", check_sync=True) msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." del exc_info cleanup() - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" result = graphql_sync(schema, doc, "rootValue") diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py index 4a3099a9..efccd669 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -65,7 +65,8 @@ def __init__( NamedType = GraphQLInterfaceType("Named", {"name": GraphQLField(GraphQLString)}) LifeType = GraphQLInterfaceType( - "Life", lambda: {"progeny": GraphQLField(GraphQLList(LifeType))} # type: ignore + "Life", + lambda: {"progeny": GraphQLField(GraphQLList(LifeType))}, # type: ignore ) MammalType = GraphQLInterfaceType( @@ -88,7 +89,7 @@ def __init__( "father": GraphQLField(DogType), # type: ignore }, interfaces=[MammalType, LifeType, NamedType], - is_type_of=lambda value, info: isinstance(value, Dog), + is_type_of=lambda value, _info: isinstance(value, Dog), ) CatType = GraphQLObjectType( @@ -101,7 +102,7 @@ def __init__( "father": GraphQLField(CatType), # type: ignore }, interfaces=[MammalType, LifeType, NamedType], - 
is_type_of=lambda value, info: isinstance(value, Cat), + is_type_of=lambda value, _info: isinstance(value, Cat), ) @@ -112,7 +113,7 @@ def resolve_pet_type(value, _info, _type): return CatType.name # Not reachable. All possible types have been considered. - assert False, "Unexpected pet type" + assert False, "Unexpected pet type" # pragma: no cover PetType = GraphQLUnionType("Pet", [DogType, CatType], resolve_type=resolve_pet_type) diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index be23672c..3df1c2f0 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -1,10 +1,10 @@ """Fixtures for graphql tests""" + import json from gc import collect -from os.path import dirname, join - -from pytest import fixture +from pathlib import Path +import pytest __all__ = [ "cleanup", @@ -25,30 +25,32 @@ def cleanup(rounds=5): def read_graphql(name): - path = join(dirname(__file__), name + ".graphql") - return open(path, encoding="utf-8").read() + path = (Path(__file__).parent / name).with_suffix(".graphql") + with path.open(encoding="utf-8") as file: + return file.read() def read_json(name): - path = join(dirname(__file__), name + ".json") - return json.load(open(path, encoding="utf-8")) + path = (Path(__file__).parent / name).with_suffix(".json") + with path.open(encoding="utf-8") as file: + return json.load(file) -@fixture(scope="module") +@pytest.fixture(scope="module") def kitchen_sink_query(): return read_graphql("kitchen_sink") -@fixture(scope="module") +@pytest.fixture(scope="module") def kitchen_sink_sdl(): return read_graphql("schema_kitchen_sink") -@fixture(scope="module") +@pytest.fixture(scope="module") def big_schema_sdl(): return read_graphql("github_schema") -@fixture(scope="module") +@pytest.fixture(scope="module") def big_schema_introspection_result(): return read_json("github_schema") diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py index 76e8a66c..35f39171 100644 --- a/tests/language/test_ast.py +++ 
b/tests/language/test_ast.py @@ -52,7 +52,7 @@ def can_check_equality(): token1 = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") token2 = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") assert token2 == token1 - assert not token2 != token1 + assert token2 == token1 token3 = Token(TokenKind.NAME, 1, 2, 1, 2, value="text") assert token3 != token1 token4 = Token(TokenKind.NAME, 1, 4, 1, 2, value="test") @@ -62,8 +62,8 @@ def can_check_equality(): def can_compare_with_string(): token = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") - assert token == "Name 'test'" - assert token != "Name 'foo'" + assert token == "Name 'test'" # noqa: S105 + assert token != "Name 'foo'" # noqa: S105 def does_not_equal_incompatible_object(): token = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") @@ -121,16 +121,16 @@ def can_check_equality_with_tuple_or_list(): loc = Location(token1, token2, source) assert loc == (1, 3) assert loc == [1, 3] - assert not loc != (1, 3) - assert not loc != [1, 3] + assert loc == (1, 3) + assert loc == [1, 3] assert loc != (1, 2) assert loc != [2, 3] def does_not_equal_incompatible_object(): loc = Location(token1, token2, source) - assert not loc == (1, 2, 3) assert loc != (1, 2, 3) - assert not loc == {1: 2} + assert loc != (1, 2, 3) + assert loc != {1: 2} assert loc != {1: 2} def can_hash(): @@ -191,7 +191,7 @@ def can_check_equality(): node = SampleTestNode(alpha=1, beta=2) node2 = SampleTestNode(alpha=1, beta=2) assert node2 == node - assert not node2 != node + assert node2 == node node2 = SampleTestNode(alpha=1, beta=1) assert node2 != node node3 = Node(alpha=1, beta=2) @@ -213,13 +213,13 @@ def caches_are_hashed(): assert not hasattr(node, "_hash") hash1 = hash(node) assert hasattr(node, "_hash") - assert hash1 == node._hash + assert hash1 == node._hash # noqa: SLF001 node.alpha = 2 assert not hasattr(node, "_hash") hash2 = hash(node) assert hash2 != hash1 assert hasattr(node, "_hash") - assert hash2 == node._hash + assert hash2 == node._hash # noqa: 
SLF001 def can_create_weak_reference(): node = SampleTestNode(alpha=1, beta=2) diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index 647db3c1..feb7ca2b 100644 --- a/tests/language/test_block_string_fuzz.py +++ b/tests/language/test_block_string_fuzz.py @@ -1,5 +1,4 @@ -from pytest import mark - +import pytest from graphql.language import Lexer, Source, TokenKind from graphql.language.block_string import ( is_printable_as_block_string, @@ -41,8 +40,8 @@ def assert_non_printable_block_string(test_value: str) -> None: def describe_print_block_string(): - @mark.slow - @mark.timeout(80) + @pytest.mark.slow() + @pytest.mark.timeout(80) def correctly_print_random_strings(): # Testing with length >7 is taking exponentially more time. However, it is # highly recommended testing with increased limit if you make any change. diff --git a/tests/language/test_character_classes.py b/tests/language/test_character_classes.py index c682b76a..8fef324c 100644 --- a/tests/language/test_character_classes.py +++ b/tests/language/test_character_classes.py @@ -8,8 +8,7 @@ is_name_start, ) - -non_ascii = "¯_±¹²³½£ºµÄäÖöØø×〇᧐〸αΑωΩ" +non_ascii = "¯_±¹²³½£ºµÄäÖöØø×〇᧐〸αΑωΩ" # noqa: RUF001 def describe_digit(): diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 83e47953..439446d8 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -1,7 +1,6 @@ from typing import List, Optional, Tuple -from pytest import raises - +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind from graphql.language.lexer import is_punctuator_token_kind @@ -9,7 +8,6 @@ from ..utils import dedent - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -31,7 +29,7 @@ def lex_second(s: str) -> Token: def assert_syntax_error(text: str, message: str, location: Location) -> None: - with raises(GraphQLSyntaxError) as exc_info: + 
with pytest.raises(GraphQLSyntaxError) as exc_info: lex_second(text) error = exc_info.value assert error.message == f"Syntax Error: {message}" @@ -79,7 +77,7 @@ def skips_whitespace_and_comments(): assert token == Token(TokenKind.NAME, 3, 6, 1, 4, "foo") def errors_respect_whitespace(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: lex_one("\n\n ~\n") assert str(exc_info.value) == dedent( @@ -97,7 +95,7 @@ def errors_respect_whitespace(): def updates_line_numbers_in_error_for_file_context(): s = "\n\n ~\n\n" source = Source(s, "foo.js", SourceLocation(11, 12)) - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent( """ @@ -113,7 +111,7 @@ def updates_line_numbers_in_error_for_file_context(): def updates_column_numbers_in_error_for_file_context(): source = Source("~", "foo.js", SourceLocation(1, 5)) - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent( """ @@ -398,18 +396,15 @@ def lexes_block_strings(): ) == Token(TokenKind.BLOCK_STRING, 0, 68, 1, 1, "spans\n multiple\n lines") def advance_line_after_lexing_multiline_block_string(): - assert ( - lex_second( - '''""" + assert lex_second( + '''""" spans multiple lines \n """ second_token''' - ) - == Token(TokenKind.NAME, 71, 83, 8, 6, "second_token") - ) + ) == Token(TokenKind.NAME, 71, 83, 8, 6, "second_token") def lex_reports_useful_block_string_errors(): assert_syntax_error('"""', "Unterminated string.", (1, 4)) @@ -555,7 +550,7 @@ def lex_reports_useful_information_for_dashes_in_names(): lexer = Lexer(source) first_token = lexer.advance() assert first_token == Token(TokenKind.NAME, 0, 1, 1, 1, "a") - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: lexer.advance() error = exc_info.value assert 
error.message == ( diff --git a/tests/language/test_location.py b/tests/language/test_location.py index 62096c19..c9ae2c14 100644 --- a/tests/language/test_location.py +++ b/tests/language/test_location.py @@ -10,34 +10,34 @@ def can_compare_with_other_source_location(): location = SourceLocation(1, 2) same_location = SourceLocation(1, 2) assert location == same_location - assert not location != same_location + assert not location != same_location # noqa: SIM202 different_location = SourceLocation(1, 1) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location different_location = SourceLocation(2, 2) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location def can_compare_with_location_tuple(): location = SourceLocation(1, 2) same_location = (1, 2) assert location == same_location - assert not location != same_location + assert not location != same_location # noqa: SIM202 different_location = (1, 1) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location different_location = (2, 2) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location def can_compare_with_formatted_location(): location = SourceLocation(1, 2) same_location = location.formatted assert location == same_location - assert not location != same_location + assert not location != same_location # noqa: SIM202 different_location = SourceLocation(1, 1).formatted - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location different_location = SourceLocation(2, 2).formatted - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != 
different_location diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 82d787f3..2199a8fc 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -1,7 +1,6 @@ from typing import Optional, Tuple, cast -from pytest import raises - +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, @@ -40,7 +39,6 @@ from ..fixtures import kitchen_sink_query # noqa: F401 from ..utils import dedent - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -55,7 +53,7 @@ def parse_ccn(source: str) -> DocumentNode: def assert_syntax_error(text: str, message: str, location: Location) -> None: - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse(text) error = exc_info.value assert error.message == f"Syntax Error: {message}" @@ -64,7 +62,7 @@ def assert_syntax_error(text: str, message: str, location: Location) -> None: def assert_syntax_error_ccn(text: str, message: str, location: Location) -> None: - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse_ccn(text) error = exc_info.value assert error.message == f"Syntax Error: {message}" @@ -74,7 +72,7 @@ def assert_syntax_error_ccn(text: str, message: str, location: Location) -> None def describe_parser(): def parse_provides_useful_errors(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse("{") error = exc_info.value assert error.message == "Syntax Error: Expected Name, found ." 
@@ -102,7 +100,7 @@ def parse_provides_useful_errors(): assert_syntax_error('{ ""', "Expected Name, found String ''.", (1, 3)) def parse_provides_useful_error_when_using_source(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse(Source("query", "MyQuery.graphql")) error = exc_info.value assert str(error) == dedent( @@ -117,14 +115,14 @@ def parse_provides_useful_error_when_using_source(): def limits_maximum_number_of_tokens(): parse("{ foo }", max_tokens=3) - with raises( + with pytest.raises( GraphQLSyntaxError, match="Syntax Error:" r" Document contains more that 2 tokens\. Parsing aborted\.", ): parse("{ foo }", max_tokens=2) parse('{ foo(bar: "baz") }', max_tokens=8) - with raises( + with pytest.raises( GraphQLSyntaxError, match="Syntax Error:" r" Document contains more that 7 tokens\. Parsing aborted\.", @@ -617,7 +615,7 @@ def allows_parsing_without_source_location_information(): def legacy_allows_parsing_fragment_defined_variables(): document = "fragment a($v: Boolean = false) on t { f(v: $v) }" parse(document, allow_legacy_fragment_variables=True) - with raises(GraphQLSyntaxError): + with pytest.raises(GraphQLSyntaxError): parse(document) def contains_location_information_that_only_stringifies_start_end(): @@ -629,7 +627,8 @@ def contains_location_information_that_only_stringifies_start_end(): def contains_references_to_source(): source = Source("{ id }") result = parse(source) - assert result.loc and result.loc.source is source + assert result.loc + assert result.loc.source is source def contains_references_to_start_and_end_tokens(): result = parse("{ id }") @@ -650,13 +649,16 @@ def allows_comments_everywhere_in_the_source(): # bottom comment""" ) top_comment = result.loc and result.loc.start_token.next - assert top_comment and top_comment.kind is TokenKind.COMMENT + assert top_comment + assert top_comment.kind is TokenKind.COMMENT assert top_comment.value == " top comment" field_comment = 
top_comment.next.next.next # type: ignore - assert field_comment and field_comment.kind is TokenKind.COMMENT + assert field_comment + assert field_comment.kind is TokenKind.COMMENT assert field_comment.value == " field comment" bottom_comment = field_comment.next.next # type: ignore - assert bottom_comment and bottom_comment.kind is TokenKind.COMMENT + assert bottom_comment + assert bottom_comment.kind is TokenKind.COMMENT assert bottom_comment.value == " bottom comment" @@ -728,7 +730,7 @@ def allows_variables(): assert name.value == "var" def correct_message_for_incomplete_variable(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse_value("$") assert exc_info.value == { "message": "Syntax Error: Expected Name, found .", @@ -736,7 +738,7 @@ def correct_message_for_incomplete_variable(): } def correct_message_for_unexpected_token(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse_value(":") assert exc_info.value == { "message": "Syntax Error: Unexpected ':'.", @@ -762,7 +764,7 @@ def parses_values(): assert value.block is False def does_not_allow_variables(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse_const_value("{ field: $var }") assert exc_info.value == { "message": "Syntax Error: Unexpected variable '$var' in constant value.", @@ -770,7 +772,7 @@ def does_not_allow_variables(): } def correct_message_for_unexpected_token(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse_const_value("$$") assert exc_info.value == { "message": "Syntax Error: Unexpected '$'.", diff --git a/tests/language/test_predicates.py b/tests/language/test_predicates.py index 498829f9..f87148e4 100644 --- a/tests/language/test_predicates.py +++ b/tests/language/test_predicates.py @@ -18,7 +18,6 @@ parse_value, ) - all_ast_nodes = sorted( [ node_type() diff 
--git a/tests/language/test_printer.py b/tests/language/test_printer.py index 3f91cc4a..7669e963 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -1,7 +1,6 @@ from copy import deepcopy -from pytest import raises - +import pytest from graphql.language import FieldNode, NameNode, parse, print_ast from ..fixtures import kitchen_sink_query # noqa: F401 @@ -15,12 +14,12 @@ def prints_minimal_ast(): def produces_helpful_error_messages(): bad_ast = {"random": "Data"} - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker print_ast(bad_ast) # type: ignore assert str(exc_info.value) == "Not an AST Node: {'random': 'Data'}." corrupt_ast = FieldNode(name="random data") - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: print_ast(corrupt_ast) assert str(exc_info.value) == "Invalid AST Node: 'random data'." diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index 5faa5c52..f9100a03 100644 --- a/tests/language/test_schema_parser.py +++ b/tests/language/test_schema_parser.py @@ -3,8 +3,7 @@ from textwrap import dedent from typing import List, Optional, Tuple -from pytest import raises - +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, @@ -39,7 +38,6 @@ from ..fixtures import kitchen_sink_sdl # noqa: F401 - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -50,7 +48,7 @@ def assert_syntax_error(text: str, message: str, location: Location) -> None: - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse(text) error = exc_info.value assert error.message == f"Syntax Error: {message}" diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index 93190216..35da0b06 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ 
-1,7 +1,6 @@ from copy import deepcopy -from pytest import raises - +import pytest from graphql.language import NameNode, ScalarTypeDefinitionNode, parse, print_ast from ..fixtures import kitchen_sink_sdl # noqa: F401 @@ -15,7 +14,7 @@ def prints_minimal_ast(): def produces_helpful_error_messages(): bad_ast = {"random": "Data"} - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker print_ast(bad_ast) # type: ignore msg = str(exc_info.value) diff --git a/tests/language/test_source.py b/tests/language/test_source.py index ca5592ea..9da76d2f 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -1,8 +1,7 @@ import weakref from typing import Tuple, cast -from pytest import raises - +import pytest from graphql.language import Source, SourceLocation from ..utils import dedent @@ -54,17 +53,17 @@ def can_be_stringified(): def can_be_compared(): source = Source("foo") - assert source == source - assert not source != source + assert source == source # noqa: PLR0124 + assert not source != source # noqa: PLR0124, SIM202 assert source == "foo" - assert not source != "foo" + assert not source != "foo" # noqa: SIM202 same_source = Source("foo") assert source == same_source - assert not source != same_source + assert not source != same_source # noqa: SIM202 different_source = Source("bar") - assert not source == different_source + assert not source == different_source # noqa: SIM201 assert source != different_source - assert not source == "bar" + assert not source == "bar" # noqa: SIM201 assert source != "bar" def can_create_weak_reference(): @@ -81,32 +80,32 @@ def rejects_invalid_location_offset(): def create_source(location_offset: Tuple[int, int]) -> Source: return Source("", "", cast(SourceLocation, location_offset)) - with raises(TypeError): + with pytest.raises(TypeError): create_source(None) # type: ignore - with raises(TypeError): + with pytest.raises(TypeError): create_source(1) # type: 
ignore - with raises(TypeError): + with pytest.raises(TypeError): create_source((1,)) # type: ignore - with raises(TypeError): + with pytest.raises(TypeError): create_source((1, 2, 3)) # type: ignore - with raises( + with pytest.raises( ValueError, match="line in location_offset is 1-indexed and must be positive\\.", ): create_source((0, 1)) - with raises( + with pytest.raises( ValueError, match="line in location_offset is 1-indexed and must be positive\\.", ): create_source((-1, 1)) - with raises( + with pytest.raises( ValueError, match="column in location_offset is 1-indexed and must be positive\\.", ): create_source((1, 0)) - with raises( + with pytest.raises( ValueError, match="column in location_offset is 1-indexed and must be positive\\.", ): diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index 06baa5af..dd2fc791 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -2,8 +2,7 @@ from functools import partial from typing import Any, List, Optional, cast -from pytest import mark, raises - +import pytest from graphql.language import ( BREAK, REMOVE, @@ -83,7 +82,7 @@ def get_value(node): def describe_visitor(): def visit_with_invalid_node(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker visit("invalid", Visitor()) # type: ignore assert str(exc_info.value) == "Not an AST Node: 'invalid'." @@ -95,7 +94,7 @@ class TestVisitor: def enter(self, *_args): pass - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker visit(ast, TestVisitor()) # type: ignore assert str(exc_info.value) == "Not an AST Visitor: ." 
@@ -107,61 +106,53 @@ def enter(self, node, *args): assert isinstance(node, Node) assert len(args) == 4 visited.append(f"enter:{node.kind}") - pass def leave(self, node, *args): assert isinstance(self, TestVisitorWithInstanceMethods) assert isinstance(node, Node) assert len(args) == 4 visited.append(f"leave:{node.kind}") - pass def enter_field(self, node, *args): assert isinstance(self, TestVisitorWithInstanceMethods) assert isinstance(node, Node) assert len(args) == 4 visited.append(f"enter_field:{node.kind}") - pass def leave_field(self, node, *args): assert isinstance(self, TestVisitorWithInstanceMethods) assert isinstance(node, Node) assert len(args) == 4 visited.append(f"leave_field:{node.kind}") - pass class TestVisitorWithClassMethods(Visitor): @classmethod - def enter(cls, node, *args): + def enter(cls, node, *args) -> None: assert cls is TestVisitorWithClassMethods assert isinstance(node, Node) assert len(args) == 4 visited.append(f"enter:{node.kind}") - pass @classmethod - def leave(cls, node, *args): + def leave(cls, node, *args) -> None: assert cls is TestVisitorWithClassMethods assert isinstance(node, Node) assert len(args) == 4 visited.append(f"leave:{node.kind}") - pass @classmethod - def enter_field(cls, node, *args): + def enter_field(cls, node, *args) -> None: assert cls is TestVisitorWithClassMethods assert isinstance(node, Node) assert len(args) == 4 visited.append(f"enter_field:{node.kind}") - pass @classmethod - def leave_field(cls, node, *args): + def leave_field(cls, node, *args) -> None: assert cls is TestVisitorWithClassMethods assert isinstance(node, Node) assert len(args) == 4 visited.append(f"leave_field:{node.kind}") - pass class TestVisitorWithStaticMethods(Visitor): @staticmethod @@ -169,28 +160,24 @@ def enter(node, *args): assert isinstance(node, Node) assert len(args) == 4 visited.append(f"enter:{node.kind}") - pass @staticmethod def leave(node, *args): assert isinstance(node, Node) assert len(args) == 4 
visited.append(f"leave:{node.kind}") - pass @staticmethod def enter_field(node, *args): assert isinstance(node, Node) assert len(args) == 4 visited.append(f"enter_field:{node.kind}") - pass @staticmethod def leave_field(node, *args): assert isinstance(node, Node) assert len(args) == 4 visited.append(f"leave_field:{node.kind}") - pass for visitor_class in ( TestVisitorWithInstanceMethods, @@ -342,7 +329,9 @@ def leave_operation_definition(self, *args): assert edited_ast == ast assert visited == ["enter", "leave"] - @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis")) + @pytest.mark.parametrize( + "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"] + ) def allows_for_editing_on_enter(remove_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) @@ -353,12 +342,15 @@ def enter(*args): node = args[0] if isinstance(node, FieldNode) and node.name.value == "b": return remove_action + return None edited_ast = visit(ast, TestVisitor()) assert ast == parse("{ a, b, c { a, b, c } }", no_location=True) assert edited_ast == parse("{ a, c { a, c } }", no_location=True) - @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis")) + @pytest.mark.parametrize( + "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"] + ) def allows_for_editing_on_leave(remove_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) @@ -369,18 +361,19 @@ def leave(*args): node = args[0] if isinstance(node, FieldNode) and node.name.value == "b": return remove_action + return None edited_ast = visit(ast, TestVisitor()) assert ast == parse("{ a, b, c { a, b, c } }", no_location=True) assert edited_ast == parse("{ a, c { a, c } }", no_location=True) - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def ignores_false_returned_on_leave(skip_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) class 
TestVisitor(Visitor): @staticmethod - def leave(*args): + def leave(*_args): return skip_action returned_ast = visit(ast, TestVisitor()) @@ -401,16 +394,18 @@ def enter(self, *args): assert node.selection_set node.selection_set.selections = ( added_field, - ) + node.selection_set.selections + *node.selection_set.selections, + ) return node if node == added_field: self.did_visit_added_field = True + return None visitor = TestVisitor() visit(ast, visitor) assert visitor.did_visit_added_field - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def allows_skipping_a_sub_tree(skip_action): ast = parse("{ a, b { x }, c }", no_location=True) visited = [] @@ -424,6 +419,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "field" and node.name.value == "b": return skip_action + return None @staticmethod def leave(*args): @@ -451,7 +447,7 @@ def leave(*args): ["leave", "document", None], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_visiting(break_action): ast = parse("{ a, b { x }, c }", no_location=True) visited = [] @@ -465,6 +461,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "name" and node.value == "x": return break_action + return None @staticmethod def leave(*args): @@ -490,7 +487,7 @@ def leave(*args): ["enter", "name", "x"], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_leaving(break_action): ast = parse("{ a, b { x }, c }", no_location=True) visited = [] @@ -511,6 +508,7 @@ def leave(*args): visited.append(["leave", kind, value]) if kind == "name" and node.value == "x": return break_action + return None visit(ast, TestVisitor()) assert visited == 
[ @@ -582,7 +580,8 @@ class CustomFieldNode(SelectionNode): custom_selection_set = cast(FieldNode, custom_ast.definitions[0]).selection_set assert custom_selection_set is not None - custom_selection_set.selections = custom_selection_set.selections + ( + custom_selection_set.selections = ( + *custom_selection_set.selections, CustomFieldNode( name=NameNode(value="NameNodeToBeSkipped"), selection_set=SelectionSetNode( @@ -656,7 +655,7 @@ def leave(node, *_args): ] def cannot_define_visitor_with_unknown_ast_nodes(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: class VisitorWithNonExistingNode(Visitor): def enter_field(self, *_args): @@ -667,7 +666,7 @@ def leave_garfield(self, *_args): assert str(exc_info.value) == "Invalid AST node kind: garfield." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: class VisitorWithUnspecificNode(Visitor): def enter_type_system_extension(self, *_args): @@ -1377,7 +1376,7 @@ def leave(*args): def describe_visit_in_parallel(): - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def allows_skipping_a_sub_tree(skip_action): # Note: nearly identical to the above test but using ParallelVisitor ast = parse("{ a, b { x }, c }") @@ -1392,6 +1391,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "field" and node.name.value == "b": return skip_action + return None @staticmethod def leave(*args): @@ -1419,7 +1419,7 @@ def leave(*args): ["leave", "document", None], ] - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def allows_skipping_different_sub_trees(skip_action): ast = parse("{ a { x }, b { y} }") visited = [] @@ -1437,6 +1437,7 @@ def enter(self, *args): visited.append([f"no-{name}", "enter", kind, value]) if kind == "field" and node.name.value == 
name: return skip_action + return None def leave(self, *args): check_visitor_fn_args(ast, *args) @@ -1483,7 +1484,7 @@ def leave(self, *args): ["no-b", "leave", "document", None], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_visiting(break_action): # Note: nearly identical to the above test but using ParallelVisitor. ast = parse("{ a, b { x }, c }") @@ -1498,6 +1499,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "name" and node.value == "x": return break_action + return None @staticmethod def leave(*args): @@ -1523,7 +1525,7 @@ def leave(*args): ["enter", "name", "x"], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_from_different_points(break_action): ast = parse("{ a { y }, b { x } }") visited = [] @@ -1541,6 +1543,7 @@ def enter(self, *args): visited.append([f"break-{name}", "enter", kind, value]) if kind == "name" and node.value == name: return break_action + return None def leave(self, *args): assert self.name == "b" @@ -1574,7 +1577,7 @@ def leave(self, *args): ["break-b", "enter", "name", "b"], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_leaving(break_action): # Note: nearly identical to the above test but using ParallelVisitor. 
ast = parse("{ a, b { x }, c }") @@ -1596,6 +1599,7 @@ def leave(*args): visited.append(["leave", kind, value]) if kind == "name" and node.value == "x": return break_action + return None visit(ast, ParallelVisitor([TestVisitor()])) assert visited == [ @@ -1615,7 +1619,7 @@ def leave(*args): ["leave", "name", "x"], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_from_leaving_different_points(break_action): ast = parse("{ a { y }, b { x } }") visited = [] @@ -1640,6 +1644,7 @@ def leave(self, *args): visited.append([f"break-{name}", "leave", kind, value]) if kind == "field" and node.name.value == name: return break_action + return None visit(ast, ParallelVisitor([TestVisitor("a"), TestVisitor("b")])) assert visited == [ @@ -1681,7 +1686,9 @@ def leave(self, *args): ["break-b", "leave", "field", None], ] - @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis")) + @pytest.mark.parametrize( + "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"] + ) def allows_for_editing_on_enter(remove_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) visited = [] @@ -1693,6 +1700,7 @@ def enter(*args): node = args[0] if node.kind == "field" and node.name.value == "b": return remove_action + return None class TestVisitor2(Visitor): @staticmethod @@ -1739,7 +1747,9 @@ def leave(*args): ["leave", "document", None], ] - @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis")) + @pytest.mark.parametrize( + "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"] + ) def allows_for_editing_on_leave(remove_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) visited = [] @@ -1751,6 +1761,7 @@ def leave(*args): node = args[0] if node.kind == "field" and node.name.value == "b": return remove_action + return None class TestVisitor2(Visitor): @staticmethod diff --git 
a/tests/pyutils/test_async_reduce.py b/tests/pyutils/test_async_reduce.py index 15c47595..cbcef554 100644 --- a/tests/pyutils/test_async_reduce.py +++ b/tests/pyutils/test_async_reduce.py @@ -1,7 +1,6 @@ from functools import reduce -from pytest import mark - +import pytest from graphql.pyutils import async_reduce, is_awaitable @@ -17,7 +16,7 @@ def callback(accumulator, current_value): assert result == 42 assert result == reduce(callback, values, initial_value) - @mark.asyncio + @pytest.mark.asyncio() async def works_with_sync_values_and_sync_initial_value(): def callback(accumulator, current_value): return accumulator + "-" + current_value @@ -27,7 +26,7 @@ def callback(accumulator, current_value): assert not is_awaitable(result) assert result == "foo-bar-baz" - @mark.asyncio + @pytest.mark.asyncio() async def works_with_async_initial_value(): async def async_initial_value(): return "foo" @@ -40,7 +39,7 @@ def callback(accumulator, current_value): assert is_awaitable(result) assert await result == "foo-bar-baz" - @mark.asyncio + @pytest.mark.asyncio() async def works_with_async_callback(): async def async_callback(accumulator, current_value): return accumulator + "-" + current_value @@ -50,7 +49,7 @@ async def async_callback(accumulator, current_value): assert is_awaitable(result) assert await result == "foo-bar-baz" - @mark.asyncio + @pytest.mark.asyncio() async def works_with_async_callback_and_async_initial_value(): async def async_initial_value(): return 1 / 8 diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index db72792d..57edff39 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -1,8 +1,7 @@ from contextlib import contextmanager from typing import cast -from pytest import raises - +import pytest from graphql import graphql_sync from graphql.pyutils import ( Description, @@ -95,12 +94,12 @@ def can_register_and_unregister(): Description.bases = str def can_only_register_types(): - 
with raises(TypeError, match="Only types can be registered\\."): + with pytest.raises(TypeError, match="Only types can be registered\\."): # noinspection PyTypeChecker register_description("foo") # type: ignore def can_only_unregister_types(): - with raises(TypeError, match="Only types can be unregistered\\."): + with pytest.raises(TypeError, match="Only types can be unregistered\\."): # noinspection PyTypeChecker unregister_description("foo") # type: ignore diff --git a/tests/pyutils/test_format_list.py b/tests/pyutils/test_format_list.py index bdc6b62f..ee425eca 100644 --- a/tests/pyutils/test_format_list.py +++ b/tests/pyutils/test_format_list.py @@ -1,11 +1,10 @@ -from pytest import raises - +import pytest from graphql.pyutils import and_list, or_list def describe_and_list(): def does_not_accept_an_empty_list(): - with raises(ValueError): + with pytest.raises(ValueError, match="Missing list items to be formatted"): and_list([]) def handles_single_item(): @@ -23,7 +22,7 @@ def handles_more_than_five_items(): def describe_or_list(): def does_not_accept_an_empty_list(): - with raises(ValueError): + with pytest.raises(ValueError, match="Missing list items to be formatted"): or_list([]) def handles_single_item(): diff --git a/tests/pyutils/test_group_by.py b/tests/pyutils/test_group_by.py index 1309fb6f..cafe4742 100644 --- a/tests/pyutils/test_group_by.py +++ b/tests/pyutils/test_group_by.py @@ -4,7 +4,7 @@ def describe_group_by(): def does_accept_an_empty_list(): def key_fn(_x: str) -> str: - raise TypeError("Unexpected call of key function.") + raise TypeError("Unexpected call of key function.") # pragma: no cover assert group_by([], key_fn) == {} diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index 272044c5..be8e1e0a 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -3,8 +3,7 @@ from math import inf, nan from typing import Any, Dict, FrozenSet, List, Set, Tuple -from pytest import mark - +import pytest 
from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLDirective, @@ -16,7 +15,6 @@ GraphQLString, ) - inspect_module = import_module(inspect.__module__) @@ -138,7 +136,7 @@ def test_generator(): assert inspect(test_generator) == "" assert inspect(test_generator()) == "" - @mark.asyncio + @pytest.mark.asyncio() async def inspect_coroutine(): async def test_coroutine(): pass @@ -260,8 +258,10 @@ def inspect_sets(): def inspect_overly_large_set(): s = set(range(20)) r = inspect(s) - assert r.startswith("{") and r.endswith("}") - assert "..., " in r and "5" not in s # sets are unordered + assert r.startswith("{") + assert r.endswith("}") + assert "..., " in r + assert "5" not in s # sets are unordered assert len(r) == 36 with increased_list_size(): assert inspect(s) == repr(s) @@ -285,8 +285,10 @@ def inspect_frozensets(): def inspect_overly_large_frozenset(): s = frozenset(range(20)) r = inspect(s) - assert r.startswith("frozenset({") and r.endswith("})") - assert "..., " in r and "5" not in s # frozensets are unordered + assert r.startswith("frozenset({") + assert r.endswith("})") + assert "..., " in r + assert "5" not in s # frozensets are unordered assert len(r) == 47 with increased_list_size(): assert inspect(s) == repr(s) diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index 2847e0f2..dcee07d9 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -2,8 +2,7 @@ from inspect import isawaitable from sys import version_info as python_version -from pytest import mark - +import pytest from graphql.pyutils import is_awaitable @@ -67,7 +66,7 @@ async def some_async_function(): assert not isawaitable(some_async_function) assert not is_awaitable(some_async_function) - @mark.asyncio + @pytest.mark.asyncio() async def recognizes_a_coroutine_object(): async def some_async_function(): return True @@ -79,8 +78,8 @@ async def some_async_function(): assert await some_coroutine is 
True - @mark.filterwarnings("ignore::Warning") # Deprecation and Runtime warnings - @mark.skipif( + @pytest.mark.filterwarnings("ignore::Warning") # Deprecation and Runtime warnings + @pytest.mark.skipif( python_version >= (3, 11), reason="Generator-based coroutines not supported any more since Python 3.11", ) @@ -93,7 +92,7 @@ def some_function(): assert is_awaitable(some_old_style_coroutine) assert is_awaitable(some_old_style_coroutine) - @mark.asyncio + @pytest.mark.asyncio() async def recognizes_a_future_object(): async def some_async_function(): return True @@ -106,7 +105,7 @@ async def some_async_function(): assert await some_future is True - @mark.asyncio + @pytest.mark.asyncio() async def declines_an_async_generator(): async def some_async_generator_function(): yield True diff --git a/tests/pyutils/test_is_iterable.py b/tests/pyutils/test_is_iterable.py index e40e6961..00883604 100644 --- a/tests/pyutils/test_is_iterable.py +++ b/tests/pyutils/test_is_iterable.py @@ -18,7 +18,7 @@ def should_return_true_for_tuples(): assert is_collection(("A", "B", "C")) is True def should_return_true_for_named_tuples(): - named = namedtuple("named", "A B C") + named = namedtuple("named", "A B C") # noqa: PYI024 assert is_collection(named(0, 1, 2)) is True def should_return_true_for_arrays(): @@ -122,7 +122,7 @@ def should_return_true_for_tuples(): assert is_iterable(("A", "B", "C")) is True def should_return_true_for_named_tuples(): - named = namedtuple("named", "a b c") + named = namedtuple("named", "a b c") # noqa: PYI024 assert is_iterable(named(0, 1, 2)) is True def should_return_true_for_arrays(): diff --git a/tests/pyutils/test_merge_kwargs.py b/tests/pyutils/test_merge_kwargs.py index f60bf562..05ef249a 100644 --- a/tests/pyutils/test_merge_kwargs.py +++ b/tests/pyutils/test_merge_kwargs.py @@ -1,6 +1,5 @@ from graphql.pyutils import merge_kwargs - try: from typing import TypedDict except ImportError: # Python < 3.8 diff --git a/tests/pyutils/test_natural_compare.py 
b/tests/pyutils/test_natural_compare.py index f3c2584c..dd3bb8d8 100644 --- a/tests/pyutils/test_natural_compare.py +++ b/tests/pyutils/test_natural_compare.py @@ -1,6 +1,5 @@ from graphql.pyutils import natural_comparison_key - key = natural_comparison_key diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 093a6e61..2f30a8e2 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -1,12 +1,11 @@ from asyncio import sleep -from pytest import mark, raises - +import pytest from graphql.pyutils import SimplePubSub, is_awaitable def describe_simple_pub_sub(): - @mark.asyncio + @pytest.mark.asyncio() async def subscribe_async_iterator_mock(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() @@ -43,14 +42,14 @@ async def subscribe_async_iterator_mock(): assert pubsub.emit("Fig") is False # Find that cancelled read-ahead got a "done" result - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await i5 # And next returns empty completion value - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await iterator.__anext__() - @mark.asyncio + @pytest.mark.asyncio() async def iterator_aclose_empties_push_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -68,7 +67,7 @@ async def iterator_aclose_empties_push_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @mark.asyncio + @pytest.mark.asyncio() async def iterator_aclose_empties_pull_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -85,7 +84,7 @@ async def iterator_aclose_empties_pull_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @mark.asyncio + @pytest.mark.asyncio() async def iterator_aclose_is_idempotent(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() diff --git a/tests/pyutils/test_undefined.py b/tests/pyutils/test_undefined.py index 16d71b0a..b6f62eea 100644 --- 
a/tests/pyutils/test_undefined.py +++ b/tests/pyutils/test_undefined.py @@ -1,7 +1,6 @@ import pickle -from pytest import warns - +import pytest from graphql.pyutils import Undefined, UndefinedType @@ -23,14 +22,13 @@ def as_bool_is_false(): def only_equal_to_itself_and_none(): # because we want it to behave similarly to JavaScript - assert Undefined == Undefined - assert not Undefined != Undefined + assert Undefined == Undefined # noqa: PLR0124 none_object = None assert Undefined == none_object - assert not Undefined != none_object + assert none_object == Undefined false_object = False assert Undefined != false_object - assert not Undefined == false_object + assert false_object != Undefined def should_not_be_an_exception(): # because we want to create similar code to JavaScript where @@ -39,7 +37,7 @@ def should_not_be_an_exception(): assert not isinstance(Undefined, Exception) def cannot_be_redefined(): - with warns(RuntimeWarning, match="Redefinition of 'Undefined'"): + with pytest.warns(RuntimeWarning, match="Redefinition of 'Undefined'"): redefined_undefined = UndefinedType() assert redefined_undefined is Undefined diff --git a/tests/star_wars_data.py b/tests/star_wars_data.py index 55951692..68768534 100644 --- a/tests/star_wars_data.py +++ b/tests/star_wars_data.py @@ -1,13 +1,12 @@ -"""This defines a basic set of data for our Star Wars Schema. +"""Define a basic set of data for our Star Wars Schema. -This data is hard coded for the sake of the demo, but you could imagine fetching this +The data is hard coded for the sake of the demo, but you could imagine fetching this data from a backend service rather than from hardcoded JSON objects in a more complex demo. """ from typing import Awaitable, Collection, Dict, Iterator, Optional - __all__ = ["get_droid", "get_friends", "get_hero", "get_human", "get_secret_backstory"] # These are classes which correspond to the schema. 
@@ -27,7 +26,7 @@ class Human(Character): homePlanet: str # noinspection PyShadowingBuiltins - def __init__(self, id, name, friends, appearsIn, homePlanet): + def __init__(self, id, name, friends, appearsIn, homePlanet): # noqa: A002 self.id, self.name = id, name self.friends, self.appearsIn = friends, appearsIn self.homePlanet = homePlanet @@ -39,7 +38,7 @@ class Droid(Character): primaryFunction: str # noinspection PyShadowingBuiltins - def __init__(self, id, name, friends, appearsIn, primaryFunction): + def __init__(self, id, name, friends, appearsIn, primaryFunction): # noqa: A002 self.id, self.name = id, name self.friends, self.appearsIn = friends, appearsIn self.primaryFunction = primaryFunction @@ -109,7 +108,7 @@ def __init__(self, id, name, friends, appearsIn, primaryFunction): # noinspection PyShadowingBuiltins -async def get_character(id: str) -> Optional[Character]: +async def get_character(id: str) -> Optional[Character]: # noqa: A002 """Helper function to get a character by ID.""" # We use an async function just to illustrate that GraphQL-core supports it. 
return human_data.get(id) or droid_data.get(id) @@ -131,18 +130,18 @@ def get_hero(episode: int) -> Character: # noinspection PyShadowingBuiltins -def get_human(id: str) -> Optional[Human]: +def get_human(id: str) -> Optional[Human]: # noqa: A002 """Allows us to query for the human with the given id.""" return human_data.get(id) # noinspection PyShadowingBuiltins -def get_droid(id: str) -> Optional[Droid]: +def get_droid(id: str) -> Optional[Droid]: # noqa: A002 """Allows us to query for the droid with the given id.""" return droid_data.get(id) # noinspection PyUnusedLocal -def get_secret_backstory(character: Character) -> str: +def get_secret_backstory(character: Character) -> str: # noqa: ARG001 """Raise an error when attempting to get the secret backstory.""" raise RuntimeError("secretBackstory is secret.") diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py index 65eeeebe..3f8713ab 100644 --- a/tests/star_wars_schema.py +++ b/tests/star_wars_schema.py @@ -54,6 +54,7 @@ GraphQLSchema, GraphQLString, ) + from tests.star_wars_data import ( get_droid, get_friends, @@ -62,7 +63,6 @@ get_secret_backstory, ) - __all__ = ["star_wars_schema"] # We begin by setting up our schema. 
diff --git a/tests/test_docs.py b/tests/test_docs.py index cfe419f0..618dcb47 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -5,7 +5,6 @@ from .utils import dedent - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -20,7 +19,8 @@ def get_snippets(source, indent=4): if not source.endswith(".rst"): # pragma: no cover source += ".rst" source_path = Path(__file__).parents[1] / "docs" / source - lines = open(source_path).readlines() + with source_path.open() as source_file: + lines = source_file.readlines() snippets: List[str] = [] snippet: List[str] = [] snippet_start = " " * indent @@ -29,10 +29,9 @@ def get_snippets(source, indent=4): snippet.append(line) elif line.startswith(snippet_start): snippet.append(line[indent:]) - else: - if snippet: - snippets.append("".join(snippet).rstrip() + "\n") - snippet = [] + elif snippet: + snippets.append("".join(snippet).rstrip() + "\n") + snippet = [] if snippet: snippets.append("".join(snippet).rstrip() + "\n") return snippets @@ -56,7 +55,8 @@ def describe_introduction(): def getting_started(capsys): intro = get_snippets("intro") pip_install = intro.pop(0) - assert "pip install" in pip_install and "graphql-core" in pip_install + assert "pip install" in pip_install + assert "graphql-core" in pip_install poetry_install = intro.pop(0) assert "poetry install" in poetry_install create_schema = intro.pop(0) @@ -65,7 +65,9 @@ def getting_started(capsys): exec(create_schema, scope) schema = scope.get("schema") schema_class = scope.get("GraphQLSchema") - assert schema and schema_class and isinstance(schema, schema_class) + assert schema + assert schema_class + assert isinstance(schema, schema_class) query = intro.pop(0) assert "graphql_sync" in query exec(query, scope) @@ -140,7 +142,8 @@ def executing_queries(capsys): queries = get_snippets("usage/queries") async_query = queries.pop(0) - assert "asyncio" in async_query and "graphql_sync" not in async_query + assert "asyncio" in async_query + assert 
"graphql_sync" not in async_query assert "asyncio.run" in async_query from asyncio import run # noqa: F401 @@ -151,7 +154,8 @@ def executing_queries(capsys): assert out == expected_result(queries) sync_query = queries.pop(0) - assert "graphql_sync" in sync_query and "asyncio" not in sync_query + assert "graphql_sync" in sync_query + assert "asyncio" not in sync_query exec(sync_query, scope) out, err = capsys.readouterr() assert not err @@ -171,7 +175,8 @@ def executing_queries(capsys): exec(typename_query, scope) out, err = capsys.readouterr() assert not err - assert "__typename" in out and "Human" in out + assert "__typename" in out + assert "Human" in out assert out == expected_result(queries) backstory_query = queries.pop(0) @@ -179,7 +184,8 @@ def executing_queries(capsys): exec(backstory_query, scope) out, err = capsys.readouterr() assert not err - assert "errors" in out and "secretBackstory" in out + assert "errors" in out + assert "secretBackstory" in out assert out == expected_result(queries) def using_the_sdl(capsys): @@ -208,11 +214,14 @@ def using_the_sdl(capsys): assert schema.get_type("Episode").values["EMPIRE"].value == 5 query = use_sdl.pop(0) - assert "graphql_sync" in query and "print(result)" in query + assert "graphql_sync" in query + assert "print(result)" in query exec(query, scope) out, err = capsys.readouterr() assert not err - assert "Luke" in out and "appearsIn" in out and "EMPIRE" in out + assert "Luke" in out + assert "appearsIn" in out + assert "EMPIRE" in out assert out == expected_result(use_sdl) def using_resolver_methods(capsys): @@ -229,11 +238,14 @@ def using_resolver_methods(capsys): assert "Root" in scope query = methods.pop(0) - assert "graphql_sync" in query and "Root()" in query + assert "graphql_sync" in query + assert "Root()" in query exec(query, scope) out, err = capsys.readouterr() assert not err - assert "R2-D2" in out and "primaryFunction" in out and "Astromech" in out + assert "R2-D2" in out + assert "primaryFunction" 
in out + assert "Astromech" in out assert out == expected_result(methods) def using_introspection(capsys): @@ -343,7 +355,8 @@ def extending_a_schema(capsys): exec(query, scope) out, err = capsys.readouterr() assert not err - assert "lastName" in out and "Skywalker" in out + assert "lastName" in out + assert "Skywalker" in out assert out == expected_result(extension) def validating_queries(): diff --git a/tests/test_star_wars_query.py b/tests/test_star_wars_query.py index 5c6fa40e..6e5bbf59 100644 --- a/tests/test_star_wars_query.py +++ b/tests/test_star_wars_query.py @@ -1,5 +1,4 @@ -from pytest import mark - +import pytest from graphql import graphql, graphql_sync from .star_wars_schema import star_wars_schema as schema @@ -7,7 +6,7 @@ def describe_star_wars_query_tests(): def describe_basic_queries(): - @mark.asyncio + @pytest.mark.asyncio() async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): source = """ query HeroNameQuery { @@ -19,7 +18,7 @@ async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"name": "R2-D2"}}, None) - @mark.asyncio + @pytest.mark.asyncio() async def accepts_positional_arguments_to_graphql(): source = """ query HeroNameQuery { @@ -34,7 +33,7 @@ async def accepts_positional_arguments_to_graphql(): sync_result = graphql_sync(schema, source) assert sync_result == result - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): source = """ query HeroNameAndFriendsQuery { @@ -64,7 +63,7 @@ async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): ) def describe_nested_queries(): - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): source = """ query NestedQuery { @@ -122,7 +121,7 @@ async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): ) def describe_using_ids_and_query_parameters_to_refetch_objects(): - 
@mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_query_for_r2_d2_directly_using_his_id(): source = """ query { @@ -134,7 +133,7 @@ async def allows_us_to_query_for_r2_d2_directly_using_his_id(): result = await graphql(schema=schema, source=source) assert result == ({"droid": {"name": "R2-D2"}}, None) - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_query_characters_directly_using_their_id(): source = """ query FetchLukeAndC3POQuery { @@ -152,7 +151,7 @@ async def allows_us_to_query_characters_directly_using_their_id(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -167,7 +166,7 @@ async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): ) assert result == ({"human": {"name": "Luke Skywalker"}}, None) - @mark.asyncio + @pytest.mark.asyncio() async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -182,7 +181,7 @@ async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): ) assert result == ({"human": {"name": "Han Solo"}}, None) - @mark.asyncio + @pytest.mark.asyncio() async def generic_query_that_gets_null_back_when_passed_invalid_id(): source = """ query humanQuery($id: String!) 
{ @@ -198,7 +197,7 @@ async def generic_query_that_gets_null_back_when_passed_invalid_id(): assert result == ({"human": None}, None) def describe_using_aliases_to_change_the_key_in_the_response(): - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): source = """ query FetchLukeAliased { @@ -210,7 +209,7 @@ async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): result = await graphql(schema=schema, source=source) assert result == ({"luke": {"name": "Luke Skywalker"}}, None) - @mark.asyncio + @pytest.mark.asyncio() async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): source = """ query FetchLukeAndLeiaAliased { @@ -229,7 +228,7 @@ async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): ) def describe_uses_fragments_to_express_more_complex_queries(): - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_query_using_duplicated_content(): source = """ query DuplicateFields { @@ -252,7 +251,7 @@ async def allows_us_to_query_using_duplicated_content(): None, ) - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): source = """ query UseFragment { @@ -278,7 +277,7 @@ async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): ) def describe_using_typename_to_find_the_type_of_an_object(): - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_verify_that_r2_d2_is_a_droid(): source = """ query CheckTypeOfR2 { @@ -291,7 +290,7 @@ async def allows_us_to_verify_that_r2_d2_is_a_droid(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"__typename": "Droid", "name": "R2-D2"}}, None) - @mark.asyncio + @pytest.mark.asyncio() async def allows_us_to_verify_that_luke_is_a_human(): source = """ query CheckTypeOfLuke { @@ -308,7 +307,7 @@ async def allows_us_to_verify_that_luke_is_a_human(): ) def describe_reporting_errors_raised_in_resolvers(): - @mark.asyncio 
+ @pytest.mark.asyncio() async def correctly_reports_error_on_accessing_secret_backstory(): source = """ query HeroNameQuery { @@ -330,7 +329,7 @@ async def correctly_reports_error_on_accessing_secret_backstory(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def correctly_reports_error_on_accessing_backstory_in_a_list(): source = """ query HeroNameQuery { @@ -374,7 +373,7 @@ async def correctly_reports_error_on_accessing_backstory_in_a_list(): ], ) - @mark.asyncio + @pytest.mark.asyncio() async def correctly_reports_error_on_accessing_through_an_alias(): source = """ query HeroNameQuery { diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 8f2879b7..bcb8321e 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -9,8 +9,7 @@ from enum import Enum from typing import Any, AsyncIterable, Dict, List, NamedTuple, Optional -from pytest import fixture, mark - +import pytest from graphql import ( GraphQLArgument, GraphQLBoolean, @@ -132,19 +131,17 @@ async def resolve_user(_root, info, **args): async def resolve_create_user(_root, info, data): """Resolver function for creating a user object""" - user = await info.context["registry"].create(**data) - return user + return await info.context["registry"].create(**data) # noinspection PyShadowingBuiltins -async def resolve_update_user(_root, info, id, data): +async def resolve_update_user(_root, info, id, data): # noqa: A002 """Resolver function for updating a user object""" - user = await info.context["registry"].update(id, **data) - return user + return await info.context["registry"].update(id, **data) # noinspection PyShadowingBuiltins -async def resolve_delete_user(_root, info, id): +async def resolve_delete_user(_root, info, id): # noqa: A002 """Resolver function for deleting a user object""" user = await info.context["registry"].get(id) await info.context["registry"].delete(user.id) @@ -152,7 +149,7 @@ async def resolve_delete_user(_root, info, id): # noinspection 
PyShadowingBuiltins -async def subscribe_user(_root, info, id=None): +async def subscribe_user(_root, info, id=None): # noqa: A002 """Subscribe to mutations of a specific user object or all user objects""" async_iterator = info.context["registry"].event_iterator(id) async for event in async_iterator: @@ -160,7 +157,7 @@ async def subscribe_user(_root, info, id=None): # noinspection PyShadowingBuiltins,PyUnusedLocal -async def resolve_subscription_user(event, info, id): +async def resolve_subscription_user(event, info, id): # noqa: ARG001, A002 """Resolver function for user subscriptions""" user = event["user"] mutation = MutationEnum(event["mutation"]).value @@ -213,13 +210,13 @@ async def resolve_subscription_user(event, info, id): ) -@fixture +@pytest.fixture() def context(): return {"registry": UserRegistry()} def describe_query(): - @mark.asyncio + @pytest.mark.asyncio() async def query_user(context): user = await context["registry"].create( firstName="John", lastName="Doe", tweets=42, verified=True @@ -251,7 +248,7 @@ async def query_user(context): def describe_mutation(): - @mark.asyncio + @pytest.mark.asyncio() async def create_user(context): received = {} @@ -262,7 +259,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub + pubsub = context["registry"]._pubsub # noqa: SLF001s pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) @@ -273,7 +270,12 @@ def receive(msg): } } """ - user_data = dict(firstName="John", lastName="Doe", tweets=42, verified=True) + user_data = { + "firstName": "John", + "lastName": "Doe", + "tweets": 42, + "verified": True, + } variables = {"userData": user_data} result = await graphql( schema, query, context_value=context, variable_values=variables @@ -298,7 +300,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.CREATED.value}, } - @mark.asyncio + @pytest.mark.asyncio() async def update_user(context): received 
= {} @@ -309,7 +311,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) @@ -354,7 +356,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value}, } - @mark.asyncio + @pytest.mark.asyncio() async def delete_user(context): received = {} @@ -365,7 +367,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) @@ -396,7 +398,7 @@ def receive(msg): def describe_subscription(): - @mark.asyncio + @pytest.mark.asyncio() async def subscribe_to_user_mutations(context): query = """ subscription ($userId: ID!) { diff --git a/tests/test_version.py b/tests/test_version.py index 2a4ba509..49dad199 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -9,7 +9,6 @@ version_js, ) - _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(?:(a|b|r?c)(\d+))?$") diff --git a/tests/type/test_assert_name.py b/tests/type/test_assert_name.py index 06dd1116..55ef75c7 100644 --- a/tests/type/test_assert_name.py +++ b/tests/type/test_assert_name.py @@ -1,5 +1,4 @@ -from pytest import mark, raises - +import pytest from graphql.error import GraphQLError from graphql.type import assert_enum_value_name, assert_name @@ -9,19 +8,19 @@ def pass_through_valid_name(): assert assert_name("_ValidName123") == "_ValidName123" def throws_on_empty_strings(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_name("") msg = str(exc_info.value) assert msg == "Expected name to be a non-empty string." 
def throws_for_names_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_name(">--()-->") msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but '>--()-->' does not." def throws_for_names_starting_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_name("42MeaningsOfLife") msg = str(exc_info.value) assert msg == ( @@ -34,35 +33,35 @@ def pass_through_valid_name(): assert assert_enum_value_name("_ValidName123") == "_ValidName123" def throws_for_non_strings(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker assert_enum_value_name({}) # type: ignore msg = str(exc_info.value) assert msg == "Expected name to be a string." def throws_on_empty_strings(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name("") msg = str(exc_info.value) assert msg == "Expected name to be a non-empty string." def throws_for_names_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name(">--()-->") msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but '>--()-->' does not." def throws_for_names_starting_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name("42MeaningsOfLife") msg = str(exc_info.value) assert msg == ( "Names must start with [_a-zA-Z] but '42MeaningsOfLife' does not." 
) - @mark.parametrize("name", ("true", "false", "null")) + @pytest.mark.parametrize("name", ["true", "false", "null"]) def throws_for_restricted_names(name): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name(name) msg = str(exc_info.value) assert msg == (f"Enum values cannot be named: {name}.") diff --git a/tests/type/test_custom_scalars.py b/tests/type/test_custom_scalars.py index e2576498..2fa91d9d 100644 --- a/tests/type/test_custom_scalars.py +++ b/tests/type/test_custom_scalars.py @@ -15,7 +15,6 @@ ) from graphql.utilities import value_from_ast_untyped - # this test is not (yet) part of GraphQL.js, see # https://github.com/graphql/graphql-js/issues/2657 diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index c06c1a68..d57f558e 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -3,8 +3,7 @@ from math import isnan, nan from typing import Dict -from pytest import mark, raises - +import pytest from graphql.error import GraphQLError from graphql.language import ( EnumTypeDefinitionNode, @@ -44,7 +43,6 @@ introspection_types, ) - ScalarType = GraphQLScalarType("Scalar") ObjectType = GraphQLObjectType("Object", {}) InterfaceType = GraphQLInterfaceType("Interface", {}) @@ -80,7 +78,7 @@ def defines_a_scalar_type(): } def accepts_a_scalar_type_defining_serialize(): - def serialize(value): + def serialize(_value): pass scalar = GraphQLScalarType("SomeScalar", serialize) @@ -156,23 +154,23 @@ def accepts_a_scalar_type_with_ast_node_and_extension_ast_nodes(): assert scalar.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_a_scalar_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLScalarType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as 
exc_info: # noinspection PyTypeChecker GraphQLScalarType(None) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLScalarType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_a_scalar_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLScalarType("") assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLScalarType("bad-name") assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." @@ -182,7 +180,7 @@ def rejects_a_scalar_type_defining_parse_literal_but_not_parse_value(): def parse_literal(_node: ValueNode, _vars=None): return Undefined # pragma: no cover - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLScalarType("SomeScalar", parse_literal=parse_literal) assert str(exc_info.value) == ( "SomeScalar must provide both" @@ -413,7 +411,8 @@ def accepts_a_lambda_as_an_object_field_resolver(): "SomeObject", { "f": GraphQLField( - ScalarType, resolve=lambda _obj, _info: {} # pragma: no cover + ScalarType, + resolve=lambda _obj, _info: {}, # pragma: no cover ) }, ) @@ -432,23 +431,23 @@ def accepts_an_object_type_with_ast_node_and_extension_ast_nodes(): assert object_type.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_an_object_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLObjectType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLObjectType(None, {}) # type: ignore assert 
str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLObjectType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_an_object_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLObjectType("", {}) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLObjectType("bad-name", {}) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." @@ -458,7 +457,7 @@ def rejects_an_object_type_with_incorrectly_named_fields(): obj_type = GraphQLObjectType( "SomeObject", {"bad-name": GraphQLField(ScalarType)} ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert not obj_type.fields msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." @@ -468,7 +467,7 @@ def fields(): raise RuntimeError("Oops!") obj_type = GraphQLObjectType("SomeObject", fields) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not obj_type.fields assert str(exc_info.value) == "SomeObject fields cannot be resolved. Oops!" @@ -481,7 +480,7 @@ def rejects_an_object_type_with_incorrectly_named_field_args(): ) }, ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert not obj_type.fields msg = str(exc_info.value) assert msg == ( @@ -494,7 +493,7 @@ def interfaces(): raise RuntimeError("Oops!") obj_type = GraphQLObjectType("SomeObject", {}, interfaces=interfaces) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not obj_type.interfaces assert str(exc_info.value) == "SomeObject interfaces cannot be resolved. Oops!" 
@@ -531,7 +530,8 @@ def resolve_type(_obj, _info, _type): def accepts_an_interface_type_with_output_types_as_fields(): interface = GraphQLInterfaceType( - "AnotherInterface", {"someField": ScalarType} # type: ignore + "AnotherInterface", + {"someField": ScalarType}, # type: ignore ) fields = interface.fields assert isinstance(fields, dict) @@ -604,15 +604,15 @@ def fields(): raise RuntimeError("Oops!") interface = GraphQLInterfaceType("SomeInterface", fields) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not interface.fields assert str(exc_info.value) == "SomeInterface fields cannot be resolved. Oops!" def rejects_an_interface_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInterfaceType("", {}) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInterfaceType("bad-name", {}) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." 
@@ -623,7 +623,7 @@ def interfaces(): raise RuntimeError("Oops!") interface = GraphQLInterfaceType("AnotherInterface", {}, interfaces) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not interface.interfaces assert ( str(exc_info.value) @@ -644,7 +644,9 @@ def accepts_a_union_type_with_function_returning_a_list_of_types(): assert union_type.types == (ObjectType,) def accepts_a_union_type_without_types(): - with raises(TypeError, match="missing 1 required positional argument: 'types'"): + with pytest.raises( + TypeError, match="missing 1 required positional argument: 'types'" + ): # noinspection PyArgumentList GraphQLUnionType("SomeUnion") # type: ignore union_type = GraphQLUnionType("SomeUnion", None) # type: ignore @@ -665,10 +667,10 @@ def accepts_a_union_type_with_ast_node_and_extension_ast_nodes(): assert union_type.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_a_union_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLUnionType("", []) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLUnionType("bad-name", []) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." @@ -679,7 +681,7 @@ def types(): raise RuntimeError("Oops!") union_type = GraphQLUnionType("SomeUnion", types) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not union_type.types assert str(exc_info.value) == "SomeUnion types cannot be resolved. Oops!" 
@@ -732,7 +734,9 @@ def defines_an_enum_using_members_of_a_python_enum(): def defines_an_enum_type_with_a_description(): description = "nice enum" enum_type = GraphQLEnumType( - "SomeEnum", {}, description=description # type: ignore + "SomeEnum", + {}, # type: ignore + description=description, ) assert enum_type.description is description assert enum_type.to_kwargs()["description"] is description @@ -802,30 +806,30 @@ def serializes_an_enum(): assert enum_type.values["FOO"].value == "fooValue" assert enum_type.values["BAR"].value == ["barValue"] assert enum_type.values["BAZ"].value is None - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize(None) msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: None" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize(Undefined) msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: Undefined" assert enum_type.serialize("fooValue") == "FOO" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize("FOO") msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 'FOO'" assert enum_type.serialize(["barValue"]) == "BAR" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize("BAR") msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 'BAR'" assert enum_type.serialize("BAZ") == "BAZ" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize("bazValue") msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 'bazValue'" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize(["bazValue"]) msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 
['bazValue']" @@ -841,37 +845,37 @@ def parses_an_enum(): "SomeEnum", {"FOO": "fooValue", "BAR": ["barValue"], "BAZ": None} ) assert enum_type.parse_value("FOO") == "fooValue" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_value("fooValue") msg = exc_info.value.message assert msg == "Value 'fooValue' does not exist in 'SomeEnum' enum." assert enum_type.parse_value("BAR") == ["barValue"] - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: # noinspection PyTypeChecker enum_type.parse_value(["barValue"]) # type: ignore msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent non-string value: ['barValue']." assert enum_type.parse_value("BAZ") is None assert enum_type.parse_literal(EnumValueNode(value="FOO")) == "fooValue" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(StringValueNode(value="FOO")) assert exc_info.value.message == ( "Enum 'SomeEnum' cannot represent non-enum value: \"FOO\"." " Did you mean the enum value 'FOO'?" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(EnumValueNode(value="fooValue")) msg = exc_info.value.message assert msg == "Value 'fooValue' does not exist in 'SomeEnum' enum." assert enum_type.parse_literal(EnumValueNode(value="BAR")) == ["barValue"] - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(StringValueNode(value="BAR")) assert exc_info.value.message == ( "Enum 'SomeEnum' cannot represent non-enum value: \"BAR\"." " Did you mean the enum value 'BAR' or 'BAZ'?" 
) assert enum_type.parse_literal(EnumValueNode(value="BAZ")) is None - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(StringValueNode(value="BAZ")) assert exc_info.value.message == ( "Enum 'SomeEnum' cannot represent non-enum value: \"BAZ\"." @@ -891,40 +895,40 @@ def accepts_an_enum_type_with_ast_node_and_extension_ast_nodes(): assert enum_type.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_an_enum_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLEnumType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType(None, {}) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_an_enum_type_with_invalid_name(): values: Dict[str, GraphQLEnumValue] = {} - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("", values) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("bad-name", values) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) def rejects_an_enum_type_with_incorrectly_named_values(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("SomeEnum", {"bad-name": GraphQLField(ScalarType)}) msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." 
def rejects_an_enum_type_without_values(): - with raises(TypeError, match="missing .* required .* 'values'"): + with pytest.raises(TypeError, match="missing .* required .* 'values'"): # noinspection PyArgumentList GraphQLEnumType("SomeEnum") # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType("SomeEnum", values=None) # type: ignore assert str(exc_info.value) == ( @@ -932,7 +936,7 @@ def rejects_an_enum_type_without_values(): ) def rejects_an_enum_type_with_incorrectly_typed_values(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType("SomeEnum", [{"FOO": 10}]) # type: ignore assert str(exc_info.value) == ( @@ -1031,7 +1035,8 @@ def accepts_an_input_object_type_with_fields(): def accepts_an_input_object_type_with_input_type_as_field(): # this is a shortcut syntax for simple input fields input_obj_type = GraphQLInputObjectType( - "SomeInputObject", {"f": ScalarType} # type: ignore + "SomeInputObject", + {"f": ScalarType}, # type: ignore ) field = input_obj_type.fields["f"] assert isinstance(field, GraphQLInputField) @@ -1053,23 +1058,23 @@ def accepts_an_input_object_type_with_a_field_function(): assert input_field.out_name is None def rejects_an_input_object_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLInputObjectType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLInputObjectType(None, {}) # type: ignore assert str(exc_info.value) == "Must provide name." 
- with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLInputObjectType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_an_input_object_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInputObjectType("", {}) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInputObjectType("bad-name", {}) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." @@ -1079,7 +1084,7 @@ def rejects_an_input_object_type_with_incorrectly_named_fields(): input_obj_type = GraphQLInputObjectType( "SomeInputObject", {"bad-name": GraphQLInputField(ScalarType)} ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert not input_obj_type.fields msg = str(exc_info.value) assert msg == ( @@ -1091,7 +1096,7 @@ def fields(): raise RuntimeError("Oops!") input_obj_type = GraphQLInputObjectType("SomeInputObject", fields) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not input_obj_type.fields assert str(exc_info.value) == ( "SomeInputObject fields cannot be resolved. Oops!" 
@@ -1102,7 +1107,7 @@ def rejects_an_input_object_type_with_resolvers(): def resolve(): pass - with raises( + with pytest.raises( TypeError, match="got an unexpected keyword argument 'resolve'" ): # noinspection PyArgumentList @@ -1117,7 +1122,7 @@ def resolve(): ) def rejects_an_input_object_type_with_resolver_constant(): - with raises( + with pytest.raises( TypeError, match="got an unexpected keyword argument 'resolve'" ): # noinspection PyArgumentList @@ -1154,7 +1159,7 @@ def accepts_an_argument_with_an_ast_node(): assert argument.to_kwargs()["ast_node"] is ast_node def rejects_an_argument_without_type(): - with raises(TypeError, match="missing 1 required positional argument"): + with pytest.raises(TypeError, match="missing 1 required positional argument"): # noinspection PyArgumentList GraphQLArgument() # type: ignore @@ -1186,7 +1191,7 @@ def accepts_an_input_field_with_an_ast_node(): assert input_field.to_kwargs()["ast_node"] is ast_node def rejects_an_input_field_without_type(): - with raises(TypeError, match="missing 1 required positional argument"): + with pytest.raises(TypeError, match="missing 1 required positional argument"): # noinspection PyArgumentList GraphQLInputField() # type: ignore @@ -1220,7 +1225,7 @@ def describe_type_system_list(): NonNullScalarType, ] - @mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) + @pytest.mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) def accepts_a_type_as_item_type_of_list(type_): assert GraphQLList(type_) @@ -1237,7 +1242,7 @@ def describe_type_system_non_null(): ListOfNonNullScalarsType, ] - @mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) + @pytest.mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) def accepts_a_type_as_nullable_type_of_non_null(type_): assert GraphQLNonNull(type_) @@ -1292,5 +1297,7 @@ def describe_type_system_introspection_types(): def cannot_redefine_introspection_types(): for 
name, introspection_type in introspection_types.items(): assert introspection_type.name == name - with raises(TypeError, match=f"Redefinition of reserved type '{name}'"): + with pytest.raises( + TypeError, match=f"Redefinition of reserved type '{name}'" + ): introspection_type.__class__(**introspection_type.to_kwargs()) diff --git a/tests/type/test_directives.py b/tests/type/test_directives.py index 8a6fb332..3f29a947 100644 --- a/tests/type/test_directives.py +++ b/tests/type/test_directives.py @@ -1,5 +1,4 @@ -from pytest import raises - +import pytest from graphql.error import GraphQLError from graphql.language import DirectiveDefinitionNode, DirectiveLocation from graphql.type import GraphQLArgument, GraphQLDirective, GraphQLInt, GraphQLString @@ -61,7 +60,9 @@ def defines_a_repeatable_directive(): def directive_accepts_input_types_as_arguments(): # noinspection PyTypeChecker directive = GraphQLDirective( - name="Foo", locations=[], args={"arg": GraphQLString} # type: ignore + name="Foo", + locations=[], + args={"arg": GraphQLString}, # type: ignore ) arg = directive.args["arg"] assert isinstance(arg, GraphQLArgument) @@ -70,7 +71,8 @@ def directive_accepts_input_types_as_arguments(): def directive_accepts_strings_as_locations(): # noinspection PyTypeChecker directive = GraphQLDirective( - name="Foo", locations=["SCHEMA", "OBJECT"] # type: ignore + name="Foo", + locations=["SCHEMA", "OBJECT"], # type: ignore ) assert directive.locations == ( DirectiveLocation.SCHEMA, @@ -88,52 +90,52 @@ def directive_has_repr(): def can_compare_with_other_source_directive(): locations = [DirectiveLocation.QUERY] directive = GraphQLDirective("Foo", locations) - assert directive == directive - assert not directive != directive - assert not directive == {} + assert directive == directive # noqa: PLR0124 + assert not directive != directive # noqa: PLR0124, SIM202 + assert not directive == {} # noqa: SIM201 assert directive != {} same_directive = GraphQLDirective("Foo", locations) 
assert directive == same_directive - assert not directive != same_directive + assert not directive != same_directive # noqa: SIM202 other_directive = GraphQLDirective("Bar", locations) - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive other_locations = [DirectiveLocation.MUTATION] other_directive = GraphQLDirective("Foo", other_locations) - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive other_directive = GraphQLDirective("Foo", locations, is_repeatable=True) - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive other_directive = GraphQLDirective("Foo", locations, description="other") - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive def rejects_a_directive_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLDirective() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective(None, []) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_a_directive_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLDirective("", []) assert str(exc_info.value) == "Expected name to be a non-empty string." 
- with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLDirective("bad-name", []) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) def rejects_a_directive_with_incorrectly_named_args(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLDirective( "Foo", locations=[DirectiveLocation.QUERY], @@ -144,7 +146,7 @@ def rejects_a_directive_with_incorrectly_named_args(): ) def rejects_a_directive_with_undefined_locations(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective("Foo", locations=None) # type: ignore assert str(exc_info.value) == ( @@ -153,14 +155,14 @@ def rejects_a_directive_with_undefined_locations(): ) def rejects_a_directive_with_incorrectly_typed_locations(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective("Foo", locations="bad") # type: ignore assert ( str(exc_info.value) == "Foo locations must be specified" " as a collection of DirectiveLocation enum values." 
) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective("Foo", locations=["bad"]) # type: ignore assert str(exc_info.value) == ( diff --git a/tests/type/test_enum.py b/tests/type/test_enum.py index 0de836b3..3219224d 100644 --- a/tests/type/test_enum.py +++ b/tests/type/test_enum.py @@ -16,7 +16,6 @@ ) from graphql.utilities import introspection_from_schema - ColorType = GraphQLEnumType("Color", values={"RED": 0, "GREEN": 1, "BLUE": 2}) @@ -28,7 +27,7 @@ class ColorTypeEnumValues(Enum): class Complex1: # noinspection PyMethodMayBeStatic - some_random_object = datetime.now() + some_random_object = datetime.now() # noqa: DTZ005 class Complex2: @@ -52,7 +51,7 @@ class Complex2: "fromInt": GraphQLArgument(GraphQLInt), "fromString": GraphQLArgument(GraphQLString), }, - resolve=lambda _source, info, **args: args.get("fromInt") + resolve=lambda _source, _info, **args: args.get("fromInt") or args.get("fromString") or args.get("fromEnum"), ), @@ -62,7 +61,7 @@ class Complex2: "fromEnum": GraphQLArgument(ColorType), "fromInt": GraphQLArgument(GraphQLInt), }, - resolve=lambda _source, info, **args: args.get("fromEnum"), + resolve=lambda _source, _info, **args: args.get("fromEnum"), ), "complexEnum": GraphQLField( ComplexEnum, @@ -73,13 +72,16 @@ class Complex2: "provideGoodValue": GraphQLArgument(GraphQLBoolean), "provideBadValue": GraphQLArgument(GraphQLBoolean), }, - resolve=lambda _source, info, **args: + resolve=lambda _source, _info, **args: # Note: this is one of the references of the internal values # which ComplexEnum allows. - complex2 if args.get("provideGoodValue") + complex2 + if args.get("provideGoodValue") # Note: similar object, but not the same *reference* as # complex2 above. Enum internal values require object equality. 
- else Complex2() if args.get("provideBadValue") else args.get("fromEnum"), + else Complex2() + if args.get("provideBadValue") + else args.get("fromEnum"), ), }, ) @@ -90,7 +92,7 @@ class Complex2: "favoriteEnum": GraphQLField( ColorType, args={"color": GraphQLArgument(ColorType)}, - resolve=lambda _source, info, color=None: color, + resolve=lambda _source, _info, color=None: color, ) }, ) @@ -101,7 +103,7 @@ class Complex2: "subscribeToEnum": GraphQLField( ColorType, args={"color": GraphQLArgument(ColorType)}, - resolve=lambda _source, info, color=None: color, + resolve=lambda _source, _info, color=None: color, ) }, ) @@ -118,8 +120,8 @@ def execute_query(source: str, variable_values: Optional[Dict[str, Any]] = None) def describe_type_system_enum_values(): def can_use_python_enums_instead_of_dicts(): assert ColorType2.values == ColorType.values - keys = [key for key in ColorType.values] - keys2 = [key for key in ColorType2.values] + keys = list(ColorType.values) + keys2 = list(ColorType2.values) assert keys2 == keys values = [value.value for value in ColorType.values.values()] values2 = [value.value for value in ColorType2.values.values()] diff --git a/tests/type/test_extensions.py b/tests/type/test_extensions.py index 26a0d966..5aa087e2 100644 --- a/tests/type/test_extensions.py +++ b/tests/type/test_extensions.py @@ -1,5 +1,4 @@ -from pytest import param - +import pytest from graphql.type import ( GraphQLArgument, GraphQLDirective, @@ -15,10 +14,12 @@ GraphQLUnionType, ) - dummy_type = GraphQLScalarType("DummyScalar") -bad_extensions = [param([], id="list"), param({1: "ext"}, id="non_string_key")] +bad_extensions = [ + pytest.param([], id="list"), + pytest.param({1: "ext"}, id="non_string_key"), +] def describe_type_system_extensions(): diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index be3b6e82..bd006e74 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -1,7 +1,6 @@ from typing import Any -from pytest 
import raises - +import pytest from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, @@ -70,7 +69,6 @@ is_wrapping_type, ) - ObjectType = GraphQLObjectType("Object", {}) InterfaceType = GraphQLInterfaceType("Interface", {}) UnionType = GraphQLUnionType("Union", types=[ObjectType]) @@ -94,12 +92,12 @@ def returns_true_for_wrapped_types(): def returns_false_for_type_classes_rather_than_instance(): assert is_type(GraphQLObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_type(GraphQLObjectType) def returns_false_for_random_garbage(): assert is_type({"what": "is this"}) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_type({"what": "is this"}) def describe_is_scalar_type(): @@ -113,28 +111,28 @@ def returns_true_for_custom_scalar(): def returns_false_for_scalar_class_rather_than_instance(): assert is_scalar_type(GraphQLScalarType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(GraphQLScalarType) def returns_false_for_wrapped_scalar(): assert is_scalar_type(GraphQLList(ScalarType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(GraphQLList(ScalarType)) def returns_false_for_non_scalar(): assert is_scalar_type(EnumType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(EnumType) assert is_scalar_type(Directive) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(Directive) def returns_false_for_random_garbage(): assert is_scalar_type(None) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(None) assert is_scalar_type({"what": "is this"}) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type({"what": "is this"}) def describe_is_specified_scalar_type(): @@ -152,12 +150,12 @@ def returns_true_for_object_type(): def returns_false_for_wrapped_object_type(): 
assert is_object_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_object_type(GraphQLList(ObjectType)) def returns_false_for_non_object_type(): assert is_scalar_type(InterfaceType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(InterfaceType) def describe_is_interface_type(): @@ -167,12 +165,12 @@ def returns_true_for_interface_type(): def returns_false_for_wrapped_interface_type(): assert is_interface_type(GraphQLList(InterfaceType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_interface_type(GraphQLList(InterfaceType)) def returns_false_for_non_interface_type(): assert is_interface_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_interface_type(ObjectType) def describe_is_union_type(): @@ -182,12 +180,12 @@ def returns_true_for_union_type(): def returns_false_for_wrapped_union_type(): assert is_union_type(GraphQLList(UnionType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_union_type(GraphQLList(UnionType)) def returns_false_for_non_union_type(): assert is_union_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_union_type(ObjectType) def describe_is_enum_type(): @@ -197,12 +195,12 @@ def returns_true_for_enum_type(): def returns_false_for_wrapped_enum_type(): assert is_enum_type(GraphQLList(EnumType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_enum_type(GraphQLList(EnumType)) def returns_false_for_non_enum_type(): assert is_enum_type(ScalarType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_enum_type(ScalarType) def describe_is_input_object_type(): @@ -212,12 +210,12 @@ def returns_true_for_input_object_type(): def returns_false_for_wrapped_input_object_type(): assert is_input_object_type(GraphQLList(InputObjectType)) is False - with raises(TypeError): + with 
pytest.raises(TypeError): assert_input_object_type(GraphQLList(InputObjectType)) def returns_false_for_non_input_object_type(): assert is_input_object_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_input_object_type(ObjectType) def describe_is_list_type(): @@ -227,12 +225,12 @@ def returns_true_for_a_list_wrapped_type(): def returns_false_for_a_unwrapped_type(): assert is_list_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_list_type(ObjectType) def returns_false_for_a_non_list_wrapped_type(): assert is_list_type(GraphQLNonNull(GraphQLList(ObjectType))) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_list_type(GraphQLNonNull(GraphQLList(ObjectType))) def describe_is_non_null_type(): @@ -242,12 +240,12 @@ def returns_true_for_a_non_null_wrapped_type(): def returns_false_for_an_unwrapped_type(): assert is_non_null_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_non_null_type(ObjectType) def returns_false_for_a_not_non_null_wrapped_type(): assert is_non_null_type(GraphQLList(GraphQLNonNull(ObjectType))) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_non_null_type(GraphQLList(GraphQLNonNull(ObjectType))) def describe_is_input_type(): @@ -271,7 +269,7 @@ def returns_true_for_a_wrapped_input_type(): def _assert_non_input_type(type_: Any): assert is_input_type(type_) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_input_type(type_) def returns_false_for_an_output_type(): @@ -315,7 +313,7 @@ def returns_true_for_a_wrapped_output_type(): def _assert_non_output_type(type_: Any): assert is_output_type(type_) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_output_type(type_) def returns_false_for_an_input_type(): @@ -334,17 +332,17 @@ def returns_true_for_scalar_and_enum_types(): def returns_false_for_wrapped_leaf_type(): assert 
is_leaf_type(GraphQLList(ScalarType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_leaf_type(GraphQLList(ScalarType)) def returns_false_for_non_leaf_type(): assert is_leaf_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_leaf_type(ObjectType) def returns_false_for_wrapped_non_leaf_type(): assert is_leaf_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_leaf_type(GraphQLList(ObjectType)) def describe_is_composite_type(): @@ -358,17 +356,17 @@ def returns_true_for_object_interface_and_union_types(): def returns_false_for_wrapped_composite_type(): assert is_composite_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_composite_type(GraphQLList(ObjectType)) def returns_false_for_non_composite_type(): assert is_composite_type(InputObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_composite_type(InputObjectType) def returns_false_for_wrapped_non_composite_type(): assert is_composite_type(GraphQLList(InputObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_composite_type(GraphQLList(InputObjectType)) def describe_is_abstract_type(): @@ -380,17 +378,17 @@ def returns_true_for_interface_and_union_types(): def returns_false_for_wrapped_abstract_type(): assert is_abstract_type(GraphQLList(InterfaceType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_abstract_type(GraphQLList(InterfaceType)) def returns_false_for_non_abstract_type(): assert is_abstract_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_abstract_type(ObjectType) def returns_false_for_wrapped_non_abstract_type(): assert is_abstract_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_abstract_type(GraphQLList(ObjectType)) def 
describe_is_wrapping_type(): @@ -402,7 +400,7 @@ def returns_true_for_list_and_non_null_types(): def returns_false_for_unwrapped_types(): assert is_wrapping_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_wrapping_type(ObjectType) def describe_is_nullable_type(): @@ -416,7 +414,7 @@ def returns_true_for_list_of_non_null_types(): def returns_false_for_non_null_types(): assert is_nullable_type(GraphQLNonNull(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_nullable_type(GraphQLNonNull(ObjectType)) def describe_get_nullable_type(): @@ -438,10 +436,10 @@ def returns_true_for_unwrapped_types(): def returns_false_for_list_and_non_null_types(): assert is_named_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_named_type(GraphQLList(ObjectType)) assert is_named_type(GraphQLNonNull(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_named_type(GraphQLNonNull(ObjectType)) def describe_get_named_type(): @@ -513,23 +511,23 @@ def returns_true_for_custom_directive(): def returns_false_for_directive_class_rather_than_instance(): assert is_directive(GraphQLDirective) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_directive(GraphQLScalarType) def returns_false_for_non_directive(): assert is_directive(EnumType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_directive(EnumType) assert is_directive(ScalarType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_directive(ScalarType) def returns_false_for_random_garbage(): assert is_directive(None) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_directive(None) assert is_directive({"what": "is this"}) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_directive({"what": "is this"}) def describe_is_specified_directive(): @@ -552,18 
+550,18 @@ def returns_true_for_schema(): def returns_false_for_schema_class_rather_than_instance(): assert is_schema(GraphQLSchema) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_schema(GraphQLSchema) def returns_false_for_non_schema(): assert is_schema(EnumType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_schema(EnumType) assert is_schema(ScalarType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_schema(ScalarType) def return_false_for_random_garbage(): assert is_schema({"what": "is this"}) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_schema({"what": "is this"}) diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py index f2a45a67..27255388 100644 --- a/tests/type/test_scalars.py +++ b/tests/type/test_scalars.py @@ -2,8 +2,7 @@ from math import inf, nan, pi from typing import Any -from pytest import raises - +import pytest from graphql.error import GraphQLError from graphql.language import parse_value as parse_value_to_ast from graphql.pyutils import Undefined @@ -23,7 +22,7 @@ def parse_value(): _parse_value = GraphQLInt.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -60,7 +59,7 @@ def _parse_literal(s: str): return GraphQLInt.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -118,64 +117,64 @@ def serializes(): # The GraphQL specification does not allow serializing non-integer # values as Int to avoid accidental data loss. 
- with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(0.1) assert str(exc_info.value) == "Int cannot represent non-integer value: 0.1" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(1.1) assert str(exc_info.value) == "Int cannot represent non-integer value: 1.1" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(-1.1) assert str(exc_info.value) == "Int cannot represent non-integer value: -1.1" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("-1.1") assert ( str(exc_info.value) == "Int cannot represent non-integer value: '-1.1'" ) # Maybe a safe JavaScript int, but bigger than 2^32, so not # representable as a GraphQL Int - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(9876504321) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: 9876504321" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(-9876504321) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: -9876504321" ) # Too big to represent as an Int in JavaScript or GraphQL - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(1e100) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: 1e+100" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(-1e100) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: -1e+100" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("one") assert ( str(exc_info.value) == "Int cannot represent non-integer value: 'one'" ) # Doesn't represent number - with raises(GraphQLError) as exc_info: + with 
pytest.raises(GraphQLError) as exc_info: serialize("") assert str(exc_info.value) == "Int cannot represent non-integer value: ''" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert str(exc_info.value) == "Int cannot represent non-integer value: nan" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(inf) assert str(exc_info.value) == "Int cannot represent non-integer value: inf" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([5]) assert str(exc_info.value) == "Int cannot represent non-integer value: [5]" def cannot_be_redefined(): - with raises(TypeError, match="Redefinition of reserved type 'Int'"): + with pytest.raises(TypeError, match="Redefinition of reserved type 'Int'"): GraphQLScalarType(name="Int") def pickles(): @@ -186,7 +185,7 @@ def parse_value(): _parse_value = GraphQLFloat.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -222,7 +221,7 @@ def _parse_literal(s: str): return GraphQLFloat.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -280,32 +279,34 @@ def serializes(): assert serialize(True) == 1 assert serialize(type("Float", (float,), {})(5.5)) == 5.5 - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert ( str(exc_info.value) == "Float cannot represent non numeric value: nan" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(inf) assert ( str(exc_info.value) == "Float cannot represent non numeric value: inf" ) - with 
raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("one") assert str(exc_info.value) == ( "Float cannot represent non numeric value: 'one'" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("") assert str(exc_info.value) == "Float cannot represent non numeric value: ''" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([5]) assert ( str(exc_info.value) == "Float cannot represent non numeric value: [5]" ) def cannot_be_redefined(): - with raises(TypeError, match="Redefinition of reserved type 'Float'"): + with pytest.raises( + TypeError, match="Redefinition of reserved type 'Float'" + ): GraphQLScalarType(name="Float") def pickles(): @@ -316,7 +317,7 @@ def parse_value(): _parse_value = GraphQLString.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -346,7 +347,7 @@ def _parse_literal(s: str): return GraphQLString.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -398,19 +399,19 @@ def __str__(self): assert serialize(StringableObjValue()) == "something useful" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert str(exc_info.value) == "String cannot represent value: nan" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([1]) assert str(exc_info.value) == "String cannot represent value: [1]" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({}) assert str(exc_info.value) == "String cannot represent value: {}" - with 
raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({"value_of": "value_of string"}) assert ( str(exc_info.value) == "String cannot represent value:" @@ -418,7 +419,9 @@ def __str__(self): ) def cannot_be_redefined(): - with raises(TypeError, match="Redefinition of reserved type 'String'"): + with pytest.raises( + TypeError, match="Redefinition of reserved type 'String'" + ): GraphQLScalarType(name="String") def pickles(): @@ -429,7 +432,7 @@ def parse_value(): _parse_value = GraphQLBoolean.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -467,7 +470,7 @@ def _parse_literal(s: str): return GraphQLBoolean.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -532,42 +535,44 @@ def serializes(): assert serialize(0) is False assert serialize(True) is True assert serialize(False) is False - with raises(TypeError, match="not an acceptable base type"): + with pytest.raises(TypeError, match="not an acceptable base type"): # you can't subclass bool in Python assert serialize(type("Boolean", (bool,), {})(True)) is True - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: nan" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("") assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: ''" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("True") assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: 'True'" ) - with 
raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([False]) assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: [False]" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({}) assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: {}" ) def cannot_be_redefined(): - with raises(TypeError, match="Redefinition of reserved type 'Boolean'"): + with pytest.raises( + TypeError, match="Redefinition of reserved type 'Boolean'" + ): GraphQLScalarType(name="Boolean") def pickles(): @@ -578,7 +583,7 @@ def parse_value(): _parse_value = GraphQLID.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -610,7 +615,7 @@ def _parse_literal(s: str): return GraphQLID.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -678,24 +683,24 @@ def __str__(self): obj_value = ObjValue(123) assert serialize(obj_value) == "123" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(True) assert str(exc_info.value) == "ID cannot represent value: True" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(3.14) assert str(exc_info.value) == "ID cannot represent value: 3.14" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({}) assert str(exc_info.value) == "ID cannot represent value: {}" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(["abc"]) assert str(exc_info.value) == "ID cannot represent value: ['abc']" def 
cannot_be_redefined(): - with raises(TypeError, match="Redefinition of reserved type 'ID'"): + with pytest.raises(TypeError, match="Redefinition of reserved type 'ID'"): GraphQLScalarType(name="ID") def pickles(): diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index c30cd4de..f589302b 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -1,7 +1,6 @@ from copy import deepcopy -from pytest import raises - +import pytest from graphql.language import ( DirectiveLocation, SchemaDefinitionNode, @@ -369,7 +368,7 @@ def rejects_a_schema_which_redefines_a_built_in_type(): }, ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(QueryType) msg = str(exc_info.value) assert msg == ( @@ -381,7 +380,7 @@ def rejects_a_schema_when_a_provided_type_has_no_name(): query = GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}) types = [GraphQLType(), query, GraphQLType()] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(query, types=types) # type: ignore msg = str(exc_info.value) assert msg == ( @@ -394,7 +393,7 @@ def rejects_a_schema_which_defines_an_object_twice(): GraphQLObjectType("SameName", {}), ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(types=types) msg = str(exc_info.value) assert msg == ( @@ -412,7 +411,7 @@ def rejects_a_schema_which_defines_fields_with_conflicting_types(): }, ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(QueryType) msg = str(exc_info.value) assert msg == ( diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index e9f5d5e6..4ed1c09e 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -1,8 +1,7 @@ from operator import attrgetter from typing import List, Union -from pytest import mark, raises - +import pytest from graphql.language import DirectiveLocation, 
parse from graphql.pyutils import inspect from graphql.type import ( @@ -36,7 +35,6 @@ from ..utils import dedent - SomeSchema = build_schema( """ scalar SomeScalar @@ -555,7 +553,7 @@ def rejects_an_object_type_with_missing_fields(): assert msg == "Type IncompleteObject must define one or more fields." manual_schema_2 = schema_with_field_type( - GraphQLObjectType("IncompleteObject", lambda: {}) + GraphQLObjectType("IncompleteObject", dict) ) msg = validate_schema(manual_schema_2)[0].message assert msg == "Type IncompleteObject must define one or more fields." @@ -749,13 +747,14 @@ def rejects_a_union_type_with_non_object_member_types(): for member_type in bad_union_member_types: # invalid union type cannot be built with Python bad_union = GraphQLUnionType( - "BadUnion", types=[member_type] # type: ignore + "BadUnion", + types=[member_type], # type: ignore ) bad_schema = schema_with_field_type(bad_union) assert validate_schema(bad_schema) == [ { "message": "Union type BadUnion can only include Object types," - + f" it cannot include {inspect(member_type)}." + f" it cannot include {inspect(member_type)}." 
} ] @@ -1006,7 +1005,7 @@ def _schema_with_object_field(type_: GraphQLOutputType) -> GraphQLSchema: types=[SomeObjectType], ) - @mark.parametrize("type_", output_types, ids=get_name) + @pytest.mark.parametrize("type_", output_types, ids=get_name) def accepts_an_output_type_as_an_object_field_type(type_): schema = _schema_with_object_field(type_) assert validate_schema(schema) == [] @@ -1021,7 +1020,7 @@ def rejects_an_empty_object_field_type(): } ] - @mark.parametrize("type_", not_output_types, ids=get_name) + @pytest.mark.parametrize("type_", not_output_types, ids=get_name) def rejects_a_non_output_type_as_an_object_field_type(type_): schema = _schema_with_object_field(type_) assert validate_schema(schema) == [ @@ -1031,7 +1030,7 @@ def rejects_a_non_output_type_as_an_object_field_type(type_): } ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_an_object_field_type(type_): schema = _schema_with_object_field(type_) assert validate_schema(schema) == [ @@ -1301,7 +1300,7 @@ def _schema_with_interface_field(type_: GraphQLOutputType) -> GraphQLSchema: types=[bad_implementing_type, SomeObjectType], ) - @mark.parametrize("type_", output_types, ids=get_name) + @pytest.mark.parametrize("type_", output_types, ids=get_name) def accepts_an_output_type_as_an_interface_field_type(type_): schema = _schema_with_interface_field(type_) assert validate_schema(schema) == [] @@ -1320,7 +1319,7 @@ def rejects_an_empty_interface_field_type(): }, ] - @mark.parametrize("type_", not_output_types, ids=get_name) + @pytest.mark.parametrize("type_", not_output_types, ids=get_name) def rejects_a_non_output_type_as_an_interface_field_type(type_): schema = _schema_with_interface_field(type_) assert validate_schema(schema) == [ @@ -1334,7 +1333,7 @@ def rejects_a_non_output_type_as_an_interface_field_type(type_): }, ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + 
@pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_an_interface_field_type(type_): schema = _schema_with_interface_field(type_) assert validate_schema(schema) == [ @@ -1415,7 +1414,7 @@ def _schema_with_arg(type_: GraphQLInputType) -> GraphQLSchema: ], ) - @mark.parametrize("type_", input_types, ids=get_name) + @pytest.mark.parametrize("type_", input_types, ids=get_name) def accepts_an_input_type_as_a_field_arg_type(type_): schema = _schema_with_arg(type_) assert validate_schema(schema) == [] @@ -1434,7 +1433,7 @@ def rejects_an_empty_field_arg_type(): }, ] - @mark.parametrize("type_", not_input_types, ids=get_name) + @pytest.mark.parametrize("type_", not_input_types, ids=get_name) def rejects_a_non_input_type_as_a_field_arg_type(type_): schema = _schema_with_arg(type_) assert validate_schema(schema) == [ @@ -1448,7 +1447,7 @@ def rejects_a_non_input_type_as_a_field_arg_type(type_): }, ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_a_field_arg_type(type_): schema = _schema_with_arg(type_) assert validate_schema(schema) == [ @@ -1532,7 +1531,7 @@ def _schema_with_input_field(type_: GraphQLInputType) -> GraphQLSchema: ) ) - @mark.parametrize("type_", input_types, ids=get_name) + @pytest.mark.parametrize("type_", input_types, ids=get_name) def accepts_an_input_type_as_an_input_field_type(type_): schema = _schema_with_input_field(type_) assert validate_schema(schema) == [] @@ -1547,7 +1546,7 @@ def rejects_an_empty_input_field_type(): } ] - @mark.parametrize("type_", not_input_types, ids=get_name) + @pytest.mark.parametrize("type_", not_input_types, ids=get_name) def rejects_a_non_input_type_as_an_input_field_type(type_): schema = _schema_with_input_field(type_) assert validate_schema(schema) == [ @@ -1557,7 +1556,7 @@ def rejects_a_non_input_type_as_an_input_field_type(type_): } ] - 
@mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_an_input_field_type(type_): schema = _schema_with_input_field(type_) assert validate_schema(schema) == [ @@ -2506,19 +2505,17 @@ def rejects_a_circular_interface_implementation(): def describe_assert_valid_schema(): def does_not_throw_on_valid_schemas(): schema = build_schema( - ( - """ + """ type Query { foo: String } """ - ) ) assert_valid_schema(schema) def combines_multiple_errors(): schema = build_schema("type SomeType") - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert_valid_schema(schema) assert ( str(exc_info.value) diff --git a/tests/utilities/test_ast_from_value.py b/tests/utilities/test_ast_from_value.py index de037ca4..cc01df45 100644 --- a/tests/utilities/test_ast_from_value.py +++ b/tests/utilities/test_ast_from_value.py @@ -1,7 +1,6 @@ from math import inf, nan -from pytest import raises - +import pytest from graphql.error import GraphQLError from graphql.language import ( BooleanValueNode, @@ -58,18 +57,18 @@ def converts_int_values_to_int_asts(): # GraphQL spec does not allow coercing non-integer values to Int to # avoid accidental data loss. - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert ast_from_value(123.5, GraphQLInt) msg = str(exc_info.value) assert msg == "Int cannot represent non-integer value: 123.5" # Note: outside the bounds of 32bit signed int. 
- with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert ast_from_value(1e40, GraphQLInt) msg = str(exc_info.value) assert msg == "Int cannot represent non 32-bit signed integer value: 1e+40" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: ast_from_value(nan, GraphQLInt) msg = str(exc_info.value) assert msg == "Int cannot represent non-integer value: nan" @@ -126,7 +125,7 @@ def converts_id_values_to_int_or_string_asts(): assert ast_from_value("01", GraphQLID) == StringValueNode(value="01") - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert ast_from_value(False, GraphQLID) assert str(exc_info.value) == "ID cannot represent value: False" @@ -144,17 +143,17 @@ def converts_using_serialize_from_a_custom_scalar_type(): value="value" ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert ast_from_value(nan, pass_through_scalar) assert str(exc_info.value) == "Cannot convert value to AST: nan." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: ast_from_value(inf, pass_through_scalar) assert str(exc_info.value) == "Cannot convert value to AST: inf." return_null_scalar = GraphQLScalarType( "ReturnNullScalar", - serialize=lambda value: None, + serialize=lambda value: None, # noqa: ARG005 ) assert ast_from_value("value", return_null_scalar) is None @@ -164,10 +163,10 @@ class SomeClass: return_custom_class_scalar = GraphQLScalarType( "ReturnCustomClassScalar", - serialize=lambda value: SomeClass(), + serialize=lambda value: SomeClass(), # noqa: ARG005 ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: ast_from_value("value", return_custom_class_scalar) msg = str(exc_info.value) assert msg == "Cannot convert value to AST: ." 
@@ -188,12 +187,12 @@ def converts_string_values_to_enum_asts_if_possible(): assert ast_from_value(complex_value, my_enum) == EnumValueNode(value="COMPLEX") # Note: case sensitive - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: ast_from_value("hello", my_enum) assert exc_info.value.message == "Enum 'MyEnum' cannot represent value: 'hello'" # Note: not a valid enum value - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: ast_from_value("UNKNOWN_VALUE", my_enum) assert ( exc_info.value.message diff --git a/tests/utilities/test_ast_to_dict.py b/tests/utilities/test_ast_to_dict.py index 3f7c2ca9..8e633fae 100644 --- a/tests/utilities/test_ast_to_dict.py +++ b/tests/utilities/test_ast_to_dict.py @@ -7,7 +7,7 @@ def converts_name_node_to_dict(): node = NameNode(value="test") res = ast_to_dict(node) assert res == {"kind": "name", "value": "test"} - assert list(res)[0] == "kind" + assert next(iter(res)) == "kind" assert ast_to_dict(node, locations=True) == res assert node.to_dict() == res assert node.to_dict(locations=True) == res @@ -122,7 +122,7 @@ def converts_simple_schema_to_dict(): ], "kind": "document", } - assert list(res)[0] == "kind" + assert next(iter(res)) == "kind" def converts_simple_schema_to_dict_with_locations(): ast = parse( @@ -380,7 +380,7 @@ def converts_simple_query_to_dict(): ], "kind": "document", } - assert list(res)[0] == "kind" + assert next(iter(res)) == "kind" def converts_simple_query_to_dict_with_locations(): ast = parse( diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index 0e55c168..2d65d858 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -4,8 +4,7 @@ from copy import deepcopy from typing import Union -from pytest import mark, raises - +import pytest from graphql import graphql_sync from graphql.language import DocumentNode, InterfaceTypeDefinitionNode, parse, 
print_ast from graphql.type import ( @@ -41,7 +40,6 @@ from ..star_wars_schema import star_wars_schema from ..utils import dedent - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -68,12 +66,14 @@ def cycle_sdl(sdl: str) -> str: def expect_ast_node(obj: TypeWithAstNode, expected: str) -> None: - assert obj is not None and obj.ast_node is not None + assert obj is not None + assert obj.ast_node is not None assert print_ast(obj.ast_node) == expected def expect_extension_ast_nodes(obj: TypeWithExtensionAstNodes, expected: str) -> None: - assert obj is not None and obj.extension_ast_nodes is not None + assert obj is not None + assert obj.extension_ast_nodes is not None assert "\n\n".join(print_ast(node) for node in obj.extension_ast_nodes) == expected @@ -89,7 +89,9 @@ def can_use_built_schema_for_limited_execution(): ) ) - root_value = namedtuple("Data", "str")(123) # type: ignore + root_value = namedtuple( # noqa: PYI024 + "Data", "str" + )(123) # type: ignore result = graphql_sync(schema=schema, source="{ str }", root_value=root_value) assert result == ({"str": "123"}, None) @@ -504,7 +506,8 @@ def can_build_recursive_union(): """ ) errors = validate_schema(schema) - assert errors and isinstance(errors, list) + assert errors + assert isinstance(errors, list) def custom_scalar(): sdl = dedent( @@ -1162,7 +1165,7 @@ def rejects_invalid_sdl(): foo: String @unknown } """ - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_schema(sdl) assert str(exc_info.value) == "Unknown directive '@unknown'." 
@@ -1181,7 +1184,7 @@ def throws_on_unknown_types(): unknown: UnknownType } """ - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_schema(sdl, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") @@ -1226,9 +1229,9 @@ def can_deep_copy_pickled_schema(): # check that printing the copied schema gives the same SDL assert print_schema(copied) == sdl - @mark.slow + @pytest.mark.slow() def describe_deepcopy_and_pickle_big(): # pragma: no cover - @mark.timeout(20) + @pytest.mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # use our printing conventions big_schema_sdl = cycle_sdl(big_schema_sdl) @@ -1240,7 +1243,7 @@ def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # check that printing the copied schema gives the same SDL assert print_schema(copied) == big_schema_sdl - @mark.timeout(60) + @pytest.mark.timeout(60) def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 # use our printing conventions big_schema_sdl = cycle_sdl(big_schema_sdl) @@ -1272,7 +1275,7 @@ def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 finally: sys.setrecursionlimit(limit) - @mark.timeout(60) + @pytest.mark.timeout(60) def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 # use our printing conventions big_schema_sdl = cycle_sdl(big_schema_sdl) diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 83c5935e..85b687f4 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -1,7 +1,6 @@ from typing import cast -from pytest import raises - +import pytest from graphql import graphql_sync from graphql.type import ( GraphQLArgument, @@ -651,7 +650,7 @@ def describe_throws_when_given_invalid_introspection(): ) def throws_when_introspection_is_missing_schema_property(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as 
exc_info: # noinspection PyTypeChecker build_client_schema(None) # type: ignore @@ -661,7 +660,7 @@ def throws_when_introspection_is_missing_schema_property(): " and no 'errors' were returned alongside: None." ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker build_client_schema({}) # type: ignore @@ -680,7 +679,7 @@ def throws_when_referenced_unknown_type(): if type_["name"] != "Query" ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( @@ -704,7 +703,7 @@ def throws_when_missing_definition_for_one_of_the_standard_scalars(): if type_["name"] != "Float" ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).endswith( @@ -719,7 +718,7 @@ def throws_when_type_reference_is_missing_name(): assert query_type["name"] == "Query" del query_type["name"] # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == "Unknown type reference: {}." @@ -734,7 +733,7 @@ def throws_when_missing_kind(): assert query_type_introspection["kind"] == "OBJECT" del query_type_introspection["kind"] - with raises( + with pytest.raises( TypeError, match=r"^Invalid or incomplete introspection result\." " Ensure that a full introspection query is used" @@ -756,7 +755,7 @@ def throws_when_missing_interfaces(): assert query_type_introspection["interfaces"] == [] del query_type_introspection["interfaces"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Query interfaces cannot be resolved." 
" Introspection result missing interfaces:" @@ -795,7 +794,7 @@ def throws_when_missing_fields(): assert query_type_introspection["fields"] del query_type_introspection["fields"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Query fields cannot be resolved." " Introspection result missing fields:" @@ -818,7 +817,7 @@ def throws_when_missing_field_args(): assert field["args"] del field["args"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Query fields cannot be resolved." r" Introspection result missing field args: {'name': 'foo', .*}\.$", @@ -840,7 +839,7 @@ def throws_when_output_type_is_used_as_an_arg_type(): assert arg["type"]["name"] == "String" arg["type"]["name"] = "SomeUnion" - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).startswith( @@ -864,7 +863,7 @@ def throws_when_output_type_is_used_as_an_input_value_type(): assert input_field["type"]["name"] == "String" input_field["type"]["name"] = "SomeUnion" - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).startswith( @@ -888,7 +887,7 @@ def throws_when_input_type_is_used_as_a_field_type(): assert field["type"]["name"] == "String" field["type"]["name"] = "SomeInputObject" - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).startswith( @@ -911,7 +910,7 @@ def throws_when_missing_possible_types(): assert some_union_introspection["possibleTypes"] del some_union_introspection["possibleTypes"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing possibleTypes:" r" {'kind': 'UNION', 'name': 'SomeUnion', .*}\.$", @@ -932,7 +931,7 @@ def throws_when_missing_enum_values(): assert some_enum_introspection["enumValues"] del 
some_enum_introspection["enumValues"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing enumValues:" r" {'kind': 'ENUM', 'name': 'SomeEnum', .*}\.$", @@ -953,7 +952,7 @@ def throws_when_missing_input_fields(): assert some_input_object_introspection["inputFields"] del some_input_object_introspection["inputFields"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing inputFields:" r" {'kind': 'INPUT_OBJECT', 'name': 'SomeInputObject', .*}\.$", @@ -968,7 +967,7 @@ def throws_when_missing_directive_locations(): assert some_directive_introspection["locations"] == ["QUERY"] del some_directive_introspection["locations"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing directive locations:" r" {'name': 'SomeDirective', .*}\.$", @@ -983,7 +982,7 @@ def throws_when_missing_directive_args(): assert some_directive_introspection["args"] == [] del some_directive_introspection["args"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing directive args:" r" {'name': 'SomeDirective', .*}\.$", @@ -1002,7 +1001,7 @@ def fails_on_very_deep_lists_more_than_7_levels(): introspection = introspection_from_schema(schema) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( @@ -1021,7 +1020,7 @@ def fails_on_a_very_deep_non_null_more_than_7_levels(): introspection = introspection_from_schema(schema) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( @@ -1069,7 +1068,7 @@ def recursive_interfaces(): {"kind": "OBJECT", "name": "Foo", "ofType": None} ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( "Foo 
interfaces cannot be resolved." @@ -1099,7 +1098,7 @@ def recursive_union(): {"kind": "UNION", "name": "Foo", "ofType": None} ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( "Foo types cannot be resolved." diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index f34e8564..2808b6ac 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -1,8 +1,7 @@ from math import nan from typing import Any, List, NamedTuple, Union -from pytest import raises - +import pytest from graphql.error import GraphQLError from graphql.pyutils import Undefined from graphql.type import ( @@ -362,14 +361,14 @@ def returns_nested_null_for_nested_null_values(): def describe_with_default_on_error(): def throw_error_without_path(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert coerce_input_value(None, GraphQLNonNull(GraphQLInt)) assert exc_info.value.message == ( "Invalid value None: Expected non-nullable type 'Int!' not to be None." 
) def throw_error_with_path(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert coerce_input_value( [None], GraphQLList(GraphQLNonNull(GraphQLInt)) ) diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 7c10b06e..9ce77071 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -1,7 +1,6 @@ from typing import Union -from pytest import raises - +import pytest from graphql import graphql_sync from graphql.language import parse, print_ast from graphql.type import ( @@ -30,7 +29,6 @@ from ..utils import dedent - try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -53,12 +51,14 @@ def expect_extension_ast_nodes(obj: TypeWithExtensionAstNodes, expected: str) -> None: - assert obj is not None and obj.extension_ast_nodes is not None + assert obj is not None + assert obj.extension_ast_nodes is not None assert "\n\n".join(print_ast(node) for node in obj.extension_ast_nodes) == expected def expect_ast_node(obj: TypeWithAstNode, expected: str) -> None: - assert obj is not None and obj.ast_node is not None + assert obj is not None + assert obj.ast_node is not None assert print_ast(obj.ast_node) == expected @@ -1315,7 +1315,7 @@ def rejects_invalid_sdl(): schema = GraphQLSchema() extend_ast = parse("extend schema @unknown") - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value) == "Unknown directive '@unknown'." 
@@ -1335,7 +1335,7 @@ def throws_on_unknown_types(): } """ ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, ast, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") @@ -1347,7 +1347,7 @@ def does_not_allow_replacing_a_default_directive(): """ ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value).startswith( "Directive '@include' already exists in the schema." @@ -1370,7 +1370,7 @@ def does_not_allow_replacing_an_existing_enum_value(): """ ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value).startswith( "Enum value 'SomeEnum.ONE' already exists in the schema." diff --git a/tests/utilities/test_get_introspection_query.py b/tests/utilities/test_get_introspection_query.py index 57e37145..05a5cad5 100644 --- a/tests/utilities/test_get_introspection_query.py +++ b/tests/utilities/test_get_introspection_query.py @@ -5,7 +5,6 @@ from graphql.utilities import build_schema, get_introspection_query from graphql.validation import validate - dummy_schema = build_schema( """ type Query { diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index 9112bdf7..895ade9a 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -2,8 +2,7 @@ import sys from copy import deepcopy -from pytest import mark - +import pytest from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import ( IntrospectionQuery, @@ -106,9 +105,9 @@ def can_deep_copy_pickled_schema(): # check that introspecting the copied schema gives the same result assert introspection_from_schema(copied) == introspected_schema - @mark.slow + @pytest.mark.slow() def 
describe_deepcopy_and_pickle_big(): # pragma: no cover - @mark.timeout(20) + @pytest.mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # introspect the original big schema big_schema = build_schema(big_schema_sdl) @@ -119,7 +118,7 @@ def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 # check that introspecting the copied schema gives the same result assert introspection_from_schema(copied) == expected_introspection - @mark.timeout(60) + @pytest.mark.timeout(60) def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 # introspect the original big schema big_schema = build_schema(big_schema_sdl) @@ -153,7 +152,7 @@ def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 finally: sys.setrecursionlimit(limit) - @mark.timeout(60) + @pytest.mark.timeout(60) def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 # introspect the original big schema big_schema = build_schema(big_schema_sdl) diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 1d60aa41..ac3cbc42 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -87,7 +87,7 @@ def prints_non_null_list_of_string_field(): def prints_list_of_non_null_string_field(): schema = build_single_field_schema( - GraphQLField((GraphQLList(GraphQLNonNull(GraphQLString)))) + GraphQLField(GraphQLList(GraphQLNonNull(GraphQLString))) ) assert expect_printed_schema(schema) == dedent( """ diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 58643603..9c07d1f1 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -1,7 +1,6 @@ from typing import Optional -from pytest import raises - +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind, parse from graphql.utilities import strip_ignored_characters @@ 
-89,7 +88,7 @@ def strips_ignored_characters_from_source(): assert strip_ignored_characters(source) == "{foo{bar}}" def report_document_with_invalid_token(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: strip_ignored_characters('{ foo(arg: "\n"') assert str(exc_info.value) == dedent( diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 977f62ea..aed5cc2a 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -1,15 +1,13 @@ from json import dumps from typing import Optional -from pytest import mark - +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind from graphql.utilities import strip_ignored_characters from ..utils import dedent, gen_fuzz_strings - ignored_tokens = [ # UnicodeBOM "\uFEFF", # Byte Order Mark (U+FEFF) @@ -75,8 +73,8 @@ def lex_value(s: str) -> Optional[str]: def describe_strip_ignored_characters(): - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def strips_documents_with_random_combination_of_ignored_characters(): for ignored in ignored_tokens: ExpectStripped(ignored).to_equal("") @@ -86,8 +84,8 @@ def strips_documents_with_random_combination_of_ignored_characters(): ExpectStripped("".join(ignored_tokens)).to_equal("") - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def strips_random_leading_and_trailing_ignored_tokens(): for token in punctuator_tokens + non_punctuator_tokens: for ignored in ignored_tokens: @@ -101,8 +99,8 @@ def strips_random_leading_and_trailing_ignored_tokens(): ExpectStripped("".join(ignored_tokens) + token).to_equal(token) ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def 
strips_random_ignored_tokens_between_punctuator_tokens(): for left in punctuator_tokens: for right in punctuator_tokens: @@ -118,8 +116,8 @@ def strips_random_ignored_tokens_between_punctuator_tokens(): left + right ) - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): for non_punctuator in non_punctuator_tokens: for punctuator in punctuator_tokens: @@ -137,8 +135,8 @@ def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): punctuator + "".join(ignored_tokens) + non_punctuator ).to_equal(punctuator + non_punctuator) - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): for non_punctuator in non_punctuator_tokens: for punctuator in punctuator_tokens: @@ -160,8 +158,8 @@ def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): non_punctuator + "".join(ignored_tokens) + punctuator ).to_equal(non_punctuator + punctuator) - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space(): for non_punctuator in non_punctuator_tokens: for ignored in ignored_tokens: @@ -178,8 +176,8 @@ def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space() non_punctuator + " ..." 
) - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): for left in non_punctuator_tokens: for right in non_punctuator_tokens: @@ -195,8 +193,8 @@ def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): left + " " + right ) - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def does_not_strip_random_ignored_tokens_embedded_in_the_string(): for ignored in ignored_tokens: ExpectStripped(dumps(ignored)).to_stay_the_same() @@ -206,8 +204,8 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_string(): ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() - @mark.slow - @mark.timeout(10) + @pytest.mark.slow() + @pytest.mark.timeout(10) def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): ignored_tokens_without_formatting = [ token @@ -227,8 +225,8 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' ).to_stay_the_same() - @mark.slow - @mark.timeout(80) + @pytest.mark.slow() + @pytest.mark.timeout(80) def strips_ignored_characters_inside_random_block_strings(): # Testing with length >7 is taking exponentially more time. However it is # highly recommended to test with increased limit if you make any change. 
diff --git a/tests/utilities/test_type_from_ast.py b/tests/utilities/test_type_from_ast.py index dd5cd6e9..282c8f50 100644 --- a/tests/utilities/test_type_from_ast.py +++ b/tests/utilities/test_type_from_ast.py @@ -1,5 +1,4 @@ -from pytest import raises - +import pytest from graphql.language import TypeNode, parse_type from graphql.type import GraphQLList, GraphQLNonNull, GraphQLObjectType from graphql.utilities import type_from_ast @@ -32,7 +31,7 @@ def for_non_null_type_node(): def for_unspecified_type_node(): node = TypeNode() - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: type_from_ast(test_schema, node) msg = str(exc_info.value) assert msg == "Unexpected type node: ." diff --git a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index 699cce6d..8b0cae05 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -24,7 +24,6 @@ from ..fixtures import kitchen_sink_query # noqa: F401 - test_schema = build_schema( """ interface Pet { @@ -185,7 +184,7 @@ def supports_introspection_fields(): class TestVisitor(Visitor): @staticmethod - def enter_field(self, node: OperationDefinitionNode, *_args): + def enter_field(*_args): parent_type = type_info.get_parent_type() type_name = getattr(type_info.get_parent_type(), "name", None) field_def = type_info.get_field_def() @@ -351,6 +350,8 @@ def enter(*args): ), ) + return None + @staticmethod def leave(*args): parent_type = type_info.get_parent_type() diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py index 6e969f93..1760367f 100644 --- a/tests/utilities/test_value_from_ast.py +++ b/tests/utilities/test_value_from_ast.py @@ -112,15 +112,15 @@ def converts_enum_values_according_to_input_coercion_rules(): assert isnan(_value_from("NAN", test_enum)) assert _value_from("NO_CUSTOM_VALUE", test_enum) is Undefined - # Boolean! + # make a Boolean! 
non_null_bool = GraphQLNonNull(GraphQLBoolean) - # [Boolean] + # make a [Boolean] list_of_bool = GraphQLList(GraphQLBoolean) - # [Boolean!] + # make a [Boolean!] list_of_non_null_bool = GraphQLList(non_null_bool) - # [Boolean]! + # make a [Boolean]! non_null_list_of_bool = GraphQLNonNull(list_of_bool) - # [Boolean!]! + # make a [Boolean!]! non_null_list_of_non_mull_bool = GraphQLNonNull(list_of_non_null_bool) def coerces_to_null_unless_non_null(): diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index a40b86e2..80f3620c 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -5,7 +5,6 @@ from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings - __all__ = [ "assert_matching_values", "assert_equal_awaitables_or_values", diff --git a/tests/utils/assert_equal_awaitables_or_values.py b/tests/utils/assert_equal_awaitables_or_values.py index 71805cee..9c4d562c 100644 --- a/tests/utils/assert_equal_awaitables_or_values.py +++ b/tests/utils/assert_equal_awaitables_or_values.py @@ -5,7 +5,6 @@ from .assert_matching_values import assert_matching_values - __all__ = ["assert_equal_awaitables_or_values"] T = TypeVar("T") diff --git a/tests/utils/assert_matching_values.py b/tests/utils/assert_matching_values.py index b7f0fdc7..0cadce37 100644 --- a/tests/utils/assert_matching_values.py +++ b/tests/utils/assert_matching_values.py @@ -1,6 +1,5 @@ from typing import TypeVar - __all__ = ["assert_matching_values"] T = TypeVar("T") diff --git a/tests/utils/dedent.py b/tests/utils/dedent.py index f22ae40d..a65c2d96 100644 --- a/tests/utils/dedent.py +++ b/tests/utils/dedent.py @@ -1,6 +1,5 @@ from textwrap import dedent as _dedent - __all__ = ["dedent"] diff --git a/tests/utils/gen_fuzz_strings.py b/tests/utils/gen_fuzz_strings.py index 50f71589..306984b7 100644 --- a/tests/utils/gen_fuzz_strings.py +++ b/tests/utils/gen_fuzz_strings.py @@ -1,7 +1,6 @@ from itertools import product from typing import Generator - __all__ = 
["gen_fuzz_strings"] diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py index ce202baf..214acfea 100644 --- a/tests/utils/test_assert_equal_awaitables_or_values.py +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -1,11 +1,11 @@ -from pytest import mark, raises +import pytest from . import assert_equal_awaitables_or_values def describe_assert_equal_awaitables_or_values(): def throws_when_given_unequal_values(): - with raises(AssertionError): + with pytest.raises(AssertionError): assert_equal_awaitables_or_values({}, {}, {"test": "test"}) def does_not_throw_when_given_equal_values(): @@ -15,7 +15,7 @@ def does_not_throw_when_given_equal_values(): == test_value ) - @mark.asyncio + @pytest.mark.asyncio() async def does_not_throw_when_given_equal_awaitables(): async def test_value(): return {"test": "test"} @@ -27,17 +27,17 @@ async def test_value(): == await test_value() ) - @mark.asyncio + @pytest.mark.asyncio() async def throws_when_given_unequal_awaitables(): async def test_value(value): return value - with raises(AssertionError): + with pytest.raises(AssertionError): await assert_equal_awaitables_or_values( test_value({}), test_value({}), test_value({"test": "test"}) ) - @mark.asyncio + @pytest.mark.asyncio() async def throws_when_given_mixture_of_equal_values_and_awaitables(): async def test_value(): return {"test": "test"} @@ -45,7 +45,7 @@ async def test_value(): value1 = await test_value() value2 = test_value() - with raises( + with pytest.raises( AssertionError, match=r"Received an invalid mixture of promises and values\.", ): diff --git a/tests/utils/test_assert_matching_values.py b/tests/utils/test_assert_matching_values.py index 7569b2c5..a67191d0 100644 --- a/tests/utils/test_assert_matching_values.py +++ b/tests/utils/test_assert_matching_values.py @@ -1,11 +1,11 @@ -from pytest import raises +import pytest from . 
import assert_matching_values def describe_assert_matching_values(): def throws_when_given_unequal_values(): - with raises(AssertionError): + with pytest.raises(AssertionError): assert_matching_values({}, {}, {"test": "test"}) def does_not_throw_when_given_equal_values(): diff --git a/tests/validation/__init__.py b/tests/validation/__init__.py index 235d2846..ad944ff3 100644 --- a/tests/validation/__init__.py +++ b/tests/validation/__init__.py @@ -1,6 +1,5 @@ """Tests for graphql.validation""" -from pytest import register_assert_rewrite +import pytest - -register_assert_rewrite("tests.validation.harness") +pytest.register_assert_rewrite("tests.validation.harness") diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 68ec7c8f..42e6c768 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -7,7 +7,6 @@ from graphql.validation import SDLValidationRule, ValidationRule from graphql.validation.validate import validate, validate_sdl - __all__ = [ "test_schema", "assert_validation_errors", diff --git a/tests/validation/test_defer_stream_directive_label.py b/tests/validation/test_defer_stream_directive_label.py index 07ac73ea..3ecbcf46 100644 --- a/tests/validation/test_defer_stream_directive_label.py +++ b/tests/validation/test_defer_stream_directive_label.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, DeferStreamDirectiveLabel) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_defer_stream_directive_on_root_field.py b/tests/validation/test_defer_stream_directive_on_root_field.py index 687665f6..9bcdff80 100644 --- a/tests/validation/test_defer_stream_directive_on_root_field.py +++ b/tests/validation/test_defer_stream_directive_on_root_field.py @@ -5,7 +5,6 @@ from .harness import assert_validation_errors - schema = build_schema( """ type Message { diff --git a/tests/validation/test_executable_definitions.py 
b/tests/validation/test_executable_definitions.py index bcfb8538..4a21c63b 100644 --- a/tests/validation/test_executable_definitions.py +++ b/tests/validation/test_executable_definitions.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, ExecutableDefinitionsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_fields_on_correct_type.py b/tests/validation/test_fields_on_correct_type.py index ca279070..d0d6d5f3 100644 --- a/tests/validation/test_fields_on_correct_type.py +++ b/tests/validation/test_fields_on_correct_type.py @@ -7,7 +7,6 @@ from .harness import assert_validation_errors - test_schema = build_schema( """ interface Pet { diff --git a/tests/validation/test_fragments_on_composite_types.py b/tests/validation/test_fragments_on_composite_types.py index 1f9be282..a4957668 100644 --- a/tests/validation/test_fragments_on_composite_types.py +++ b/tests/validation/test_fragments_on_composite_types.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, FragmentsOnCompositeTypesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_known_argument_names.py b/tests/validation/test_known_argument_names.py index 03055459..74099ca6 100644 --- a/tests/validation/test_known_argument_names.py +++ b/tests/validation/test_known_argument_names.py @@ -8,7 +8,6 @@ from .harness import assert_sdl_validation_errors, assert_validation_errors - assert_errors = partial(assert_validation_errors, KnownArgumentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_known_directives.py b/tests/validation/test_known_directives.py index 8c451926..c99921ac 100644 --- a/tests/validation/test_known_directives.py +++ b/tests/validation/test_known_directives.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors, assert_validation_errors - 
schema_with_directives = build_schema( """ type Query { diff --git a/tests/validation/test_known_fragment_names.py b/tests/validation/test_known_fragment_names.py index 1f95c70d..8a9b864b 100644 --- a/tests/validation/test_known_fragment_names.py +++ b/tests/validation/test_known_fragment_names.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, KnownFragmentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_known_type_names.py b/tests/validation/test_known_type_names.py index e4ef19c3..b4124a75 100644 --- a/tests/validation/test_known_type_names.py +++ b/tests/validation/test_known_type_names.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors, assert_validation_errors - assert_errors = partial(assert_validation_errors, KnownTypeNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_lone_anonymous_operation.py b/tests/validation/test_lone_anonymous_operation.py index d3930d64..83e431bf 100644 --- a/tests/validation/test_lone_anonymous_operation.py +++ b/tests/validation/test_lone_anonymous_operation.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, LoneAnonymousOperationRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_lone_schema_definition.py b/tests/validation/test_lone_schema_definition.py index 66080597..9b6d88fa 100644 --- a/tests/validation/test_lone_schema_definition.py +++ b/tests/validation/test_lone_schema_definition.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors - assert_sdl_errors = partial(assert_sdl_validation_errors, LoneSchemaDefinitionRule) assert_sdl_valid = partial(assert_sdl_errors, errors=[]) diff --git a/tests/validation/test_no_fragment_cycles.py b/tests/validation/test_no_fragment_cycles.py index 2eeca95a..3bc60a0a 100644 --- 
a/tests/validation/test_no_fragment_cycles.py +++ b/tests/validation/test_no_fragment_cycles.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, NoFragmentCyclesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_no_schema_introspection.py b/tests/validation/test_no_schema_introspection.py index 5e95e621..03f1ddc8 100644 --- a/tests/validation/test_no_schema_introspection.py +++ b/tests/validation/test_no_schema_introspection.py @@ -5,7 +5,6 @@ from .harness import assert_validation_errors - schema = build_schema( """ type Query { diff --git a/tests/validation/test_no_undefined_variables.py b/tests/validation/test_no_undefined_variables.py index 890b629a..f9537234 100644 --- a/tests/validation/test_no_undefined_variables.py +++ b/tests/validation/test_no_undefined_variables.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, NoUndefinedVariablesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_no_unused_fragments.py b/tests/validation/test_no_unused_fragments.py index 8c25956b..f317d1f5 100644 --- a/tests/validation/test_no_unused_fragments.py +++ b/tests/validation/test_no_unused_fragments.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, NoUnusedFragmentsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_no_unused_variables.py b/tests/validation/test_no_unused_variables.py index 4ed8b4b1..7366ba55 100644 --- a/tests/validation/test_no_unused_variables.py +++ b/tests/validation/test_no_unused_variables.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, NoUnusedVariablesRule) assert_valid = partial(assert_errors, errors=[]) diff --git 
a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py index 8e5b3989..1ebc744e 100644 --- a/tests/validation/test_overlapping_fields_can_be_merged.py +++ b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -5,7 +5,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, OverlappingFieldsCanBeMergedRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_possible_fragment_spreads.py b/tests/validation/test_possible_fragment_spreads.py index a450f890..791edfc1 100644 --- a/tests/validation/test_possible_fragment_spreads.py +++ b/tests/validation/test_possible_fragment_spreads.py @@ -5,7 +5,6 @@ from .harness import assert_validation_errors - test_schema = build_schema( """ interface Being { diff --git a/tests/validation/test_possible_type_extensions.py b/tests/validation/test_possible_type_extensions.py index f6bfbd44..473e0c88 100644 --- a/tests/validation/test_possible_type_extensions.py +++ b/tests/validation/test_possible_type_extensions.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors - assert_errors = partial(assert_sdl_validation_errors, PossibleTypeExtensionsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_provided_required_arguments.py b/tests/validation/test_provided_required_arguments.py index 7a7922ae..0e9607a9 100644 --- a/tests/validation/test_provided_required_arguments.py +++ b/tests/validation/test_provided_required_arguments.py @@ -8,7 +8,6 @@ from .harness import assert_sdl_validation_errors, assert_validation_errors - assert_errors = partial(assert_validation_errors, ProvidedRequiredArgumentsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_scalar_leafs.py b/tests/validation/test_scalar_leafs.py index 364df493..5d7b83d1 100644 --- a/tests/validation/test_scalar_leafs.py +++ 
b/tests/validation/test_scalar_leafs.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, ScalarLeafsRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_single_field_subscriptions.py b/tests/validation/test_single_field_subscriptions.py index 580f3e18..c6278f77 100644 --- a/tests/validation/test_single_field_subscriptions.py +++ b/tests/validation/test_single_field_subscriptions.py @@ -5,7 +5,6 @@ from .harness import assert_validation_errors - schema = build_schema( """ type Message { diff --git a/tests/validation/test_stream_directive_on_list_field.py b/tests/validation/test_stream_directive_on_list_field.py index 6613b15b..5b9b5b8c 100644 --- a/tests/validation/test_stream_directive_on_list_field.py +++ b/tests/validation/test_stream_directive_on_list_field.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, StreamDirectiveOnListField) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_argument_definition_names.py b/tests/validation/test_unique_argument_definition_names.py index 5c1e8a1b..9d6625a7 100644 --- a/tests/validation/test_unique_argument_definition_names.py +++ b/tests/validation/test_unique_argument_definition_names.py @@ -6,7 +6,6 @@ from .harness import assert_sdl_validation_errors - assert_sdl_errors = partial( assert_sdl_validation_errors, UniqueArgumentDefinitionNamesRule ) diff --git a/tests/validation/test_unique_argument_names.py b/tests/validation/test_unique_argument_names.py index e7d5ab16..64aa2b95 100644 --- a/tests/validation/test_unique_argument_names.py +++ b/tests/validation/test_unique_argument_names.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, UniqueArgumentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git 
a/tests/validation/test_unique_directive_names.py b/tests/validation/test_unique_directive_names.py index 54d35638..1cb9dc58 100644 --- a/tests/validation/test_unique_directive_names.py +++ b/tests/validation/test_unique_directive_names.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors - assert_errors = partial(assert_sdl_validation_errors, UniqueDirectiveNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_directives_per_location.py b/tests/validation/test_unique_directives_per_location.py index ea6993d0..4c21842c 100644 --- a/tests/validation/test_unique_directives_per_location.py +++ b/tests/validation/test_unique_directives_per_location.py @@ -6,7 +6,6 @@ from .harness import assert_sdl_validation_errors, assert_validation_errors, test_schema - extension_sdl = """ directive @directive on FIELD | FRAGMENT_DEFINITION directive @directiveA on FIELD | FRAGMENT_DEFINITION diff --git a/tests/validation/test_unique_enum_value_names.py b/tests/validation/test_unique_enum_value_names.py index 560bf97e..5611b45c 100644 --- a/tests/validation/test_unique_enum_value_names.py +++ b/tests/validation/test_unique_enum_value_names.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors - assert_errors = partial(assert_sdl_validation_errors, UniqueEnumValueNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_field_definition_names.py b/tests/validation/test_unique_field_definition_names.py index eefa0f3f..5e9b1c21 100644 --- a/tests/validation/test_unique_field_definition_names.py +++ b/tests/validation/test_unique_field_definition_names.py @@ -7,7 +7,6 @@ from .harness import assert_sdl_validation_errors - assert_errors = partial(assert_sdl_validation_errors, UniqueFieldDefinitionNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_fragment_names.py b/tests/validation/test_unique_fragment_names.py 
index b2270260..37370c9d 100644 --- a/tests/validation/test_unique_fragment_names.py +++ b/tests/validation/test_unique_fragment_names.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, UniqueFragmentNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_input_field_names.py b/tests/validation/test_unique_input_field_names.py index 0c9e2b48..857118da 100644 --- a/tests/validation/test_unique_input_field_names.py +++ b/tests/validation/test_unique_input_field_names.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, UniqueInputFieldNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_operation_names.py b/tests/validation/test_unique_operation_names.py index e80dd611..f2ba8a23 100644 --- a/tests/validation/test_unique_operation_names.py +++ b/tests/validation/test_unique_operation_names.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, UniqueOperationNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_operation_types.py b/tests/validation/test_unique_operation_types.py index fd1028ca..c341e84c 100644 --- a/tests/validation/test_unique_operation_types.py +++ b/tests/validation/test_unique_operation_types.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors - assert_errors = partial(assert_sdl_validation_errors, UniqueOperationTypesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_type_names.py b/tests/validation/test_unique_type_names.py index 0d809f81..1ff03b8f 100644 --- a/tests/validation/test_unique_type_names.py +++ b/tests/validation/test_unique_type_names.py @@ -5,7 +5,6 @@ from .harness import assert_sdl_validation_errors - assert_errors = 
partial(assert_sdl_validation_errors, UniqueTypeNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_unique_variable_names.py b/tests/validation/test_unique_variable_names.py index 7c50ccda..9b5c10e6 100644 --- a/tests/validation/test_unique_variable_names.py +++ b/tests/validation/test_unique_variable_names.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, UniqueVariableNamesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 85d64d6d..37d57e9b 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -1,5 +1,4 @@ -from pytest import raises - +import pytest from graphql.error import GraphQLError from graphql.language import parse from graphql.utilities import TypeInfo, build_schema @@ -46,7 +45,7 @@ def detects_unknown_fields(): def deprecated_validates_using_a_custom_type_info(): # This TypeInfo will never return a valid field. 
- type_info = TypeInfo(test_schema, None, lambda *args: None) + type_info = TypeInfo(test_schema, None, lambda *_args: None) doc = parse( """ @@ -164,5 +163,5 @@ class CustomRule(ValidationRule): def enter_field(self, *_args): raise RuntimeError("Error from custom rule!") - with raises(RuntimeError, match="^Error from custom rule!$"): + with pytest.raises(RuntimeError, match="^Error from custom rule!$"): validate(test_schema, doc, [CustomRule], max_errors=1) diff --git a/tests/validation/test_values_of_correct_type.py b/tests/validation/test_values_of_correct_type.py index 7208ddca..e19228aa 100644 --- a/tests/validation/test_values_of_correct_type.py +++ b/tests/validation/test_values_of_correct_type.py @@ -13,7 +13,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, ValuesOfCorrectTypeRule) assert_valid = partial(assert_errors, errors=[]) @@ -1048,7 +1047,7 @@ def parse_value(value): def reports_error_for_custom_scalar_that_returns_undefined(): custom_scalar = GraphQLScalarType( - "CustomScalar", parse_value=lambda value: Undefined + "CustomScalar", parse_value=lambda _value: Undefined ) schema = GraphQLSchema( diff --git a/tests/validation/test_variables_are_input_types.py b/tests/validation/test_variables_are_input_types.py index 86ee3d80..9440f1b2 100644 --- a/tests/validation/test_variables_are_input_types.py +++ b/tests/validation/test_variables_are_input_types.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = partial(assert_validation_errors, VariablesAreInputTypesRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tests/validation/test_variables_in_allowed_position.py b/tests/validation/test_variables_in_allowed_position.py index 429c781f..03fbbcbb 100644 --- a/tests/validation/test_variables_in_allowed_position.py +++ b/tests/validation/test_variables_in_allowed_position.py @@ -4,7 +4,6 @@ from .harness import assert_validation_errors - assert_errors = 
partial(assert_validation_errors, VariablesInAllowedPositionRule) assert_valid = partial(assert_errors, errors=[]) diff --git a/tox.ini b/tox.ini index fb116dab..9dd30855 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{7,8,9,10,11}, pypy39, black, flake8, isort, mypy, docs +envlist = py3{7,8,9,10,11}, pypy39, ruff, mypy, docs isolated_build = true [gh-actions] @@ -13,26 +13,12 @@ python = pypy3: pypy39 pypy3.9: pypy39 -[testenv:black] +[testenv:ruff] basepython = python3.11 -deps = black==23.3.0 +deps = ruff>=0.2,<0.3 commands = - black src tests -t py310 --check - -[testenv:flake8] -basepython = python3.11 -deps = - flake8>=6,<7 - flake8-bandit>=4.1,<5 - flake8-bugbear==23.5.9 -commands = - flake8 src tests - -[testenv:isort] -basepython = python3.11 -deps = isort>=5.12,<6 -commands = - isort src tests --check-only + ruff check src tests + ruff format --check src tests [testenv:mypy] basepython = python3.11 From 37f8eb2984a63ab1cda849c83e39e954105b9f8f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 8 Feb 2024 22:27:59 +0100 Subject: [PATCH 111/230] Update mypy --- .github/workflows/test.yml | 2 +- README.md | 4 ++-- pyproject.toml | 2 +- src/graphql/execution/execute.py | 8 +++----- src/graphql/type/definition.py | 2 +- src/graphql/utilities/strip_ignored_characters.py | 14 +++++++------- src/graphql/validation/validate.py | 2 +- tests/test_user_registry.py | 4 ++-- tests/type/test_definition.py | 4 ++-- tox.ini | 2 +- 10 files changed, 21 insertions(+), 23 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b7050d6a..c8e32e59 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -21,7 +21,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install "tox>=4.12,<5" "tox-gh-actions>=3.2,<4" + pip install "tox>=3.28,<5" "tox-gh-actions>=3.2,<4" - name: Run unit tests with tox run: tox diff --git a/README.md b/README.md index 
5cf727d1..66c07116 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # GraphQL-core 3 -GraphQL-core 3 is a Python 3.7+ port of [GraphQL.js](https://github.com/graphql/graphql-js), +GraphQL-core 3 is a Python 3.6+ port of [GraphQL.js](https://github.com/graphql/graphql-js), the JavaScript reference implementation for [GraphQL](https://graphql.org/), a query language for APIs created by Facebook. @@ -203,7 +203,7 @@ Design goals for the GraphQL-core 3 library were: Some restrictions (mostly in line with the design goals): -* requires Python 3.7 or newer +* requires Python 3.6 or newer (Python 3.7 and newer in latest version) * does not support some already deprecated methods and options of GraphQL.js * supports asynchronous operations only via async.io (does not support the additional executors in GraphQL-core) diff --git a/pyproject.toml b/pyproject.toml index 45759531..29b9d21e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ optional = true [tool.poetry.group.lint.dependencies] ruff = ">=0.2,<0.3" -mypy = "1.3.0" +mypy = "1.8.0" bump2version = ">=1.0,<2" [tool.poetry.group.doc] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 8884cb7e..2a9f8cc5 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -2039,7 +2039,7 @@ def execute( raise GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS) async def await_result() -> Any: - awaited_result = await result # type: ignore + awaited_result = await result if isinstance(awaited_result, ExecutionResult): return awaited_result return ExecutionResult( @@ -2388,7 +2388,7 @@ def subscribe( return map_async_iterable(result, ensure_single_execution_result) async def await_result() -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: - result_or_iterable = await result # type: ignore + result_or_iterable = await result if isinstance(result_or_iterable, AsyncIterable): return map_async_iterable( result_or_iterable, 
ensure_single_execution_result @@ -2496,9 +2496,7 @@ async def await_result() -> Any: awaited_result_or_stream = await result_or_stream # type: ignore if isinstance(awaited_result_or_stream, ExecutionResult): return awaited_result_or_stream - return context.map_source_to_response( # type: ignore - awaited_result_or_stream - ) + return context.map_source_to_response(awaited_result_or_stream) return await_result() diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 2982ea4f..9bea7eed 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1055,7 +1055,7 @@ def __init__( isinstance(name, str) for name in values ): try: - values = dict(values) # type: ignore + values = dict(values) except (TypeError, ValueError) as error: msg = ( f"{name} values must be an Enum or a mapping" diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 3e2c1658..1824c102 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -11,7 +11,7 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: - """Strip characters that are ignored anyway. + '''Strip characters that are ignored anyway. 
Strips characters that are not significant to the validity or execution of a GraphQL document: @@ -51,20 +51,20 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: SDL example:: - \"\"\" + """ Type description - \"\"\" + """ type Foo { - \"\"\" + """ Field description - \"\"\" + """ bar: String } Becomes:: - \"\"\"Type description\"\"\" type Foo{\"\"\"Field description\"\"\" bar:String} - """ + """Type description""" type Foo{"""Field description""" bar:String} + ''' if not is_source(source): source = Source(cast(str, source)) diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 13c75d89..0035d877 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -59,7 +59,7 @@ def validate( errors: List[GraphQLError] = [] def on_error(error: GraphQLError) -> None: - if len(errors) >= max_errors: # type: ignore + if len(errors) >= max_errors: raise validation_aborted_error errors.append(error) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index bcb8321e..42cb579a 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -492,13 +492,13 @@ async def mutate_users(): ) async def receive_one(): - async for result in subscription_one: # type: ignore # pragma: no cover + async for result in subscription_one: # pragma: no cover received_one.append(result) if len(received_one) == 3: # pragma: no cover else break async def receive_all(): - async for result in subscription_all: # type: ignore # pragma: no cover + async for result in subscription_all: # pragma: no cover received_all.append(result) if len(received_all) == 6: # pragma: no cover else break diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index d57f558e..cb38a678 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -735,7 +735,7 @@ def defines_an_enum_type_with_a_description(): description = "nice enum" enum_type = GraphQLEnumType( 
"SomeEnum", - {}, # type: ignore + {}, description=description, ) assert enum_type.description is description @@ -887,7 +887,7 @@ def accepts_an_enum_type_with_ast_node_and_extension_ast_nodes(): extension_ast_nodes = [EnumTypeExtensionNode()] enum_type = GraphQLEnumType( "SomeEnum", - {}, # type: ignore + {}, ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) diff --git a/tox.ini b/tox.ini index 9dd30855..9121bf74 100644 --- a/tox.ini +++ b/tox.ini @@ -23,7 +23,7 @@ commands = [testenv:mypy] basepython = python3.11 deps = - mypy==1.3.0 + mypy==1.8.0 pytest>=7.3,<8 commands = mypy src tests From d42eddbfd9049d85101c6af6be103ab014fbb0c5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 10 Feb 2024 20:52:00 +0100 Subject: [PATCH 112/230] Support Python 3.12 and update dependencies --- .github/workflows/lint.yml | 8 +- .github/workflows/publish.yml | 8 +- .github/workflows/test.yml | 2 +- poetry.lock | 276 ++++++++++++++---- pyproject.toml | 37 ++- src/graphql/pyutils/is_iterable.py | 4 +- .../execution/test_flatten_async_iterable.py | 2 +- tests/execution/test_map_async_iterable.py | 32 +- tests/execution/test_stream.py | 4 + tox.ini | 40 +-- 10 files changed, 281 insertions(+), 132 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 8dcb79ef..f5ad7802 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -7,12 +7,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Set up Python 3.11 - uses: actions/setup-python@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Install dependencies run: | diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 871ad03c..561b3028 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -10,12 +10,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - 
uses: actions/checkout@v4 - - name: Set up Python 3.11 - uses: actions/setup-python@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' - name: Build wheel and source tarball run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c8e32e59..6f9c3ce6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', 'pypy3.9'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', 'pypy3.9', 'pypy3.10'] steps: - uses: actions/checkout@v3 diff --git a/poetry.lock b/poetry.lock index fb2f90cd..7903bb44 100644 --- a/poetry.lock +++ b/poetry.lock @@ -270,13 +270,24 @@ files = [ [[package]] name = "docutils" -version = "0.18.1" +version = "0.19" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" files = [ - {file = "docutils-0.18.1-py2.py3-none-any.whl", hash = "sha256:23010f129180089fbcd3bc08cfefccb3b890b0050e1ca00c867036e9d161b98c"}, - {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] [[package]] @@ -366,6 +377,25 @@ docs = 
["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -465,44 +495,44 @@ files = [ [[package]] name = "mypy" -version = "1.3.0" +version = "1.4.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, - {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, - {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, - {file = 
"mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, - {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, - {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, - {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, - {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, - {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, - {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, - {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, - {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, - {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, - {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, - {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, - {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, - {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, - {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, - {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, - {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, - {file = "mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, - {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, - {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, - {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, - {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, - {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = 
"mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -510,6 +540,53 @@ install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] +[[package]] +name = "mypy" +version = "1.8.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = 
"mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -583,6 +660,21 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = 
["pytest", "pytest-benchmark"] +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "py" version = "1.11.0" @@ -662,6 +754,28 @@ tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest" +version = "8.0.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "pytest-asyncio" version = "0.21.1" @@ -681,6 +795,24 @@ typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis 
(>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "pytest-asyncio" +version = "0.23.5" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, + {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "pytest-benchmark" version = "4.0.0" @@ -721,17 +853,17 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-describe" -version = "2.1.0" +version = "2.2.0" description = "Describe-style plugin for pytest" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-describe-2.1.0.tar.gz", hash = "sha256:0630c95ac4942ab8dcd8e766236f86436b4984896db0c059fc234fef66fe9732"}, - {file = "pytest_describe-2.1.0-py3-none-any.whl", hash = "sha256:3ea587839363a91ea24e35e442dae46b56bd91d670e63b755e002b0adfc7a7b2"}, + {file = "pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7"}, + {file = "pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d"}, ] [package.dependencies] -pytest = ">=4.6,<8" +pytest = ">=4.6,<9" [[package]] name = "pytest-timeout" @@ -864,20 +996,20 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] [[package]] name = "sphinx" -version = "6.2.1" +version = "7.1.2" description = "Python documentation generator" optional = false python-versions = ">=3.8" files = [ - {file = "Sphinx-6.2.1.tar.gz", hash = "sha256:6d56a34697bb749ffa0152feafc4b19836c755d90a7c59b72bc7dfd371b9cc6b"}, - {file = 
"sphinx-6.2.1-py3-none-any.whl", hash = "sha256:97787ff1fa3256a3eef9eda523a63dbf299f7b47e053cfcf684a1c2a8380c912"}, + {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, + {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.18.1,<0.20" +docutils = ">=0.18.1,<0.21" imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" @@ -899,18 +1031,18 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-rtd-theme" -version = "1.3.0" +version = "2.0.0" description = "Read the Docs theme for Sphinx" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.6" files = [ - {file = "sphinx_rtd_theme-1.3.0-py2.py3-none-any.whl", hash = "sha256:46ddef89cc2416a81ecfbeaceab1881948c014b1b6e4450b815311a89fb977b0"}, - {file = "sphinx_rtd_theme-1.3.0.tar.gz", hash = "sha256:590b030c7abb9cf038ec053b95e5380b5c70d61591eb0b552063fbe7c41f0931"}, + {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, + {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, ] [package.dependencies] -docutils = "<0.19" -sphinx = ">=1.6,<8" +docutils = "<0.21" +sphinx = ">=5,<8" sphinxcontrib-jquery = ">=4,<5" [package.extras] @@ -1088,30 +1220,30 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.10.0" +version = "4.12.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - 
{file = "tox-4.10.0-py3-none-any.whl", hash = "sha256:e4a1b1438955a6da548d69a52350054350cf6a126658c20943261c48ed6d4c92"}, - {file = "tox-4.10.0.tar.gz", hash = "sha256:e041b2165375be690aca0ec4d96360c6906451380520e4665bf274f66112be35"}, + {file = "tox-4.12.1-py3-none-any.whl", hash = "sha256:c07ea797880a44f3c4f200ad88ad92b446b83079d4ccef89585df64cc574375c"}, + {file = "tox-4.12.1.tar.gz", hash = "sha256:61aafbeff1bd8a5af84e54ef6e8402f53c6a6066d0782336171ddfbf5362122e"}, ] [package.dependencies] -cachetools = ">=5.3.1" +cachetools = ">=5.3.2" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.12.2" -packaging = ">=23.1" -platformdirs = ">=3.10" -pluggy = ">=1.2" -pyproject-api = ">=1.5.3" +filelock = ">=3.13.1" +packaging = ">=23.2" +platformdirs = ">=4.1" +pluggy = ">=1.3" +pyproject-api = ">=1.6.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.24.3" +virtualenv = ">=20.25" [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=0.3.1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover 
(>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] [[package]] name = "typed-ast" @@ -1174,6 +1306,17 @@ files = [ {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + [[package]] name = "urllib3" version = "2.0.7" @@ -1244,7 +1387,22 @@ files = [ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", 
"jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "69b91d868497438a43047a8deb7e9ae765ddc2872d8367a17fe65fc681e2d03a" +content-hash = "910d6fe7bf0668879447dda5c6f98241d7facc12f25b2c97ea5e7b22117ba7da" diff --git a/pyproject.toml b/pyproject.toml index 29b9d21e..e606e1dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,8 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11" + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] packages = [ { include = "graphql", from = "src" }, @@ -41,30 +42,40 @@ Changelog = "https://github.com/graphql-python/graphql-core/releases" [tool.poetry.dependencies] python = "^3.7" typing-extensions = [ - { version = "^4.5", python = "<3.10" } + { version = "^4.9", python = ">=3.8,<3.10" }, + { version = "^4.7.1", python = "<3.8" }, ] [tool.poetry.group.test] optional = true [tool.poetry.group.test.dependencies] -pytest = "^7.3" -pytest-asyncio = ">=0.21,<1" +pytest = [ + { version = "^8.0", python = ">=3.8" }, + { version = "^7.4", python = "<3.8"} +] +pytest-asyncio = [ + { version = "^0.23.5", python = ">=3.8" }, + { version = "~0.21.1", python = "<3.8"} +] pytest-benchmark = "^4.0" pytest-cov = "^4.1" -pytest-describe = "^2.1" -pytest-timeout = "^2.1" +pytest-describe = "^2.2" +pytest-timeout = "^2.2" tox = [ - { version = ">=4.12,<5", python = ">=3.8" }, - { version = ">=3.28,<4", python = "<3.8" } + { version = "^4.12", python = ">=3.8" }, + { version = "^3.28", python = "<3.8" } ] [tool.poetry.group.lint] optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.2,<0.3" -mypy = "1.8.0" +ruff = ">=0.2.1,<0.3" +mypy = 
[ + { version = "^1.8", python = ">=3.8" }, + { version = "~1.4", python = "<3.8" } +] bump2version = ">=1.0,<2" [tool.poetry.group.doc] @@ -72,10 +83,10 @@ optional = true [tool.poetry.group.doc.dependencies] sphinx = [ - { version = ">=4,<7", python = ">=3.8" }, + { version = ">=7,<8", python = ">=3.8" }, { version = ">=4,<6", python = "<3.8" } ] -sphinx_rtd_theme = ">=1,<2" +sphinx_rtd_theme = "^2.0" [tool.ruff] line-length = 88 @@ -281,5 +292,5 @@ timeout = "100" filterwarnings = "ignore::pytest.PytestConfigWarning" [build-system] -requires = ["poetry_core>=1.6,<2"] +requires = ["poetry_core>=1.6.1,<2"] build-backend = "poetry.core.masonry.api" diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index 8fb803be..802aef8f 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -1,7 +1,7 @@ """Check whether objects are iterable""" from array import array -from typing import Any, ByteString, Collection, Iterable, Mapping, ValuesView +from typing import Any, Collection, Iterable, Mapping, ValuesView try: from typing import TypeGuard @@ -20,7 +20,7 @@ collection_types[0] if len(collection_types) == 1 else tuple(collection_types) ) iterable_types: Any = Iterable -not_iterable_types: Any = (ByteString, Mapping, str) +not_iterable_types: Any = (bytearray, bytes, str, memoryview, Mapping) def is_collection(value: Any) -> TypeGuard[Collection]: diff --git a/tests/execution/test_flatten_async_iterable.py b/tests/execution/test_flatten_async_iterable.py index 74b8f9c6..357e4cd0 100644 --- a/tests/execution/test_flatten_async_iterable.py +++ b/tests/execution/test_flatten_async_iterable.py @@ -100,7 +100,7 @@ async def nested3() -> AsyncGenerator[float, None]: # throw error with pytest.raises(RuntimeError, match="ouch"): - await doubles.athrow(RuntimeError, "ouch") + await doubles.athrow(RuntimeError("ouch")) @pytest.mark.asyncio() async def 
completely_yields_sub_iterables_even_when_anext_called_in_parallel(): diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index dd4aa3a8..055a61bc 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -151,7 +151,7 @@ async def __anext__(self): await anext(doubles) @pytest.mark.asyncio() - async def allows_throwing_errors_with_values_through_async_iterables(): + async def allows_throwing_errors_with_traceback_through_async_iterables(): class Iterable: def __aiter__(self): return self @@ -163,42 +163,16 @@ async def __anext__(self): assert await anext(one) == 2 - # Throw error with value passed separately try: raise RuntimeError("Ouch") except RuntimeError as error: with pytest.raises(RuntimeError, match="Ouch") as exc_info: - await one.athrow(error.__class__, error) + await one.athrow(error) assert exc_info.value is error # noqa: PT017 - assert exc_info.tb is error.__traceback__ # noqa: PT017 - - with pytest.raises(StopAsyncIteration): - await anext(one) - - @pytest.mark.asyncio() - async def allows_throwing_errors_with_traceback_through_async_iterables(): - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - return 1 - - one = map_async_iterable(Iterable(), double) - - assert await anext(one) == 2 - - # Throw error with traceback passed separately - try: - raise RuntimeError("Ouch") - except RuntimeError as error: - with pytest.raises(RuntimeError) as exc_info: - await one.athrow(error.__class__, None, error.__traceback__) - assert exc_info.tb assert error.__traceback__ # noqa: PT017 - assert exc_info.tb.tb_frame is error.__traceback__.tb_frame # noqa: PT017 + assert exc_info.tb is error.__traceback__ # noqa: PT017 with pytest.raises(StopAsyncIteration): await anext(one) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 67385b3b..f8816d0f 100644 --- a/tests/execution/test_stream.py +++ 
b/tests/execution/test_stream.py
@@ -1801,6 +1801,7 @@ async def iterable(_info):
         with pytest.raises(StopAsyncIteration):
             await anext(iterator)
 
+        await sleep(0)
         assert finished
 
     @pytest.mark.asyncio()
@@ -1850,6 +1851,8 @@ async def __anext__(self):
         with pytest.raises(StopAsyncIteration):
             await anext(iterator)
 
+        await sleep(0)
+        await sleep(0)
         assert iterable.index == 4
 
     @pytest.mark.asyncio()
@@ -1891,4 +1894,5 @@ async def iterable(_info):
         with pytest.raises(StopAsyncIteration):
             await anext(iterator)
 
+        await sleep(0)
         assert finished
diff --git a/tox.ini b/tox.ini
index 9121bf74..d5c9e499 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py3{7,8,9,10,11}, pypy39, ruff, mypy, docs
+envlist = py3{7,8,9,10,11,312}, pypy{39,310}, ruff, mypy, docs
 isolated_build = true
 
 [gh-actions]
@@ -10,43 +10,45 @@ python =
     3.9: py39
     3.10: py310
     3.11: py311
-    pypy3: pypy39
+    3.12: py312
+    pypy3: pypy39
     pypy3.9: pypy39
+    pypy3.10: pypy310
 
 [testenv:ruff]
-basepython = python3.11
-deps = ruff>=0.2,<0.3
+basepython = python3.12
+deps = ruff>=0.2.1,<0.3
 commands =
     ruff check src tests
     ruff format --check src tests
 
 [testenv:mypy]
-basepython = python3.11
+basepython = python3.12
 deps =
-    mypy==1.8.0
-    pytest>=7.3,<8
+    mypy>=1.8.0,<1.9
+    pytest>=8.0,<9
 commands =
     mypy src tests
 
 [testenv:docs]
-basepython = python3.10
+basepython = python3.12
 deps =
-    sphinx>=5.3,<6
-    sphinx_rtd_theme>=1.1,<2
+    sphinx>=7,<8
+    sphinx_rtd_theme>=2.0,<3
 commands =
     sphinx-build -b html -nEW docs docs/_build/html
 
 [testenv]
 deps =
-    pytest>=7.3,<8
-    pytest-asyncio>=0.21,<1
+    pytest>=7.4,<9
+    pytest-asyncio>=0.21.1,<1
     pytest-benchmark>=4,<5
     pytest-cov>=4.1,<5
-    pytest-describe>=2.1,<3
-    pytest-timeout>=2.1,<3
-    py37,py38,py39,pypy39: typing-extensions>=4.5,<5
+    pytest-describe>=2.2,<3
+    pytest-timeout>=2.2,<3
+    py37,py38,py39,pypy39: typing-extensions>=4.7.1,<5
 commands =
-    # to also run the time-consuming tests: tox -e py310 -- --run-slow
-    # to run the benchmarks: tox -e py310 -- -k benchmarks 
--benchmark-enable - py37,py38,py39,py311,pypy39: pytest tests {posargs} - py310: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} + # to also run the time-consuming tests: tox -e py311 -- --run-slow + # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable + py37,py38,py39,py310,py311,pypy39,pypy310: pytest tests {posargs} + py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 63216e300b428e6b5c08ff00ef849f2788575726 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 10 Feb 2024 23:45:31 +0100 Subject: [PATCH 113/230] Add filtering for async iterator lists Replicates graphql/graphql-js@5ae2e064fe2aa4bd43ca894cf50bf3613d797764 --- src/graphql/execution/execute.py | 3 ++ tests/execution/test_defer.py | 82 ++++++++++++++++++++++++++------ 2 files changed, 70 insertions(+), 15 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 2a9f8cc5..edf8bfe4 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1233,6 +1233,7 @@ async def catch_error( error = located_error( raw_error, field_nodes, field_path.as_list() ) + self.filter_subsequent_payloads(field_path) handle_field_error(error, item_type, errors) return None @@ -1243,10 +1244,12 @@ async def catch_error( except Exception as raw_error: append_result(None) error = located_error(raw_error, field_nodes, field_path.as_list()) + self.filter_subsequent_payloads(field_path) handle_field_error(error, item_type, errors) except Exception as raw_error: append_result(None) error = located_error(raw_error, field_nodes, field_path.as_list()) + self.filter_subsequent_payloads(field_path) handle_field_error(error, item_type, errors) break index += 1 diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 32d205f0..866a1c13 100644 --- a/tests/execution/test_defer.py +++ 
b/tests/execution/test_defer.py @@ -1,5 +1,5 @@ from asyncio import sleep -from typing import Any, Dict, List, NamedTuple +from typing import Any, AsyncGenerator, Dict, List, NamedTuple import pytest from graphql.error import GraphQLError @@ -15,7 +15,7 @@ ) from graphql.execution.execute import DeferredFragmentRecord from graphql.language import DocumentNode, parse -from graphql.pyutils import Path +from graphql.pyutils import Path, is_awaitable from graphql.type import ( GraphQLField, GraphQLID, @@ -26,17 +26,35 @@ GraphQLString, ) + +def resolve_null_sync(_obj, _info) -> None: + """A resolver returning a null value synchronously.""" + return + + +async def resolve_null_async(_obj, _info) -> None: + """A resolver returning a null value asynchronously.""" + return + + friend_type = GraphQLObjectType( - "Friend", {"id": GraphQLField(GraphQLID), "name": GraphQLField(GraphQLString)} + "Friend", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "asyncNonNullErrorField": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=resolve_null_async + ), + }, ) class Friend(NamedTuple): - name: str id: int + name: str -friends = [Friend("Han", 2), Friend("Leia", 3), Friend("C-3PO", 4)] +friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] async def resolve_slow(_obj, _info) -> str: @@ -50,14 +68,10 @@ async def resolve_bad(_obj, _info) -> str: raise RuntimeError("bad") -def resolve_null_sync(_obj, _info) -> None: - """Simulate a resolver returning a null value synchronously.""" - return - - -async def resolve_null_async(_obj, _info) -> None: - """Simulate a resolver returning a null value asynchronously.""" - return +async def resolve_friends_async(_obj, _info) -> AsyncGenerator[Friend, None]: + """A slow async generator yielding the first friend.""" + await sleep(0) + yield friends[0] hero_type = GraphQLObjectType( @@ -76,10 +90,13 @@ async def resolve_null_async(_obj, _info) -> None: "friends": GraphQLField( 
GraphQLList(friend_type), resolve=lambda _obj, _info: friends ), + "asyncFriends": GraphQLField( + GraphQLList(friend_type), resolve=resolve_friends_async + ), }, ) -hero = Friend("Luke", 1) +hero = Friend(1, "Luke") query = GraphQLObjectType( "Query", {"hero": GraphQLField(hero_type, resolve=lambda _obj, _info: hero)} @@ -90,6 +107,8 @@ async def resolve_null_async(_obj, _info) -> None: async def complete(document: DocumentNode, root_value: Any = None) -> Any: result = experimental_execute_incrementally(schema, document, root_value) + if is_awaitable(result): + result = await result if isinstance(result, ExperimentalIncrementalExecutionResults): results: List[Any] = [result.initial_result.formatted] @@ -882,6 +901,38 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): }, ] + @pytest.mark.asyncio() + async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): + document = parse( + """ + query { + hero { + asyncFriends { + asyncNonNullErrorField + ...NameFragment @defer + } + } + } + fragment NameFragment on Friend { + name + } + """ + ) + + result = await complete(document) + + assert result == { + "data": {"hero": {"asyncFriends": [None]}}, + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Friend.asyncNonNullErrorField.", + "locations": [{"line": 5, "column": 19}], + "path": ["hero", "asyncFriends", 0, "asyncNonNullErrorField"], + } + ], + } + @pytest.mark.asyncio() async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): document = parse( @@ -918,7 +969,8 @@ async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync [ { "message": "Executing this GraphQL operation would unexpectedly" - " produce multiple payloads (due to @defer or @stream directive)" + " produce multiple payloads" + " (due to @defer or @stream directive)" } ], ) From 4a87525b02fa80e080026946594c3815e467a3bb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 
00:10:20 +0100 Subject: [PATCH 114/230] Fix envlist in tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index d5c9e499..d0bf90d3 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{7,8,9,10,11,312}, pypy{39,310}, ruff, mypy, docs +envlist = py3{7,8,9,10,11,12}, pypy3{9,10}, ruff, mypy, docs isolated_build = true [gh-actions] From e34f020d6730c3b01095f03d12b6ac75f29f65e2 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 19:15:21 +0100 Subject: [PATCH 115/230] polish: add additional test for filtering Replicates graphql/graphql-js@bd5aae742b86ea433c2a643b4a6f3951b13713a2 --- src/graphql/execution/execute.py | 2 +- tests/execution/test_stream.py | 48 ++++++++++++++++++++++++++++++-- 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index edf8bfe4..5efb433a 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1172,7 +1172,7 @@ async def complete_async_iterator_value( ) -> List[Any]: """Complete an async iterator. - Complete a async iterator value by completing the result and calling + Complete an async iterator value by completing the result and calling recursively until all the results are completed. 
""" errors = async_payload_record.errors if async_payload_record else self.errors diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index f8816d0f..d58852bf 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -43,11 +43,11 @@ async def anext(iterator): # noqa: A001 class Friend(NamedTuple): - name: str id: int + name: str -friends = [Friend("Luke", 1), Friend("Han", 2), Friend("Leia", 3)] +friends = [Friend(1, "Luke"), Friend(2, "Han"), Friend(3, "Leia")] query = GraphQLObjectType( "Query", @@ -1186,6 +1186,50 @@ async def friend_list(_info): }, } + @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): + document = parse( + """ + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + name + } + nonNullScalarField + } + } + """ + ) + + async def friend_list(_info): + await sleep(0) + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + "nestedFriendList": friend_list, + "nonNullScalarField": lambda _info: None, + } + }, + ) + + assert result == { + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " NestedObject.nonNullScalarField.", + "locations": [{"line": 7, "column": 17}], + "path": ["nestedObject", "nonNullScalarField"], + }, + ], + "data": { + "nestedObject": None, + }, + } + @pytest.mark.asyncio() async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( From cf795527c57d5c9d7323cec80a2b90224baafd27 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 19:21:59 +0100 Subject: [PATCH 116/230] Fix typo in the error message for max tokens Replicates graphql/graphql-js@d1c83e0b9632640fbeaf124ab1aef4e28054b18e --- src/graphql/language/parser.py | 2 +- tests/language/test_parser.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 2c1e8c0e..23a69b4a 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -1253,7 +1253,7 @@ def advance_lexer(self) -> None: raise GraphQLSyntaxError( self._lexer.source, token.start, - f"Document contains more that {max_tokens} tokens." + f"Document contains more than {max_tokens} tokens." " Parsing aborted.", ) diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 2199a8fc..74f3cf8f 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -113,19 +113,19 @@ def parse_provides_useful_error_when_using_source(): """ ) - def limits_maximum_number_of_tokens(): + def limits_by_a_maximum_number_of_tokens(): parse("{ foo }", max_tokens=3) with pytest.raises( GraphQLSyntaxError, match="Syntax Error:" - r" Document contains more that 2 tokens\. Parsing aborted\.", + r" Document contains more than 2 tokens\. Parsing aborted\.", ): parse("{ foo }", max_tokens=2) parse('{ foo(bar: "baz") }', max_tokens=8) with pytest.raises( GraphQLSyntaxError, match="Syntax Error:" - r" Document contains more that 7 tokens\. Parsing aborted\.", + r" Document contains more than 7 tokens\. 
Parsing aborted\.", ): parse('{ foo(bar: "baz") }', max_tokens=7) From fccb0bb82740a3278bbb13f37226a5dbe29cb587 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 20:01:37 +0100 Subject: [PATCH 117/230] incrementalDelivery: refactoring and streamlining Replicates graphql/graphql-js@80325b5b23d481829c6e31c621538bb74a371832 --- src/graphql/execution/execute.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 5efb433a..12a4453b 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1207,7 +1207,7 @@ async def complete_async_iterator_value( ) break - field_path = path.add_key(index, None) + item_path = path.add_key(index, None) try: try: value = await anext(iterator) @@ -1218,7 +1218,7 @@ async def complete_async_iterator_value( item_type, field_nodes, info, - field_path, + item_path, value, async_payload_record, ) @@ -1237,19 +1237,18 @@ async def catch_error( handle_field_error(error, item_type, errors) return None - append_result(catch_error(completed_item, field_path)) + append_result(catch_error(completed_item, item_path)) append_awaitable(index) else: append_result(completed_item) except Exception as raw_error: append_result(None) - error = located_error(raw_error, field_nodes, field_path.as_list()) - self.filter_subsequent_payloads(field_path) + error = located_error(raw_error, field_nodes, item_path.as_list()) + self.filter_subsequent_payloads(item_path) handle_field_error(error, item_type, errors) except Exception as raw_error: append_result(None) - error = located_error(raw_error, field_nodes, field_path.as_list()) - self.filter_subsequent_payloads(field_path) + error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, errors) break index += 1 @@ -1316,7 +1315,6 @@ def complete_list_value( # No need to modify the info object containing 
the path, since from here on # it is not ever accessed by resolver functions. item_path = path.add_key(index, None) - completed_item: AwaitableOrValue[Any] if ( stream @@ -1334,6 +1332,9 @@ def complete_list_value( previous_async_payload_record, ) continue + + completed_item: AwaitableOrValue[Any] + if is_awaitable(item): # noinspection PyShadowingNames async def await_completed(item: Any, item_path: Path) -> Any: @@ -1828,7 +1829,7 @@ async def execute_stream_iterator_item( info: GraphQLResolveInfo, item_type: GraphQLOutputType, async_payload_record: StreamRecord, - field_path: Path, + item_path: Path, ) -> Any: """Execute stream iterator item.""" if iterator in self._canceled_iterators: @@ -1836,7 +1837,7 @@ async def execute_stream_iterator_item( try: item = await anext(iterator) completed_item = self.complete_value( - item_type, field_nodes, info, field_path, item, async_payload_record + item_type, field_nodes, info, item_path, item, async_payload_record ) return ( @@ -1850,9 +1851,9 @@ async def execute_stream_iterator_item( raise StopAsyncIteration from raw_error except Exception as raw_error: - error = located_error(raw_error, field_nodes, field_path.as_list()) + error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, async_payload_record.errors) - self.filter_subsequent_payloads(field_path, async_payload_record) + self.filter_subsequent_payloads(item_path, async_payload_record) async def execute_stream_iterator( self, @@ -1870,13 +1871,13 @@ async def execute_stream_iterator( previous_async_payload_record = parent_context while True: - field_path = Path(path, index, None) + item_path = Path(path, index, None) async_payload_record = StreamRecord( - label, field_path, iterator, previous_async_payload_record, self + label, item_path, iterator, previous_async_payload_record, self ) awaitable_data = self.execute_stream_iterator_item( - iterator, field_modes, info, item_type, async_payload_record, field_path + 
iterator, field_modes, info, item_type, async_payload_record, item_path ) try: From 0c843f9d15837e6bdf4ecd3e3fa129c98fe75d96 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 20:09:09 +0100 Subject: [PATCH 118/230] fix(incrementalDelivery): filtering should never filter the error source Replicates graphql/graphql-js@9997e987d548d86c29e683671a3e6a47b49e50fd --- src/graphql/execution/execute.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 12a4453b..3a6202e9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1001,7 +1001,7 @@ async def await_completed() -> Any: except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, return_type, errors) - self.filter_subsequent_payloads(path) + self.filter_subsequent_payloads(path, async_payload_record) return None return await_completed() @@ -1009,7 +1009,7 @@ async def await_completed() -> Any: except Exception as raw_error: error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, return_type, errors) - self.filter_subsequent_payloads(path) + self.filter_subsequent_payloads(path, async_payload_record) return None return completed @@ -1225,15 +1225,17 @@ async def complete_async_iterator_value( if is_awaitable(completed_item): # noinspection PyShadowingNames async def catch_error( - completed_item: Awaitable[Any], field_path: Path + completed_item: Awaitable[Any], item_path: Path ) -> Any: try: return await completed_item except Exception as raw_error: error = located_error( - raw_error, field_nodes, field_path.as_list() + raw_error, field_nodes, item_path.as_list() + ) + self.filter_subsequent_payloads( + item_path, async_payload_record ) - self.filter_subsequent_payloads(field_path) handle_field_error(error, item_type, errors) return None @@ -1244,7 +1246,7 @@ 
async def catch_error( except Exception as raw_error: append_result(None) error = located_error(raw_error, field_nodes, item_path.as_list()) - self.filter_subsequent_payloads(item_path) + self.filter_subsequent_payloads(item_path, async_payload_record) handle_field_error(error, item_type, errors) except Exception as raw_error: append_result(None) @@ -1354,7 +1356,7 @@ async def await_completed(item: Any, item_path: Path) -> Any: raw_error, field_nodes, item_path.as_list() ) handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads(item_path) + self.filter_subsequent_payloads(item_path, async_payload_record) return None return completed @@ -1379,14 +1381,16 @@ async def await_completed(item: Any, item_path: Path) -> Any: raw_error, field_nodes, item_path.as_list() ) handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads(item_path) + self.filter_subsequent_payloads( + item_path, async_payload_record + ) return None completed_item = await_completed(completed_item, item_path) except Exception as raw_error: error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads(item_path) + self.filter_subsequent_payloads(item_path, async_payload_record) completed_item = None if is_awaitable(completed_item): From 04c4dd18d3d03372be39cb03cd7ceecc67fd39f1 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 20:12:42 +0100 Subject: [PATCH 119/230] polish(incrementalDelivery): filter function is always passed a path Replicates graphql/graphql-js@41bc274f27fdfceede1822aaa90ffb84bf0b1d7b --- src/graphql/execution/execute.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 3a6202e9..8870ff33 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1866,9 +1866,9 @@ async def execute_stream_iterator( 
field_modes: List[FieldNode], info: GraphQLResolveInfo, item_type: GraphQLOutputType, - path: Optional[Path], - label: Optional[str], - parent_context: Optional[AsyncPayloadRecord], + path: Path, + label: Optional[str] = None, + parent_context: Optional[AsyncPayloadRecord] = None, ) -> None: """Execute stream iterator.""" index = initial_index @@ -1912,11 +1912,11 @@ async def execute_stream_iterator( def filter_subsequent_payloads( self, - null_path: Optional[Path] = None, + null_path: Path, current_async_record: Optional[AsyncPayloadRecord] = None, ) -> None: """Filter subsequent payloads.""" - null_path_list = null_path.as_list() if null_path else [] + null_path_list = null_path.as_list() for async_record in list(self.subsequent_payloads): if async_record is current_async_record: # don't remove payload from where error originates From 4aeece0fd3f7ef9195d3db0be02eb2ab07661099 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Feb 2024 20:53:56 +0100 Subject: [PATCH 120/230] Fix coverage and formatting --- tests/execution/test_stream.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index d58852bf..e2fdb80b 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1187,11 +1187,10 @@ async def friend_list(_info): } @pytest.mark.asyncio() - @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): document = parse( """ - query { + query { nestedObject { nestedFriendList @stream(initialCount: 0) { name @@ -1203,8 +1202,8 @@ async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): ) async def friend_list(_info): - await sleep(0) - yield friends[0] + await sleep(0) # pragma: no cover + yield friends[0] # pragma: no cover result = await complete( document, From e8035a88473de6075d6e3add9e65d7aa1df80af0 Mon Sep 17 00:00:00 2001 From: 
Kohei Morita Date: Thu, 15 Feb 2024 05:37:16 +0900 Subject: [PATCH 121/230] Fix performance degradation on handling conflict fields (#212) Contributed by Kohei Morita Replicates graphql/graphql-js@f94b511386c7e47bd0380dcd56553dc063320226 --- .../rules/overlapping_fields_can_be_merged.py | 43 +++++++++++++------ .../test_repeated_fields_benchmark.py | 27 ++++++++++++ .../test_overlapping_fields_can_be_merged.py | 17 ++++++++ 3 files changed, 75 insertions(+), 12 deletions(-) create mode 100644 tests/benchmarks/test_repeated_fields_benchmark.py diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index fe0a6adb..92279254 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -10,9 +10,8 @@ FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, - ObjectFieldNode, - ObjectValueNode, SelectionSetNode, + ValueNode, print_ast, ) from ...type import ( @@ -558,7 +557,7 @@ def find_conflict( ) # Two field calls must have the same arguments. 
- if stringify_arguments(node1) != stringify_arguments(node2): + if not same_arguments(node1, node2): return (response_name, "they have differing arguments"), [node1], [node2] directives1 = node1.directives @@ -598,14 +597,34 @@ def find_conflict( return None # no conflict -def stringify_arguments(field_node: Union[FieldNode, DirectiveNode]) -> str: - input_object_with_args = ObjectValueNode( - fields=tuple( - ObjectFieldNode(name=arg_node.name, value=arg_node.value) - for arg_node in field_node.arguments - ) - ) - return print_ast(sort_value_node(input_object_with_args)) +def same_arguments( + node1: Union[FieldNode, DirectiveNode], node2: Union[FieldNode, DirectiveNode] +) -> bool: + args1 = node1.arguments + args2 = node2.arguments + + if args1 is None or len(args1) == 0: + return args2 is None or len(args2) == 0 + + if args2 is None or len(args2) == 0: + return False + + if len(args1) != len(args2): + return False + + values2 = {arg.name.value: arg.value for arg in args2} + + for arg1 in args1: + value1 = arg1.value + value2 = values2.get(arg1.name.value) + if value2 is None or stringify_value(value1) != stringify_value(value2): + return False + + return True + + +def stringify_value(value: ValueNode) -> str: + return print_ast(sort_value_node(value)) def get_stream_directive( @@ -627,7 +646,7 @@ def same_streams( return True if stream1 and stream2: # check if both fields have equivalent streams - return stringify_arguments(stream1) == stringify_arguments(stream2) + return same_arguments(stream1, stream2) # fields have a mix of stream and no stream return False diff --git a/tests/benchmarks/test_repeated_fields_benchmark.py b/tests/benchmarks/test_repeated_fields_benchmark.py new file mode 100644 index 00000000..0d3b5e4c --- /dev/null +++ b/tests/benchmarks/test_repeated_fields_benchmark.py @@ -0,0 +1,27 @@ +from graphql import ( + GraphQLField, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, + graphql_sync, +) + + +schema = GraphQLSchema( + 
query=GraphQLObjectType( + name="Query", + fields={ + "hello": GraphQLField( + GraphQLString, + resolve=lambda obj, info: "world", + ) + }, + ) +) +source = "query {{ {fields} }}".format(fields="hello " * 250) + + +def test_many_repeated_fields(benchmark): + print(source) + result = benchmark(lambda: graphql_sync(schema, source)) + assert not result.errors diff --git a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py index 1ebc744e..8745f67e 100644 --- a/tests/validation/test_overlapping_fields_can_be_merged.py +++ b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -166,6 +166,23 @@ def different_stream_directive_second_missing_args(): ], ) + def different_stream_directive_extra_argument(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "streamLabel", initialCount: 1, extraArg: true) + }""", + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. 
Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + def mix_of_stream_and_no_stream(): assert_errors( """ From 3d3393f76d6aaa74aeca3d2492846e2ea8a08aed Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 14 Feb 2024 22:42:50 +0100 Subject: [PATCH 122/230] Minor cleanup and simplification --- .../validation/rules/overlapping_fields_can_be_merged.py | 6 +++--- ...peated_fields_benchmark.py => test_repeated_fields.py} | 8 +++----- tests/execution/test_stream.py | 1 + 3 files changed, 7 insertions(+), 8 deletions(-) rename tests/benchmarks/{test_repeated_fields_benchmark.py => test_repeated_fields.py} (72%) diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 92279254..67714c40 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -603,10 +603,10 @@ def same_arguments( args1 = node1.arguments args2 = node2.arguments - if args1 is None or len(args1) == 0: - return args2 is None or len(args2) == 0 + if not args1: + return not args2 - if args2 is None or len(args2) == 0: + if not args2: return False if len(args1) != len(args2): diff --git a/tests/benchmarks/test_repeated_fields_benchmark.py b/tests/benchmarks/test_repeated_fields.py similarity index 72% rename from tests/benchmarks/test_repeated_fields_benchmark.py rename to tests/benchmarks/test_repeated_fields.py index 0d3b5e4c..daba6169 100644 --- a/tests/benchmarks/test_repeated_fields_benchmark.py +++ b/tests/benchmarks/test_repeated_fields.py @@ -6,22 +6,20 @@ graphql_sync, ) - schema = GraphQLSchema( query=GraphQLObjectType( name="Query", fields={ "hello": GraphQLField( GraphQLString, - resolve=lambda obj, info: "world", + resolve=lambda _obj, _info: "world", ) }, ) ) -source = "query {{ {fields} }}".format(fields="hello " * 250) 
+source = f"{{ {'hello ' * 250}}}" def test_many_repeated_fields(benchmark): - print(source) result = benchmark(lambda: graphql_sync(schema, source)) - assert not result.errors + assert result == ({"hello": "world"}, None) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index e2fdb80b..9ab60f9f 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1297,6 +1297,7 @@ async def friend_list(_info): ] @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): document = parse( """ From c698ab509f638c30df0a08aa3b57f40124664c00 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 13:02:43 +0100 Subject: [PATCH 123/230] refactor: introduce complete_list_item_value Replicates graphql/graphql-js@40ff40a21c710372330e65f0fb58f13c2df92a77 --- src/graphql/execution/execute.py | 193 +++++++++++++++---------------- tests/execution/test_stream.py | 1 + 2 files changed, 94 insertions(+), 100 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 8870ff33..9afd6b75 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1177,11 +1177,10 @@ async def complete_async_iterator_value( """ errors = async_payload_record.errors if async_payload_record else self.errors stream = self.get_stream_values(field_nodes, path) - is_awaitable = self.is_awaitable + complete_list_item_value = self.complete_list_item_value awaitable_indices: List[int] = [] append_awaitable = awaitable_indices.append completed_results: List[Any] = [] - append_result = completed_results.append index = 0 while True: if ( @@ -1213,46 +1212,23 @@ async def complete_async_iterator_value( value = await anext(iterator) except StopAsyncIteration: break - try: - completed_item = self.complete_value( - item_type, - field_nodes, - info, - item_path, - value, - 
async_payload_record, - ) - if is_awaitable(completed_item): - # noinspection PyShadowingNames - async def catch_error( - completed_item: Awaitable[Any], item_path: Path - ) -> Any: - try: - return await completed_item - except Exception as raw_error: - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - self.filter_subsequent_payloads( - item_path, async_payload_record - ) - handle_field_error(error, item_type, errors) - return None - - append_result(catch_error(completed_item, item_path)) - append_awaitable(index) - else: - append_result(completed_item) - except Exception as raw_error: - append_result(None) - error = located_error(raw_error, field_nodes, item_path.as_list()) - self.filter_subsequent_payloads(item_path, async_payload_record) - handle_field_error(error, item_type, errors) except Exception as raw_error: - append_result(None) error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, errors) + completed_results.append(None) break + if complete_list_item_value( + value, + completed_results, + errors, + item_type, + field_nodes, + info, + item_path, + async_payload_record, + ): + append_awaitable(index) + index += 1 if not awaitable_indices: @@ -1307,12 +1283,11 @@ def complete_list_value( # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine # object. - is_awaitable = self.is_awaitable + complete_list_item_value = self.complete_list_item_value awaitable_indices: List[int] = [] append_awaitable = awaitable_indices.append previous_async_payload_record = async_payload_record completed_results: List[Any] = [] - append_result = completed_results.append for index, item in enumerate(result): # No need to modify the info object containing the path, since from here on # it is not ever accessed by resolver functions. 
@@ -1335,67 +1310,17 @@ def complete_list_value( ) continue - completed_item: AwaitableOrValue[Any] - - if is_awaitable(item): - # noinspection PyShadowingNames - async def await_completed(item: Any, item_path: Path) -> Any: - try: - completed = self.complete_value( - item_type, - field_nodes, - info, - item_path, - await item, - async_payload_record, - ) - if is_awaitable(completed): - return await completed - except Exception as raw_error: - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads(item_path, async_payload_record) - return None - return completed - - completed_item = await_completed(item, item_path) - else: - try: - completed_item = self.complete_value( - item_type, - field_nodes, - info, - item_path, - item, - async_payload_record, - ) - if is_awaitable(completed_item): - # noinspection PyShadowingNames - async def await_completed(item: Any, item_path: Path) -> Any: - try: - return await item - except Exception as raw_error: - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads( - item_path, async_payload_record - ) - return None - - completed_item = await_completed(completed_item, item_path) - except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) - handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads(item_path, async_payload_record) - completed_item = None - - if is_awaitable(completed_item): + if complete_list_item_value( + item, + completed_results, + errors, + item_type, + field_nodes, + info, + item_path, + async_payload_record, + ): append_awaitable(index) - append_result(completed_item) if not awaitable_indices: return completed_results @@ -1418,6 +1343,74 @@ async def get_completed_results() -> List[Any]: return get_completed_results() + def complete_list_item_value( + 
self, + item: Any, + complete_results: List[Any], + errors: List[GraphQLError], + item_type: GraphQLOutputType, + field_nodes: List[FieldNode], + info: GraphQLResolveInfo, + item_path: Path, + async_payload_record: Optional[AsyncPayloadRecord], + ) -> bool: + """Complete a list item value by adding it to the completed results. + + Returns True if the value is awaitable. + """ + is_awaitable = self.is_awaitable + try: + if is_awaitable(item): + completed_item: Any + + async def await_completed() -> Any: + completed = self.complete_value( + item_type, + field_nodes, + info, + item_path, + await item, + async_payload_record, + ) + return await completed if is_awaitable(completed) else completed + + completed_item = await_completed() + else: + completed_item = self.complete_value( + item_type, + field_nodes, + info, + item_path, + item, + async_payload_record, + ) + + if is_awaitable(completed_item): + # noinspection PyShadowingNames + async def catch_error() -> Any: + try: + return await completed_item + except Exception as raw_error: + error = located_error( + raw_error, field_nodes, item_path.as_list() + ) + handle_field_error(error, item_type, errors) + self.filter_subsequent_payloads(item_path, async_payload_record) + return None + + complete_results.append(catch_error()) + return True + + complete_results.append(completed_item) + + except Exception as raw_error: + error = located_error(raw_error, field_nodes, item_path.as_list()) + handle_field_error(error, item_type, errors) + self.filter_subsequent_payloads(item_path, async_payload_record) + complete_results.append(None) + + return False + @staticmethod def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: """Complete a leaf value. 
diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 9ab60f9f..84719bb9 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1230,6 +1230,7 @@ async def friend_list(_info): } @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( """ From 38659a31b2f2f50fb436009adedf15262f2c9991 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 16:49:05 +0100 Subject: [PATCH 124/230] Add test for mixing sync/async resolvers Replicates graphql/graphql-js@5009d9f61bfd26771b5dc26672ae3ee9474dbaec --- tests/execution/test_executor.py | 50 ++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index f0c1477d..d15a119a 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -514,6 +514,56 @@ async def asyncReturnErrorWithExtensions(self, _info): ], ) + def handles_sync_errors_combined_with_async_ones(): + is_async_resolver_finished = False + + async def async_resolver(_obj, _info): + nonlocal is_async_resolver_finished + sleep = asyncio.sleep + sleep(0) + sleep(0) + sleep(0) + is_async_resolver_finished = True + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "syncNullError": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=lambda _obj, _info: None + ), + "asyncNullError": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=async_resolver + ), + }, + ) + ) + + document = parse( + """ + { + asyncNullError + syncNullError + } + """ + ) + + result = execute(schema, document) + + assert is_async_resolver_finished is False + + assert result == ( + None, + [ + { + "message": "Cannot return null" + " for non-nullable field Query.syncNullError.", + "locations": [(4, 15)], + "path": ["syncNullError"], + } + ], + ) + def 
full_response_path_is_included_for_non_nullable_fields(): def resolve_ok(*_args): return {} From eed117e96f4347929d5c688dbe41eac9aef91ed9 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 16:51:57 +0100 Subject: [PATCH 125/230] Fix formatting in schema in test_abstract Replicates graphql/graphql-js@f851eba93167b04d6be1373ff27927b16352e202 --- tests/execution/test_abstract.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index becca13d..30bdae28 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -526,7 +526,7 @@ def describe_interface_type(): interface Pet { name: String - } + } type Cat implements Pet { name: String From 05c264024277d0b1f5acca6292380fb7e413ecae Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 18:00:17 +0100 Subject: [PATCH 126/230] fix(incrementalDelivery): fix null bubbling with async iterables Replicates graphql/graphql-js@0b7daed9811731362c71900e12e5ea0d1ecc7f1f --- src/graphql/execution/execute.py | 18 +++++---- tests/execution/test_executor.py | 6 +-- tests/execution/test_stream.py | 64 +++++++++++++++++++++++++++++++- 3 files changed, 73 insertions(+), 15 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 9afd6b75..23907903 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1873,12 +1873,15 @@ async def execute_stream_iterator( label, item_path, iterator, previous_async_payload_record, self ) - awaitable_data = self.execute_stream_iterator_item( - iterator, field_modes, info, item_type, async_payload_record, item_path - ) - try: - data = await awaitable_data + data = await self.execute_stream_iterator_item( + iterator, + field_modes, + info, + item_type, + async_payload_record, + item_path, + ) except StopAsyncIteration: if async_payload_record.errors: 
async_payload_record.add_items(None) # pragma: no cover @@ -1886,16 +1889,15 @@ async def execute_stream_iterator( del self.subsequent_payloads[async_payload_record] break except GraphQLError as error: - # entire stream has errored and bubbled upwards + async_payload_record.errors.append(error) self.filter_subsequent_payloads(path, async_payload_record) + async_payload_record.add_items(None) if iterator: # pragma: no cover else with suppress(Exception): await iterator.aclose() # type: ignore # running generators cannot be closed since Python 3.8, # so we need to remember that this iterator is already canceled self._canceled_iterators.add(iterator) - async_payload_record.add_items(None) - async_payload_record.errors.append(error) break async_payload_record.add_items([data]) diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index d15a119a..be9e8965 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -519,11 +519,7 @@ def handles_sync_errors_combined_with_async_ones(): async def async_resolver(_obj, _info): nonlocal is_async_resolver_finished - sleep = asyncio.sleep - sleep(0) - sleep(0) - sleep(0) - is_async_resolver_finished = True + is_async_resolver_finished = True # pragma: no cover schema = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 84719bb9..f8bedc62 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1020,6 +1020,68 @@ async def scalar_list(_info): @pytest.mark.asyncio() async def handles_async_error_in_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + await sleep(0) + return {"nonNullName": throw() if i < 0 else friends[i].name} + + def get_friends(_info): + return [get_friend(0), get_friend(-1), 
get_friend(1)] + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "friendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["friendList", 1], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"nonNullName": "Han"}], + "path": ["friendList", 2], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio() + async def handles_async_error_in_complete_value_for_non_nullable_list(): document = parse( """ query { @@ -1230,7 +1292,6 @@ async def friend_list(_info): } @pytest.mark.asyncio() - @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( """ @@ -1298,7 +1359,6 @@ async def friend_list(_info): ] @pytest.mark.asyncio() - @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): document = parse( """ From 551c6c4944d22ce89b6d7bee0d331a99a8ee4585 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 18:08:20 +0100 Subject: [PATCH 127/230] Fix comments in defer/stream validation rules Replicates graphql/graphql-js@d23e5debf4cf74110a806b01a40d5c683dcae7a6 --- src/graphql/validation/rules/stream_directive_on_list_field.py | 2 +- tests/validation/test_defer_stream_directive_on_root_field.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py index 0006915c..f0ab3ef4 100644 --- a/src/graphql/validation/rules/stream_directive_on_list_field.py +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py 
@@ -11,7 +11,7 @@ class StreamDirectiveOnListField(ASTValidationRule): - """Stream directive on list field + """Stream directives are used on list fields A GraphQL document is only valid if stream directives are used on list fields. """ diff --git a/tests/validation/test_defer_stream_directive_on_root_field.py b/tests/validation/test_defer_stream_directive_on_root_field.py index 9bcdff80..0997f140 100644 --- a/tests/validation/test_defer_stream_directive_on_root_field.py +++ b/tests/validation/test_defer_stream_directive_on_root_field.py @@ -170,7 +170,7 @@ def defer_fragment_spread_on_nested_subscription_field(): """ subscription { subscriptionField { - ...nestedFragment + ...nestedFragment @defer } } fragment nestedFragment on Message { From 7987576aeb274e1c33c63d3291a94dbc2a963e2b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 18:22:39 +0100 Subject: [PATCH 128/230] extend_schema: preserve "description" and "extensions" Replicates graphql/graphql-js@6b5c8af150350201d0d67f3eb6f6f44cb6f92288 --- src/graphql/utilities/extend_schema.py | 11 +++++++---- tests/utilities/test_extend_schema.py | 10 ++++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 906383e7..ffa2420e 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -259,10 +259,13 @@ def extend_schema_args( for directive in schema_kwargs["directives"] ) + tuple(self.build_directive(directive) for directive in directive_defs), - description=schema_def.description.value - if schema_def and schema_def.description - else None, - extensions={}, + description=( + schema_def.description.value + if schema_def and schema_def.description + else None + ) + or schema_kwargs["description"], + extensions=schema_kwargs["extensions"], ast_node=schema_def or schema_kwargs["ast_node"], extension_ast_nodes=schema_kwargs["extension_ast_nodes"] + 
tuple(schema_extensions), diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 9ce77071..9afd707e 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -135,6 +135,16 @@ def does_not_modify_built_in_types_and_directives(): assert extended_schema.directives == specified_directives + def preserves_original_schema_config(): + description = "A schema description" + extensions = {"foo": "bar"} + schema = GraphQLSchema(description=description, extensions=extensions) + + extended_schema = extend_schema(schema, parse("scalar Bar")) + + assert extended_schema.description == description + assert extended_schema.extensions is extensions + def extends_objects_by_adding_new_fields(): schema = build_schema( ''' From 5feaeebd133a0c2bad1313a31d2a838658c5f8a5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 15 Feb 2024 22:17:23 +0100 Subject: [PATCH 129/230] polish: do not repeat is_awaitable check Replicates graphql/graphql-js@7fd1ddb9eeaba378a6445543be179b35d6c1ee55 --- src/graphql/execution/execute.py | 152 +++++++++++++++++-------------- tests/execution/test_executor.py | 2 + tests/execution/test_stream.py | 49 ++++++++++ 3 files changed, 133 insertions(+), 70 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 23907903..54bd0ec1 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1359,11 +1359,11 @@ def complete_list_item_value( Returns True if the value is awaitable. 
""" is_awaitable = self.is_awaitable - try: - if is_awaitable(item): - completed_item: Any - async def await_completed() -> Any: + if is_awaitable(item): + # noinspection PyShadowingNames + async def await_completed() -> Any: + try: completed = self.complete_value( item_type, field_nodes, @@ -1373,21 +1373,28 @@ async def await_completed() -> Any: async_payload_record, ) return await completed if is_awaitable(completed) else completed + except Exception as raw_error: + error = located_error(raw_error, field_nodes, item_path.as_list()) + handle_field_error(error, item_type, errors) + self.filter_subsequent_payloads(item_path, async_payload_record) + return None - completed_item = await_completed() - else: - completed_item = self.complete_value( - item_type, - field_nodes, - info, - item_path, - item, - async_payload_record, - ) + complete_results.append(await_completed()) + return True + + try: + completed_item = self.complete_value( + item_type, + field_nodes, + info, + item_path, + item, + async_payload_record, + ) if is_awaitable(completed_item): # noinspection PyShadowingNames - async def catch_error() -> Any: + async def await_completed() -> Any: try: return await completed_item except Exception as raw_error: @@ -1398,7 +1405,7 @@ async def catch_error() -> Any: self.filter_subsequent_payloads(item_path, async_payload_record) return None - complete_results.append(catch_error()) + complete_results.append(await_completed()) return True complete_results.append(completed_item) @@ -1728,15 +1735,17 @@ def execute_stream_field( parent_context: Optional[AsyncPayloadRecord] = None, ) -> AsyncPayloadRecord: """Execute stream field.""" + is_awaitable = self.is_awaitable async_payload_record = StreamRecord( label, item_path, None, parent_context, self ) completed_item: Any - try: - try: - if self.is_awaitable(item): - async def await_completed_item() -> Any: + if is_awaitable(item): + # noinspection PyShadowingNames + async def await_completed_items() -> 
Optional[List[Any]]: + try: + try: completed = self.complete_value( item_type, field_nodes, @@ -1745,76 +1754,79 @@ async def await_completed_item() -> Any: await item, async_payload_record, ) - return ( + return [ await completed if self.is_awaitable(completed) else completed + ] + except Exception as raw_error: + error = located_error( + raw_error, field_nodes, item_path.as_list() + ) + handle_field_error( + error, item_type, async_payload_record.errors ) + self.filter_subsequent_payloads(item_path, async_payload_record) + return [None] + except GraphQLError as error: + async_payload_record.errors.append(error) + self.filter_subsequent_payloads(path, async_payload_record) + return None - completed_item = await_completed_item() + async_payload_record.add_items(await_completed_items()) + return async_payload_record - else: - completed_item = self.complete_value( - item_type, - field_nodes, - info, - item_path, - item, - async_payload_record, - ) + try: + try: + completed_item = self.complete_value( + item_type, + field_nodes, + info, + item_path, + item, + async_payload_record, + ) - if self.is_awaitable(completed_item): + completed_items: Any - async def await_completed_item() -> Any: + if is_awaitable(completed_item): + # noinspection PyShadowingNames + async def await_completed_items() -> Optional[List[Any]]: # noinspection PyShadowingNames try: - return await completed_item - except Exception as raw_error: - # noinspection PyShadowingNames - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - handle_field_error( - error, item_type, async_payload_record.errors - ) - self.filter_subsequent_payloads( - item_path, async_payload_record - ) + try: + return [await completed_item] + except Exception as raw_error: # pragma: no cover + # noinspection PyShadowingNames + error = located_error( + raw_error, field_nodes, item_path.as_list() + ) + handle_field_error( + error, item_type, async_payload_record.errors + ) + self.filter_subsequent_payloads( 
+ item_path, async_payload_record + ) + return [None] + except GraphQLError as error: # pragma: no cover + async_payload_record.errors.append(error) + self.filter_subsequent_payloads(path, async_payload_record) return None - complete_item = await_completed_item() - + completed_items = await_completed_items() else: - complete_item = completed_item + completed_items = [completed_item] + except Exception as raw_error: error = located_error(raw_error, field_nodes, item_path.as_list()) handle_field_error(error, item_type, async_payload_record.errors) - self.filter_subsequent_payloads( # pragma: no cover - item_path, async_payload_record - ) - complete_item = None # pragma: no cover + self.filter_subsequent_payloads(item_path, async_payload_record) + completed_items = [None] except GraphQLError as error: async_payload_record.errors.append(error) self.filter_subsequent_payloads(item_path, async_payload_record) - async_payload_record.add_items(None) - return async_payload_record - - completed_items: AwaitableOrValue[Optional[List[Any]]] - if self.is_awaitable(complete_item): - - async def await_completed_items() -> Optional[List[Any]]: - # noinspection PyShadowingNames - try: - return [await complete_item] # type: ignore - except GraphQLError as error: - async_payload_record.errors.append(error) - self.filter_subsequent_payloads(path, async_payload_record) - return None - - completed_items = await_completed_items() - else: - completed_items = [complete_item] + completed_items = None async_payload_record.add_items(completed_items) return async_payload_record diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index be9e8965..b70ed483 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -514,6 +514,7 @@ async def asyncReturnErrorWithExtensions(self, _info): ], ) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def handles_sync_errors_combined_with_async_ones(): 
is_async_resolver_finished = False @@ -560,6 +561,7 @@ async def async_resolver(_obj, _info): ], ) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def full_response_path_is_included_for_non_nullable_fields(): def resolve_ok(*_args): return {} diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index f8bedc62..ccfd1f93 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -536,6 +536,54 @@ async def await_friend(f): }, ] + @pytest.mark.asyncio() + async def can_stream_a_field_that_returns_a_list_with_nested_async_fields(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def get_name(f): + return f.name + + async def get_id(f): + return f.id + + result = await complete( + document, + { + "friendList": lambda _info: [ + {"name": get_name(f), "id": get_id(f)} for f in friends + ] + }, + ) + assert result == [ + { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ] + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"name": "Leia", "id": "3"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + @pytest.mark.asyncio() async def handles_error_in_list_of_awaitables_before_initial_count_reached(): document = parse( @@ -1292,6 +1340,7 @@ async def friend_list(_info): } @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( """ From 9bb0b052a8fc3cf9a080332cc5b0bba8e5d2adea Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 17 Feb 2024 09:38:09 +0100 Subject: [PATCH 130/230] Refactor using a new complete_awaitable_value() method Replicates graphql/graphql-js@1564174b0dc26e0adf7ff2833716d06606b06a20 --- src/graphql/execution/execute.py | 99 ++++++++++++++------------------ 1 file changed, 43 insertions(+), 
56 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 54bd0ec1..183ec607 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -969,26 +969,9 @@ def execute_field( result = resolve_fn(source, info, **args) if self.is_awaitable(result): - # noinspection PyShadowingNames - async def await_result() -> Any: - try: - completed = self.complete_value( - return_type, - field_nodes, - info, - path, - await result, - async_payload_record, - ) - if self.is_awaitable(completed): - return await completed - except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) - handle_field_error(error, return_type, errors) - return None - return completed - - return await_result() + return self.complete_awaitable_value( + return_type, field_nodes, info, path, result, async_payload_record + ) completed = self.complete_value( return_type, field_nodes, info, path, result, async_payload_record @@ -1131,6 +1114,37 @@ def complete_value( ) # pragma: no cover raise TypeError(msg) # pragma: no cover + async def complete_awaitable_value( + self, + return_type: GraphQLOutputType, + field_nodes: List[FieldNode], + info: GraphQLResolveInfo, + path: Path, + result: Any, + async_payload_record: Optional[AsyncPayloadRecord] = None, + ) -> Any: + """Complete an awaitable value.""" + try: + resolved = await result + completed = self.complete_value( + return_type, + field_nodes, + info, + path, + resolved, + async_payload_record, + ) + if self.is_awaitable(completed): + completed = await completed + except Exception as raw_error: + errors = ( + async_payload_record.errors if async_payload_record else self.errors + ) + error = located_error(raw_error, field_nodes, path.as_list()) + handle_field_error(error, return_type, errors) + completed = None + return completed + def get_stream_values( self, field_nodes: List[FieldNode], path: Path ) -> Optional[StreamArguments]: @@ -1361,25 +1375,11 @@ 
def complete_list_item_value( is_awaitable = self.is_awaitable if is_awaitable(item): - # noinspection PyShadowingNames - async def await_completed() -> Any: - try: - completed = self.complete_value( - item_type, - field_nodes, - info, - item_path, - await item, - async_payload_record, - ) - return await completed if is_awaitable(completed) else completed - except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) - handle_field_error(error, item_type, errors) - self.filter_subsequent_payloads(item_path, async_payload_record) - return None - - complete_results.append(await_completed()) + complete_results.append( + self.complete_awaitable_value( + item_type, field_nodes, info, item_path, item, async_payload_record + ) + ) return True try: @@ -1745,29 +1745,16 @@ def execute_stream_field( # noinspection PyShadowingNames async def await_completed_items() -> Optional[List[Any]]: try: - try: - completed = self.complete_value( + return [ + await self.complete_awaitable_value( item_type, field_nodes, info, item_path, - await item, + item, async_payload_record, ) - return [ - await completed - if self.is_awaitable(completed) - else completed - ] - except Exception as raw_error: - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - handle_field_error( - error, item_type, async_payload_record.errors - ) - self.filter_subsequent_payloads(item_path, async_payload_record) - return [None] + ] except GraphQLError as error: async_payload_record.errors.append(error) self.filter_subsequent_payloads(path, async_payload_record) From d82cc3276abf0ed5c80b459eaa8f1868d3c8ce07 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 17 Feb 2024 09:53:35 +0100 Subject: [PATCH 131/230] polish: narrow map_source_to_response return type Replicates graphql/graphql-js@baf11a551875b6560869ba2086e466525b34675b --- src/graphql/execution/execute.py | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff 
--git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 183ec607..af992acf 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1651,18 +1651,16 @@ def collect_subfields( def map_source_to_response( self, result_or_stream: Union[ExecutionResult, AsyncIterable[Any]] - ) -> AwaitableOrValue[ - Union[ - AsyncGenerator[ - Union[ - ExecutionResult, - InitialIncrementalExecutionResult, - SubsequentIncrementalExecutionResult, - ], - None, + ) -> Union[ + AsyncGenerator[ + Union[ + ExecutionResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, ], - ExecutionResult, - ] + None, + ], + ExecutionResult, ]: """Map source result to response. From 921d566c6bb767710651c570f7648695b54a2ccd Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 17 Feb 2024 09:58:30 +0100 Subject: [PATCH 132/230] Fix spelling Replicates graphql/graphql-js@735b43d1df943b1a0784df734f403dcde57c6cae --- tests/utilities/test_build_client_schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 85b687f4..518fb5bf 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -327,7 +327,7 @@ def builds_a_schema_with_field_arguments(): """A field with a two args""" two( - """This is an list of int arg""" + """This is a list of int arg""" listArg: [Int] """This is a required arg""" From 8f8102ff2b78167ef12dc603824e6a3b4f4e64a4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 17 Feb 2024 10:28:53 +0100 Subject: [PATCH 133/230] fix: invalid original_error propagation in custom scalars Replicates graphql/graphql-js@76e47fcd7d432f515d5bb99e95af1851148a0c54 --- src/graphql/execution/values.py | 2 +- tests/execution/test_variables.py | 62 ++++++++++++++++++++++++++++++- 2 files changed, 61 insertions(+), 3 deletions(-) diff --git 
a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 3080a1d7..640f9ea9 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -136,7 +136,7 @@ def on_input_value_error( GraphQLError( prefix + "; " + error.message, var_def_node, # noqa: B023 - original_error=error.original_error, + original_error=error, ) ) diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py index 88cf180e..8e82ebec 100644 --- a/tests/execution/test_variables.py +++ b/tests/execution/test_variables.py @@ -1,6 +1,7 @@ from math import nan from typing import Any, Dict, Optional +from graphql.error import GraphQLError from graphql.execution import ExecutionResult, execute_sync from graphql.execution.values import get_variable_values from graphql.language import OperationDefinitionNode, StringValueNode, ValueNode, parse @@ -21,6 +22,25 @@ GraphQLString, ) +TestFaultyScalarGraphQLError = GraphQLError( + "FaultyScalarErrorMessage", extensions={"code": "FaultyScalarExtensionCode"} +) + + +def faulty_parse_value(value: str) -> str: + raise TestFaultyScalarGraphQLError + + +def faulty_parse_literal(ast: ValueNode, _variables=None) -> str: + raise TestFaultyScalarGraphQLError + + +TestFaultyScalar = GraphQLScalarType( + name="FaultyScalar", + parse_value=faulty_parse_value, + parse_literal=faulty_parse_literal, +) + def parse_serialized_value(value: str) -> str: assert value == "SerializedValue" @@ -47,6 +67,7 @@ def parse_literal_value(ast: ValueNode, _variables=None) -> str: "b": GraphQLInputField(GraphQLList(GraphQLString)), "c": GraphQLInputField(GraphQLNonNull(GraphQLString)), "d": GraphQLInputField(TestComplexScalar), + "e": GraphQLInputField(TestFaultyScalar), }, ) @@ -253,6 +274,27 @@ def properly_runs_parse_literal_on_complex_scalar_types(): None, ) + def errors_on_faulty_scalar_type_input(): + result = execute_query( + """ + { + fieldWithObjectInput(input: {c: "foo", e: "bar"}) + } + """ + ) + + assert result == ( + 
{"fieldWithObjectInput": None}, + [ + { + "message": "Argument 'input' has invalid value" + ' { c: "foo", e: "bar" }.', + "path": ["fieldWithObjectInput"], + "locations": [(3, 51)], + } + ], + ) + def describe_using_variables(): doc = """ query ($input: TestInputObject) { @@ -365,6 +407,22 @@ def executes_with_complex_scalar_input(): None, ) + def errors_on_faulty_scalar_type_input(): + params = {"input": {"c": "foo", "e": "SerializedValue"}} + result = execute_query(doc, params) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " 'SerializedValue' at 'input.e'; FaultyScalarErrorMessage", + "locations": [(2, 24)], + "extensions": {"code": "FaultyScalarExtensionCode"}, + } + ], + ) + def errors_on_null_for_nested_non_null(): params = {"input": {"a": "foo", "b": "bar", "c": None}} result = execute_query(doc, params) @@ -676,8 +734,8 @@ def reports_error_for_array_passed_into_string_input(): ) errors = result.errors - assert errors is not None - assert errors[0].original_error is None + assert errors + assert errors[0].original_error def reports_error_for_non_provided_variables_for_non_nullable_inputs(): # Note: this test would typically fail validation before From 85c12854eb559f4ead3b3ef2f318f7da2cb53de5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 17 Feb 2024 11:00:11 +0100 Subject: [PATCH 134/230] Add missing filter call in complete_awaitable_value() --- src/graphql/execution/execute.py | 1 + tests/execution/test_subscribe.py | 2 +- tests/execution/test_variables.py | 8 +++----- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index af992acf..74ead0af 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1142,6 +1142,7 @@ async def complete_awaitable_value( ) error = located_error(raw_error, field_nodes, path.as_list()) handle_field_error(error, return_type, errors) + 
self.filter_subsequent_payloads(path, async_payload_record) completed = None return completed diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 1db123e4..9c133da9 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -488,7 +488,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): ], ) - assert result.errors[0].original_error is None + assert result.errors[0].original_error # Once a subscription returns a valid AsyncIterator, it can still yield errors. diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py index 8e82ebec..7a9ebb82 100644 --- a/tests/execution/test_variables.py +++ b/tests/execution/test_variables.py @@ -27,11 +27,11 @@ ) -def faulty_parse_value(value: str) -> str: +def faulty_parse_value(_value: str) -> str: raise TestFaultyScalarGraphQLError -def faulty_parse_literal(ast: ValueNode, _variables=None) -> str: +def faulty_parse_literal(_ast: ValueNode, _variables=None) -> str: raise TestFaultyScalarGraphQLError @@ -733,9 +733,7 @@ def reports_error_for_array_passed_into_string_input(): ], ) - errors = result.errors - assert errors - assert errors[0].original_error + assert result.errors[0].original_error def reports_error_for_non_provided_variables_for_non_nullable_inputs(): # Note: this test would typically fail validation before From e256148efca89f461939af3170f54d4e4e5f9816 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 17 Feb 2024 11:19:43 +0100 Subject: [PATCH 135/230] Alpha release v3.3.0a4 with some fixes and updates --- .bumpversion.cfg | 2 +- README.md | 2 +- docs/conf.py | 216 ++++++++++++++++-------------- poetry.lock | 14 +- pyproject.toml | 6 +- tests/execution/test_executor.py | 1 + tests/execution/test_stream.py | 1 + tests/execution/test_variables.py | 4 +- 8 files changed, 131 insertions(+), 115 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e22c7dc9..61892e80 100644 --- 
a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a3 +current_version = 3.3.0a4 commit = False tag = False diff --git a/README.md b/README.md index 66c07116..313af1ba 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ reliable and compatible with GraphQL.js. The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0 and supports Python version 3.7 and newer. -You can also try out the latest alpha version 3.3.0a3 of GraphQL-core +You can also try out the latest alpha version 3.3.0a4 of GraphQL-core which is up-to-date with GraphQL.js version 17.0.0a2. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. diff --git a/docs/conf.py b/docs/conf.py index 246db043..414333bf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # GraphQL-core 3 documentation build configuration file, created by # sphinx-quickstart on Thu Jun 21 16:28:30 2018. @@ -30,29 +29,29 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', + "sphinx.ext.autodoc", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. 
-project = 'GraphQL-core 3' -copyright = '2023, Christoph Zwerschke' -author = 'Christoph Zwerschke' +project = "GraphQL-core 3" +copyright = "2024, Christoph Zwerschke" +author = "Christoph Zwerschke" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -61,14 +60,14 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = '3.3.0a3' +version = release = "3.3.0a4" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -82,23 +81,23 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. 
# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # AutoDoc configuration autoclass_content = "class" autodoc_default_options = { - 'members': True, - 'inherited-members': True, - 'undoc-members': True, - 'show-inheritance': True + "members": True, + "inherited-members": True, + "undoc-members": True, + "show-inheritance": True, } autosummary_generate = True autodoc_type_aliases = { - 'AwaitableOrValue': 'graphql.pyutils.AwaitableOrValue', - 'FormattedSourceLocation': 'graphql.language.FormattedSourceLocation', - 'Middleware': 'graphql.execution.Middleware', - 'TypeMap': 'graphql.schema.TypeMap' + "AwaitableOrValue": "graphql.pyutils.AwaitableOrValue", + "FormattedSourceLocation": "graphql.language.FormattedSourceLocation", + "Middleware": "graphql.execution.Middleware", + "TypeMap": "graphql.schema.TypeMap", } # GraphQL-core top level modules with submodules that can be omitted. @@ -106,33 +105,41 @@ # qualified form, but the documentation has the shorter form. # We need to give autodoc a little help in this cases. 
graphql_modules = { - 'error': ['graphql_error'], - 'execution': ['execute', 'middleware'], - 'language': ['ast', 'directive_locations', 'location', - 'source', 'token_kind', 'visitor'], - 'pyutils': ['simple_pub_sub', 'frozen_list', 'path'], - 'type': ['definition', 'directives', 'schema'], - 'utilities': ['find_breaking_changes', 'type_info'], - 'validation': ['rules', 'validation_context']} + "error": ["graphql_error"], + "execution": ["execute", "middleware"], + "language": [ + "ast", + "directive_locations", + "location", + "source", + "token_kind", + "visitor", + ], + "pyutils": ["simple_pub_sub", "frozen_list", "path"], + "type": ["definition", "directives", "schema"], + "utilities": ["find_breaking_changes", "type_info"], + "validation": ["rules", "validation_context"], +} # GraphQL-core classes that autodoc sometimes cannot find # (e.g. where specified as string in type hints). # We need to give autodoc a little help in this cases, too: graphql_classes = { - 'GraphQLAbstractType': 'type', - 'GraphQLFieldResolver': 'type', - 'GraphQLObjectType': 'type', - 'GraphQLOutputType': 'type', - 'GraphQLTypeResolver': 'type', - 'AwaitableOrValue': 'execution', - 'Middleware': 'execution', - 'Node': 'language', - 'Source': 'language', - 'SourceLocation': 'language' + "GraphQLAbstractType": "type", + "GraphQLFieldResolver": "type", + "GraphQLObjectType": "type", + "GraphQLOutputType": "type", + "GraphQLTypeResolver": "type", + "AwaitableOrValue": "execution", + "Middleware": "execution", + "Node": "language", + "Source": "language", + "SourceLocation": "language", } # ignore the following undocumented or internal references: -ignore_references = set(''' +ignore_references = set( + """ GNT GT KT T VT enum.Enum traceback @@ -163,77 +170,79 @@ graphql.validation.validation_context.VariableUsage graphql.validation.rules.known_argument_names.KnownArgumentNamesOnDirectivesRule graphql.validation.rules.provided_required_arguments.ProvidedRequiredArgumentsOnDirectivesRule 
-'''.split()) +""".split() +) ignore_references.update(__builtins__.keys()) def on_missing_reference(app, env, node, contnode): """Fix or skip any missing references.""" - if node.get('refdomain') != 'py': + if node.get("refdomain") != "py": return None - target = node.get('reftarget') + target = node.get("reftarget") if not target: return None - if target in ignore_references or target.endswith('Kwargs'): + if target in ignore_references or target.endswith("Kwargs"): return contnode - typ = node.get('reftype') - name = target.rsplit('.', 1)[-1] - if name in ('GT', 'GNT', 'KT', 'T', 'VT'): + typ = node.get("reftype") + name = target.rsplit(".", 1)[-1] + if name in ("GT", "GNT", "KT", "T", "VT"): return contnode - if typ == 'obj': - if target.startswith('typing.'): - if name in ('Any', 'Optional', 'Union'): + if typ == "obj": + if target.startswith("typing."): + if name in ("Any", "Optional", "Union"): return contnode - if typ != 'class': + if typ != "class": return None - if '.' in target: # maybe too specific - base_module, target = target.split('.', 1) - if base_module == 'graphql': - if '.' not in target: + if "." in target: # maybe too specific + base_module, target = target.split(".", 1) + if base_module == "graphql": + if "." not in target: return None - base_module, target = target.split('.', 1) - if '.' not in target: + base_module, target = target.split(".", 1) + if "." not in target: return None sub_modules = graphql_modules.get(base_module) if not sub_modules: - return - sub_module = target.split('.', 1)[0] + return None + sub_module = target.split(".", 1)[0] if sub_module not in sub_modules: return None - target = 'graphql.' + base_module + '.' + target.rsplit('.', 1)[-1] + target = "graphql." + base_module + "." + target.rsplit(".", 1)[-1] else: # maybe not specific enough base_module = graphql_classes.get(target) if not base_module: return None - target = 'graphql.' + base_module + '.' + target + target = "graphql." + base_module + "." 
+ target # replace target - if contnode.__class__.__name__ == 'Text': + if contnode.__class__.__name__ == "Text": contnode = contnode.__class__(target) - elif contnode.__class__.__name__ == 'literal': + elif contnode.__class__.__name__ == "literal": if len(contnode.children) != 1: return None textnode = contnode.children[0] contnode.children[0] = textnode.__class__(target) else: return None - node['reftarget'] = target - fromdoc = node.get('refdoc') + node["reftarget"] = target + fromdoc = node.get("refdoc") if not fromdoc: - doc_module = node.get('py:module') + doc_module = node.get("py:module") if doc_module: - if doc_module.startswith('graphql.'): - doc_module = doc_module.split('.', 1)[-1] - if doc_module not in graphql_modules and doc_module != 'graphql': + if doc_module.startswith("graphql."): + doc_module = doc_module.split(".", 1)[-1] + if doc_module not in graphql_modules and doc_module != "graphql": doc_module = None - fromdoc = 'modules/' + (doc_module or base_module) + fromdoc = "modules/" + (doc_module or base_module) # try resolving again with replaced target - return env.domains['py'].resolve_xref( - env, fromdoc, app.builder, typ, target, node, contnode) + return env.domains["py"].resolve_xref( + env, fromdoc, app.builder, typ, target, node, contnode + ) def on_skip_member(_app, what, name, _obj, skip, _options): - if what == 'class' and name == "__init__": + if what == "class" and name == "__init__": # we could set "special-members" to "__init__", # but this gives an error when documenting modules return False @@ -241,7 +250,7 @@ def on_skip_member(_app, what, name, _obj, skip, _options): def setup(app): - app.connect('missing-reference', on_missing_reference) + app.connect("missing-reference", on_missing_reference) app.connect("autodoc-skip-member", on_skip_member) @@ -269,7 +278,7 @@ def setup(app): # show_authors = False # The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -285,15 +294,13 @@ def setup(app): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -html_theme_options = { - 'navigation_depth': 5 -} +html_theme_options = {"navigation_depth": 5} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -401,34 +408,36 @@ def setup(app): # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'GraphQL-core-3-doc' +htmlhelp_basename = "GraphQL-core-3-doc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (master_doc, 'GraphQL-core-3.tex', 'GraphQL-core 3 Documentation', - 'Christoph Zwerschke', 'manual'), + ( + master_doc, + "GraphQL-core-3.tex", + "GraphQL-core 3 Documentation", + "Christoph Zwerschke", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -462,10 +471,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'graphql-core', 'GraphQL-core 3 Documentation', - [author], 1) -] +man_pages = [(master_doc, "graphql-core", "GraphQL-core 3 Documentation", [author], 1)] # If true, show URL addresses after external links. # @@ -478,9 +484,15 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'GraphQL-core', 'GraphQL-core 3 Documentation', - author, 'GraphQL-core 3', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "GraphQL-core", + "GraphQL-core 3 Documentation", + author, + "GraphQL-core 3", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. 
diff --git a/poetry.lock b/poetry.lock index 7903bb44..bc3735f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -756,13 +756,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.0.0" +version = "8.0.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, + {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, + {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, ] [package.dependencies] @@ -1220,13 +1220,13 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.12.1" +version = "4.13.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.12.1-py3-none-any.whl", hash = "sha256:c07ea797880a44f3c4f200ad88ad92b446b83079d4ccef89585df64cc574375c"}, - {file = "tox-4.12.1.tar.gz", hash = "sha256:61aafbeff1bd8a5af84e54ef6e8402f53c6a6066d0782336171ddfbf5362122e"}, + {file = "tox-4.13.0-py3-none-any.whl", hash = "sha256:1143c7e2489c68026a55d3d4ae84c02c449f073b28e62f80e3e440a3b72a4afa"}, + {file = "tox-4.13.0.tar.gz", hash = "sha256:dd789a554c16c4b532924ba393c92fc8991323c4b3d466712bfecc8c9b9f24f7"}, ] [package.dependencies] @@ -1405,4 +1405,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "910d6fe7bf0668879447dda5c6f98241d7facc12f25b2c97ea5e7b22117ba7da" +content-hash = 
"b78e75f3de0aa66a09e5f2d319fc43cc3201402707385827a1ddee81c22941ad" diff --git a/pyproject.toml b/pyproject.toml index e606e1dc..2e407b6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a3" +version = "3.3.0a4" description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" @@ -63,7 +63,7 @@ pytest-cov = "^4.1" pytest-describe = "^2.2" pytest-timeout = "^2.2" tox = [ - { version = "^4.12", python = ">=3.8" }, + { version = "^4.13", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] @@ -277,7 +277,7 @@ module = [ disallow_untyped_defs = false [tool.pytest.ini_options] -minversion = "7.3" +minversion = "7.4" # Only run benchmarks as tests. # To actually run the benchmarks, use --benchmark-enable on the command line. # To run the slow tests (fuzzing), add --run-slow on the command line. diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index b70ed483..1cbb9f0b 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -624,6 +624,7 @@ class Data: result = execute_sync(schema, document, Data()) assert result == ({"a": "b"}, None) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def uses_the_only_operation_if_no_operation_name_is_provided(): schema = GraphQLSchema( GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index ccfd1f93..348a70ec 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1408,6 +1408,7 @@ async def friend_list(_info): ] @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): document = parse( """ diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py index 
7a9ebb82..277efc0b 100644 --- a/tests/execution/test_variables.py +++ b/tests/execution/test_variables.py @@ -733,7 +733,9 @@ def reports_error_for_array_passed_into_string_input(): ], ) - assert result.errors[0].original_error + errors = result.errors + assert errors + assert errors[0].original_error def reports_error_for_non_provided_variables_for_non_nullable_inputs(): # Note: this test would typically fail validation before From 206de32dad4b4e0d0d8c0e1ad2e11fc400e8986b Mon Sep 17 00:00:00 2001 From: Fedir Zadniprovskyi <76551385+fedirz@users.noreply.github.com> Date: Tue, 27 Feb 2024 11:22:04 -0800 Subject: [PATCH 136/230] Allow user to pass in a custom resolve info context type (#213) --- src/graphql/type/definition.py | 63 ++++++++++++++++++++++++---------- 1 file changed, 44 insertions(+), 19 deletions(-) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 9bea7eed..81c5612d 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -2,6 +2,7 @@ from __future__ import annotations # Python < 3.10 +import sys from enum import Enum from typing import ( TYPE_CHECKING, @@ -554,30 +555,54 @@ def to_kwargs(self) -> GraphQLFieldKwargs: def __copy__(self) -> GraphQLField: # pragma: no cover return self.__class__(**self.to_kwargs()) +if sys.version_info < (3, 9) or sys.version_info >= (3, 11): + TContext = TypeVar("TContext") -class GraphQLResolveInfo(NamedTuple): - """Collection of information passed to the resolvers. + class GraphQLResolveInfo(NamedTuple, Generic[TContext]): + """Collection of information passed to the resolvers. - This is always passed as the first argument to the resolvers. + This is always passed as the first argument to the resolvers. - Note that contrary to the JavaScript implementation, the context (commonly used to - represent an authenticated user, or request-specific caches) is included here and - not passed as an additional argument. 
- """ + Note that contrary to the JavaScript implementation, the context (commonly used + to represent an authenticated user, or request-specific caches) is included here + and not passed as an additional argument. + """ - field_name: str - field_nodes: List[FieldNode] - return_type: GraphQLOutputType - parent_type: GraphQLObjectType - path: Path - schema: GraphQLSchema - fragments: Dict[str, FragmentDefinitionNode] - root_value: Any - operation: OperationDefinitionNode - variable_values: Dict[str, Any] - context: Any - is_awaitable: Callable[[Any], bool] + field_name: str + field_nodes: List[FieldNode] + return_type: GraphQLOutputType + parent_type: GraphQLObjectType + path: Path + schema: GraphQLSchema + fragments: Dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: Dict[str, Any] + context: TContext + is_awaitable: Callable[[Any], bool] +else: + class GraphQLResolveInfo(NamedTuple): + """Collection of information passed to the resolvers. + + This is always passed as the first argument to the resolvers. + + Note that contrary to the JavaScript implementation, the context (commonly used + to represent an authenticated user, or request-specific caches) is included here + and not passed as an additional argument. 
+ """ + field_name: str + field_nodes: List[FieldNode] + return_type: GraphQLOutputType + parent_type: GraphQLObjectType + path: Path + schema: GraphQLSchema + fragments: Dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: Dict[str, Any] + context: Any + is_awaitable: Callable[[Any], bool] # Note: Contrary to the Javascript implementation of GraphQLFieldResolver, # the context is passed as part of the GraphQLResolveInfo and any arguments From 602f7d736d7911899d7ff620abebc156eeca1dfc Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 27 Feb 2024 21:26:40 +0100 Subject: [PATCH 137/230] Fix minor issues with testing --- docs/conf.py | 2 ++ src/graphql/type/definition.py | 15 ++++++++++----- tests/execution/test_executor.py | 1 + tests/execution/test_stream.py | 1 + 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 414333bf..ce27fe29 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -141,6 +141,7 @@ ignore_references = set( """ GNT GT KT T VT +TContext enum.Enum traceback types.TracebackType @@ -166,6 +167,7 @@ graphql.execution.execute.StreamRecord graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor +graphql.type.definition.TContext graphql.type.schema.InterfaceImplementations graphql.validation.validation_context.VariableUsage graphql.validation.rules.known_argument_names.KnownArgumentNamesOnDirectivesRule diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 81c5612d..212ab4e6 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -2,7 +2,6 @@ from __future__ import annotations # Python < 3.10 -import sys from enum import Enum from typing import ( TYPE_CHECKING, @@ -555,8 +554,10 @@ def to_kwargs(self) -> GraphQLFieldKwargs: def __copy__(self) -> GraphQLField: # pragma: no cover return self.__class__(**self.to_kwargs()) -if sys.version_info < (3, 9) or sys.version_info 
>= (3, 11): - TContext = TypeVar("TContext") + +TContext = TypeVar("TContext") + +try: class GraphQLResolveInfo(NamedTuple, Generic[TContext]): """Collection of information passed to the resolvers. @@ -580,8 +581,11 @@ class GraphQLResolveInfo(NamedTuple, Generic[TContext]): variable_values: Dict[str, Any] context: TContext is_awaitable: Callable[[Any], bool] -else: - class GraphQLResolveInfo(NamedTuple): +except TypeError as error: # pragma: no cover + if "Multiple inheritance with NamedTuple is not supported" not in str(error): + raise # only catch expected error for Python 3.9 and 3.10 + + class GraphQLResolveInfo(NamedTuple): # type: ignore[no-redef] """Collection of information passed to the resolvers. This is always passed as the first argument to the resolvers. @@ -604,6 +608,7 @@ class GraphQLResolveInfo(NamedTuple): context: Any is_awaitable: Callable[[Any], bool] + # Note: Contrary to the Javascript implementation of GraphQLFieldResolver, # the context is passed as part of the GraphQLResolveInfo and any arguments # are passed individually as keyword arguments. 
diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 1cbb9f0b..61f4ba62 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -638,6 +638,7 @@ class Data: result = execute_sync(schema, document, Data()) assert result == ({"a": "b"}, None) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def uses_the_named_operation_if_operation_name_is_provided(): schema = GraphQLSchema( GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 348a70ec..a3c2e49a 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -373,6 +373,7 @@ async def can_disable_stream_using_if_argument(): } @pytest.mark.asyncio() + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_disable_stream_with_null_if_argument(): document = parse( "query ($shouldStream: Boolean)" From 98b44cc2c950d3ecbd8c275c5ab81b678181718c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 29 Feb 2024 08:59:50 +0100 Subject: [PATCH 138/230] Add test for GraphQLResolveInfo with custom context --- tests/type/test_definition.py | 69 ++++++++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 2 deletions(-) diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index cb38a678..8ecb2bc2 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1,7 +1,13 @@ import pickle +import sys from enum import Enum from math import isnan, nan -from typing import Dict +from typing import Any, Callable, Dict, List + +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict import pytest from graphql.error import GraphQLError @@ -9,6 +15,8 @@ EnumTypeDefinitionNode, EnumTypeExtensionNode, EnumValueNode, + FieldNode, + FragmentDefinitionNode, InputObjectTypeDefinitionNode, 
InputObjectTypeExtensionNode, InputValueDefinitionNode, @@ -16,6 +24,7 @@ InterfaceTypeExtensionNode, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, + OperationDefinitionNode, ScalarTypeDefinitionNode, ScalarTypeExtensionNode, StringValueNode, @@ -24,7 +33,7 @@ ValueNode, parse_value, ) -from graphql.pyutils import Undefined +from graphql.pyutils import Path, Undefined, is_awaitable from graphql.type import ( GraphQLArgument, GraphQLEnumType, @@ -37,7 +46,10 @@ GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLOutputType, + GraphQLResolveInfo, GraphQLScalarType, + GraphQLSchema, GraphQLString, GraphQLUnionType, introspection_types, @@ -1301,3 +1313,56 @@ def cannot_redefine_introspection_types(): TypeError, match=f"Redefinition of reserved type '{name}'" ): introspection_type.__class__(**introspection_type.to_kwargs()) + + +def describe_resolve_info(): + class InfoArgs(TypedDict): + """Arguments for GraphQLResolveInfo""" + + field_name: str + field_nodes: List[FieldNode] + return_type: GraphQLOutputType + parent_type: GraphQLObjectType + path: Path + schema: GraphQLSchema + fragments: Dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: Dict[str, Any] + is_awaitable: Callable[[Any], bool] + + info_args: InfoArgs = { + "field_name": "foo", + "field_nodes": [], + "return_type": GraphQLString, + "parent_type": GraphQLObjectType("Foo", {}), + "path": Path(None, "foo", None), + "schema": GraphQLSchema(), + "fragments": {}, + "root_value": None, + "operation": OperationDefinitionNode(), + "variable_values": {}, + "is_awaitable": is_awaitable, + } + + def resolve_info_with_unspecified_context_type_can_use_any_type(): + info_int = GraphQLResolveInfo(**info_args, context=42) + assert info_int.context == 42 + info_str = GraphQLResolveInfo(**info_args, context="foo") + assert info_str.context == "foo" + + def resolve_info_with_unspecified_context_type_remembers_type(): + info = GraphQLResolveInfo(**info_args, 
context=42) + assert info.context == 42 + info = GraphQLResolveInfo(**info_args, context="foo") # type: ignore + assert info.context == "foo" + + @pytest.mark.skipif( + sys.version_info < (3, 9), reason="this needs at least Python 3.9" + ) + def resolve_info_with_specified_context_type_checks_type(): + info_int = GraphQLResolveInfo[int](**info_args, context=42) + assert isinstance(info_int.context, int) + # this should not pass type checking now: + info_str = GraphQLResolveInfo[int](**info_args, context="foo") # type: ignore + assert isinstance(info_str.context, str) From a91e4b20f0eab3d2212bf16c03082614a03dc830 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 5 Apr 2024 15:48:07 +0200 Subject: [PATCH 139/230] Remove defer/stream support from subscriptions Replicates graphql/graphql-js@1bf71eeec71d26b532a3722c54d0552ec1706af5 --- docs/modules/execution.rst | 2 - src/graphql/execution/__init__.py | 5 +- src/graphql/execution/async_iterables.py | 17 +- src/graphql/execution/collect_fields.py | 29 +- src/graphql/execution/execute.py | 170 ++------ src/graphql/validation/__init__.py | 6 + ...ream_directive_on_valid_operations_rule.py | 83 ++++ .../rules/single_field_subscriptions.py | 2 +- src/graphql/validation/specified_rules.py | 6 + .../execution/test_flatten_async_iterable.py | 210 ---------- tests/execution/test_subscribe.py | 156 +++---- ...er_stream_directive_on_valid_operations.py | 395 ++++++++++++++++++ 12 files changed, 596 insertions(+), 485 deletions(-) create mode 100644 src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py delete mode 100644 tests/execution/test_flatten_async_iterable.py create mode 100644 tests/validation/test_defer_stream_directive_on_valid_operations.py diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst index 535dffbd..7509676c 100644 --- a/docs/modules/execution.rst +++ b/docs/modules/execution.rst @@ -53,8 +53,6 @@ Execution .. autofunction:: subscribe -.. 
autofunction:: experimental_subscribe_incrementally - .. autofunction:: create_source_event_stream .. autoclass:: Middleware diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 29aa1594..e33d4ce7 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -13,7 +13,6 @@ default_field_resolver, default_type_resolver, subscribe, - experimental_subscribe_incrementally, ExecutionContext, ExecutionResult, ExperimentalIncrementalExecutionResults, @@ -30,7 +29,7 @@ FormattedIncrementalResult, Middleware, ) -from .async_iterables import flatten_async_iterable, map_async_iterable +from .async_iterables import map_async_iterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -43,7 +42,6 @@ "default_field_resolver", "default_type_resolver", "subscribe", - "experimental_subscribe_incrementally", "ExecutionContext", "ExecutionResult", "ExperimentalIncrementalExecutionResults", @@ -58,7 +56,6 @@ "FormattedIncrementalDeferResult", "FormattedIncrementalStreamResult", "FormattedIncrementalResult", - "flatten_async_iterable", "map_async_iterable", "Middleware", "MiddlewareManager", diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py index 7b7f6340..305b495f 100644 --- a/src/graphql/execution/async_iterables.py +++ b/src/graphql/execution/async_iterables.py @@ -12,7 +12,7 @@ Union, ) -__all__ = ["aclosing", "flatten_async_iterable", "map_async_iterable"] +__all__ = ["aclosing", "map_async_iterable"] T = TypeVar("T") V = TypeVar("V") @@ -42,21 +42,6 @@ async def __aexit__(self, *_exc_info: object) -> None: await aclose() -async def flatten_async_iterable( - iterable: AsyncIterableOrGenerator[AsyncIterableOrGenerator[T]], -) -> AsyncGenerator[T, None]: - """Flatten async iterables. - - Given an AsyncIterable of AsyncIterables, flatten all yielded results into a - single AsyncIterable. 
- """ - async with aclosing(iterable) as sub_iterators: # type: ignore - async for sub_iterator in sub_iterators: - async with aclosing(sub_iterator) as items: # type: ignore - async for item in items: - yield item - - async def map_async_iterable( iterable: AsyncIterableOrGenerator[T], callback: Callable[[T], Awaitable[V]] ) -> AsyncGenerator[V, None]: diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 260e10ae..e7d64fe8 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -8,6 +8,8 @@ FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, + OperationDefinitionNode, + OperationType, SelectionSetNode, ) from ..type import ( @@ -43,7 +45,7 @@ def collect_fields( fragments: Dict[str, FragmentDefinitionNode], variable_values: Dict[str, Any], runtime_type: GraphQLObjectType, - selection_set: SelectionSetNode, + operation: OperationDefinitionNode, ) -> FieldsAndPatches: """Collect fields. 
@@ -61,8 +63,9 @@ def collect_fields( schema, fragments, variable_values, + operation, runtime_type, - selection_set, + operation.selection_set, fields, patches, set(), @@ -74,6 +77,7 @@ def collect_subfields( schema: GraphQLSchema, fragments: Dict[str, FragmentDefinitionNode], variable_values: Dict[str, Any], + operation: OperationDefinitionNode, return_type: GraphQLObjectType, field_nodes: List[FieldNode], ) -> FieldsAndPatches: @@ -100,6 +104,7 @@ def collect_subfields( schema, fragments, variable_values, + operation, return_type, node.selection_set, sub_field_nodes, @@ -113,6 +118,7 @@ def collect_fields_impl( schema: GraphQLSchema, fragments: Dict[str, FragmentDefinitionNode], variable_values: Dict[str, Any], + operation: OperationDefinitionNode, runtime_type: GraphQLObjectType, selection_set: SelectionSetNode, fields: Dict[str, List[FieldNode]], @@ -133,13 +139,14 @@ def collect_fields_impl( ) or not does_fragment_condition_match(schema, selection, runtime_type): continue - defer = get_defer_values(variable_values, selection) + defer = get_defer_values(operation, variable_values, selection) if defer: patch_fields = defaultdict(list) collect_fields_impl( schema, fragments, variable_values, + operation, runtime_type, selection.selection_set, patch_fields, @@ -152,6 +159,7 @@ def collect_fields_impl( schema, fragments, variable_values, + operation, runtime_type, selection.selection_set, fields, @@ -164,7 +172,7 @@ def collect_fields_impl( if not should_include_node(variable_values, selection): continue - defer = get_defer_values(variable_values, selection) + defer = get_defer_values(operation, variable_values, selection) if frag_name in visited_fragment_names and not defer: continue @@ -183,6 +191,7 @@ def collect_fields_impl( schema, fragments, variable_values, + operation, runtime_type, fragment.selection_set, patch_fields, @@ -195,6 +204,7 @@ def collect_fields_impl( schema, fragments, variable_values, + operation, runtime_type, fragment.selection_set, 
fields, @@ -210,7 +220,9 @@ class DeferValues(NamedTuple): def get_defer_values( - variable_values: Dict[str, Any], node: Union[FragmentSpreadNode, InlineFragmentNode] + operation: OperationDefinitionNode, + variable_values: Dict[str, Any], + node: Union[FragmentSpreadNode, InlineFragmentNode], ) -> Optional[DeferValues]: """Get values of defer directive if active. @@ -223,6 +235,13 @@ def get_defer_values( if not defer or defer.get("if") is False: return None + if operation.operation == OperationType.SUBSCRIPTION: + msg = ( + "`@defer` directive not supported on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`." + ) + raise TypeError(msg) + return DeferValues(defer.get("label")) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 74ead0af..6310d33b 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -77,7 +77,7 @@ is_non_null_type, is_object_type, ) -from .async_iterables import flatten_async_iterable, map_async_iterable +from .async_iterables import map_async_iterable from .collect_fields import FieldsAndPatches, collect_fields, collect_subfields from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -101,7 +101,6 @@ async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 "execute", "execute_sync", "experimental_execute_incrementally", - "experimental_subscribe_incrementally", "subscribe", "AsyncPayloadRecord", "DeferredFragmentRecord", @@ -817,7 +816,7 @@ def execute_operation(self) -> AwaitableOrValue[Dict[str, Any]]: self.fragments, self.variable_values, root_type, - operation.selection_set, + operation, ) root_value = self.root_value @@ -1173,6 +1172,13 @@ def get_stream_values( msg = "initialCount must be a positive integer" raise ValueError(msg) + if self.operation.operation == OperationType.SUBSCRIPTION: + msg = ( + "`@stream` directive not supported on subscription 
operations." + " Disable `@stream` by setting the `if` argument to `false`." + ) + raise TypeError(msg) + label = stream.get("label") return StreamArguments(initial_count=initial_count, label=label) @@ -1644,6 +1650,7 @@ def collect_subfields( self.schema, self.fragments, self.variable_values, + self.operation, return_type, field_nodes, ) @@ -1652,17 +1659,7 @@ def collect_subfields( def map_source_to_response( self, result_or_stream: Union[ExecutionResult, AsyncIterable[Any]] - ) -> Union[ - AsyncGenerator[ - Union[ - ExecutionResult, - InitialIncrementalExecutionResult, - SubsequentIncrementalExecutionResult, - ], - None, - ], - ExecutionResult, - ]: + ) -> Union[AsyncGenerator[ExecutionResult, None], ExecutionResult]: """Map source result to response. For each payload yielded from a subscription, @@ -1678,13 +1675,17 @@ def map_source_to_response( if not isinstance(result_or_stream, AsyncIterable): return result_or_stream # pragma: no cover - async def callback(payload: Any) -> AsyncGenerator: + async def callback(payload: Any) -> ExecutionResult: result = execute_impl(self.build_per_event_execution_context(payload)) - return ensure_async_iterable( - await result if self.is_awaitable(result) else result # type: ignore + # typecast to ExecutionResult, not possible to return + # ExperimentalIncrementalExecutionResults when operation is 'subscription'. + return ( + await cast(Awaitable[ExecutionResult], result) + if self.is_awaitable(result) + else cast(ExecutionResult, result) ) - return flatten_async_iterable(map_async_iterable(result_or_stream, callback)) + return map_async_iterable(result_or_stream, callback) def execute_deferred_fragment( self, @@ -2015,8 +2016,8 @@ def execute( a GraphQLError will be thrown immediately explaining the invalid input. This function does not support incremental delivery (`@defer` and `@stream`). 
- If an operation which would defer or stream data is executed with this - function, it will throw or resolve to an object containing an error instead. + If an operation that defers or streams data is executed with this function, + it will throw or resolve to an object containing an error instead. Use `experimental_execute_incrementally` if you want to support incremental delivery. """ @@ -2362,111 +2363,8 @@ def subscribe( a stream of ExecutionResults representing the response stream. This function does not support incremental delivery (`@defer` and `@stream`). - If an operation which would defer or stream data is executed with this function, - each :class:`InitialIncrementalExecutionResult` and - :class:`SubsequentIncrementalExecutionResult` - in the result stream will be replaced with an :class:`ExecutionResult` - with a single error stating that defer/stream is not supported. - Use :func:`experimental_subscribe_incrementally` if you want to support - incremental delivery. - """ - result = experimental_subscribe_incrementally( - schema, - document, - root_value, - context_value, - variable_values, - operation_name, - field_resolver, - type_resolver, - subscribe_field_resolver, - execution_context_class, - ) - - if isinstance(result, ExecutionResult): - return result - if isinstance(result, AsyncIterable): - return map_async_iterable(result, ensure_single_execution_result) - - async def await_result() -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: - result_or_iterable = await result - if isinstance(result_or_iterable, AsyncIterable): - return map_async_iterable( - result_or_iterable, ensure_single_execution_result - ) - return result_or_iterable - - return await_result() - - -async def ensure_single_execution_result( - result: Union[ - ExecutionResult, - InitialIncrementalExecutionResult, - SubsequentIncrementalExecutionResult, - ], -) -> ExecutionResult: - """Ensure that the given result does not use incremental delivery.""" - if not 
isinstance(result, ExecutionResult): - return ExecutionResult( - None, errors=[GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)] - ) - return result - - -def experimental_subscribe_incrementally( - schema: GraphQLSchema, - document: DocumentNode, - root_value: Any = None, - context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> AwaitableOrValue[ - Union[ - AsyncGenerator[ - Union[ - ExecutionResult, - InitialIncrementalExecutionResult, - SubsequentIncrementalExecutionResult, - ], - None, - ], - ExecutionResult, - ] -]: - """Create a GraphQL subscription. - - Implements the "Subscribe" algorithm described in the GraphQL spec. - - Returns a coroutine object which yields either an AsyncIterator (if successful) or - an ExecutionResult (client error). The coroutine will raise an exception if a server - error occurs. - - If the client-provided arguments to this function do not result in a compliant - subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no - data will be returned. - - If the source stream could not be created due to faulty subscription resolver logic - or underlying systems, the coroutine object will yield a single ExecutionResult - containing ``errors`` and no ``data``. - - If the operation succeeded, the coroutine will yield an AsyncIterator, which yields - a stream of ExecutionResults representing the response stream. - - Each result may be an ExecutionResult with no ``has_next`` attribute (if executing - the event did not use `@defer` or `@stream`), or an - :class:`InitialIncrementalExecutionResult` or - :class:`SubsequentIncrementalExecutionResult` - (if executing the event used `@defer` or `@stream`). 
In the case of - incremental execution results, each event produces a single - :class:`InitialIncrementalExecutionResult` followed by one or more - :class:`SubsequentIncrementalExecutionResult`; all but the last have - ``has_next == true``, and the last has ``has_next == False``. - There is no interleaving between results generated from the same original event. + If an operation that defers or streams data is executed with this function, + a field error will be raised at the location of the `@defer` or `@stream` directive. """ if execution_context_class is None: execution_context_class = ExecutionContext @@ -2507,26 +2405,6 @@ async def await_result() -> Any: return context.map_source_to_response(result_or_stream) # type: ignore -async def ensure_async_iterable( - some_execution_result: Union[ - ExecutionResult, ExperimentalIncrementalExecutionResults - ], -) -> AsyncGenerator[ - Union[ - ExecutionResult, - InitialIncrementalExecutionResult, - SubsequentIncrementalExecutionResult, - ], - None, -]: - if isinstance(some_execution_result, ExecutionResult): - yield some_execution_result - else: - yield some_execution_result.initial_result - async for result in some_execution_result.subsequent_results: - yield result - - def create_source_event_stream( schema: GraphQLSchema, document: DocumentNode, @@ -2622,7 +2500,7 @@ def execute_subscription( context.fragments, context.variable_values, root_type, - context.operation.selection_set, + context.operation, ).fields first_root_field = next(iter(root_fields.items())) response_name, field_nodes = first_root_field diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 270eed06..8f67f9b7 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -23,6 +23,11 @@ # Spec Section: "Defer And Stream Directives Are Used On Valid Root Field" from .rules.defer_stream_directive_on_root_field import DeferStreamDirectiveOnRootField +# Spec Section: "Defer And Stream 
Directives Are Used On Valid Operations"
+from .rules.defer_stream_directive_on_valid_operations_rule import (
+    DeferStreamDirectiveOnValidOperationsRule,
+)
+
 # Spec Section: "Executable Definitions"
 from .rules.executable_definitions import ExecutableDefinitionsRule

@@ -129,6 +134,7 @@
     "specified_rules",
     "DeferStreamDirectiveLabel",
     "DeferStreamDirectiveOnRootField",
+    "DeferStreamDirectiveOnValidOperationsRule",
     "ExecutableDefinitionsRule",
     "FieldsOnCorrectTypeRule",
     "FragmentsOnCompositeTypesRule",
diff --git a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py
new file mode 100644
index 00000000..391c8932
--- /dev/null
+++ b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py
@@ -0,0 +1,83 @@
+"""Defer stream directive on valid operations rule"""
+
+from typing import Any, List, Set
+
+from ...error import GraphQLError
+from ...language import (
+    BooleanValueNode,
+    DirectiveNode,
+    FragmentDefinitionNode,
+    Node,
+    OperationDefinitionNode,
+    OperationType,
+    VariableNode,
+)
+from ...type import GraphQLDeferDirective, GraphQLStreamDirective
+from . import ASTValidationRule, ValidationContext
+
+__all__ = ["DeferStreamDirectiveOnValidOperationsRule"]
+
+
+def if_argument_can_be_false(node: DirectiveNode) -> bool:
+    for argument in node.arguments:
+        if argument.name.value == "if":
+            if isinstance(argument.value, BooleanValueNode):
+                if argument.value.value:
+                    return False
+            elif not isinstance(argument.value, VariableNode):
+                return False
+            return True
+    return False
+
+
+class DeferStreamDirectiveOnValidOperationsRule(ASTValidationRule):
+    """Defer and stream directives are used on valid operations
+
+    A GraphQL document is only valid if defer directives are not used on root
+    mutation or subscription types.
+ """ + + def __init__(self, context: ValidationContext) -> None: + super().__init__(context) + self.fragments_used_on_subscriptions: Set[str] = set() + + def enter_operation_definition( + self, operation: OperationDefinitionNode, *_args: Any + ) -> None: + if operation.operation == OperationType.SUBSCRIPTION: + fragments = self.context.get_recursively_referenced_fragments(operation) + for fragment in fragments: + self.fragments_used_on_subscriptions.add(fragment.name.value) + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + ancestors: List[Node], + ) -> None: + try: + definition_node = ancestors[2] + except IndexError: # pragma: no cover + return + if ( + isinstance(definition_node, FragmentDefinitionNode) + and definition_node.name.value in self.fragments_used_on_subscriptions + or isinstance(definition_node, OperationDefinitionNode) + and definition_node.operation == OperationType.SUBSCRIPTION + ): + if node.name.value == GraphQLDeferDirective.name: + if not if_argument_can_be_false(node): + msg = ( + "Defer directive not supported on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`." + ) + self.report_error(GraphQLError(msg, node)) + elif node.name.value == GraphQLStreamDirective.name: # noqa: SIM102 + if not if_argument_can_be_false(node): + msg = ( + "Stream directive not supported on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`." 
+ ) + self.report_error(GraphQLError(msg, node)) diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 40d37eb2..e8ce9ec5 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -45,7 +45,7 @@ def enter_operation_definition( fragments, variable_values, subscription_type, - node.selection_set, + node, ).fields if len(fields) > 1: field_selection_lists = list(fields.values()) diff --git a/src/graphql/validation/specified_rules.py b/src/graphql/validation/specified_rules.py index d8c225d8..e024d0d1 100644 --- a/src/graphql/validation/specified_rules.py +++ b/src/graphql/validation/specified_rules.py @@ -10,6 +10,11 @@ # Spec Section: "Defer And Stream Directives Are Used On Valid Root Field" from .rules.defer_stream_directive_on_root_field import DeferStreamDirectiveOnRootField +# Spec Section: "Defer And Stream Directives Are Used On Valid Operations" +from .rules.defer_stream_directive_on_valid_operations_rule import ( + DeferStreamDirectiveOnValidOperationsRule, +) + # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule @@ -136,6 +141,7 @@ KnownDirectivesRule, UniqueDirectivesPerLocationRule, DeferStreamDirectiveOnRootField, + DeferStreamDirectiveOnValidOperationsRule, DeferStreamDirectiveLabel, StreamDirectiveOnListField, KnownArgumentNamesRule, diff --git a/tests/execution/test_flatten_async_iterable.py b/tests/execution/test_flatten_async_iterable.py deleted file mode 100644 index 357e4cd0..00000000 --- a/tests/execution/test_flatten_async_iterable.py +++ /dev/null @@ -1,210 +0,0 @@ -from contextlib import suppress -from typing import AsyncGenerator - -import pytest -from graphql.execution import flatten_async_iterable - -try: # pragma: no cover - anext # noqa: B018 -except NameError: # pragma: no cover (Python < 3.10) - # noinspection 
PyShadowingBuiltins - async def anext(iterator): # noqa: A001 - """Return the next item from an async iterator.""" - return await iterator.__anext__() - - -def describe_flatten_async_iterable(): - @pytest.mark.asyncio() - async def flattens_nested_async_generators(): - async def source(): - async def nested1() -> AsyncGenerator[float, None]: - yield 1.1 - yield 1.2 - - async def nested2() -> AsyncGenerator[float, None]: - yield 2.1 - yield 2.2 - - yield nested1() - yield nested2() - - doubles = flatten_async_iterable(source()) - - result = [x async for x in doubles] - - assert result == [1.1, 1.2, 2.1, 2.2] - - @pytest.mark.asyncio() - async def allows_returning_early_from_a_nested_async_generator(): - async def source(): - async def nested1() -> AsyncGenerator[float, None]: - yield 1.1 - yield 1.2 - - async def nested2() -> AsyncGenerator[float, None]: - yield 2.1 - # Not reachable, early return - yield 2.2 # pragma: no cover - - # Not reachable, early return - async def nested3() -> AsyncGenerator[float, None]: - yield 3.1 # pragma: no cover - yield 3.2 # pragma: no cover - - yield nested1() - yield nested2() - yield nested3() # pragma: no cover - - doubles = flatten_async_iterable(source()) - - assert await anext(doubles) == 1.1 - assert await anext(doubles) == 1.2 - assert await anext(doubles) == 2.1 - - # early return - with suppress(RuntimeError): # suppress error for Python < 3.8 - await doubles.aclose() - - # subsequent anext calls - with pytest.raises(StopAsyncIteration): - assert await anext(doubles) - with pytest.raises(StopAsyncIteration): - assert await anext(doubles) - - @pytest.mark.asyncio() - async def allows_throwing_errors_from_a_nested_async_generator(): - async def source(): - async def nested1() -> AsyncGenerator[float, None]: - yield 1.1 - yield 1.2 - - async def nested2() -> AsyncGenerator[float, None]: - yield 2.1 - # Not reachable, early return - yield 2.2 # pragma: no cover - - # Not reachable, early return - async def nested3() -> 
AsyncGenerator[float, None]: - yield 3.1 # pragma: no cover - yield 3.2 # pragma: no cover - - yield nested1() - yield nested2() - yield nested3() # pragma: no cover - - doubles = flatten_async_iterable(source()) - - assert await anext(doubles) == 1.1 - assert await anext(doubles) == 1.2 - assert await anext(doubles) == 2.1 - - # throw error - with pytest.raises(RuntimeError, match="ouch"): - await doubles.athrow(RuntimeError("ouch")) - - @pytest.mark.asyncio() - async def completely_yields_sub_iterables_even_when_anext_called_in_parallel(): - async def source(): - async def nested1() -> AsyncGenerator[float, None]: - yield 1.1 - yield 1.2 - - async def nested2() -> AsyncGenerator[float, None]: - yield 2.1 - yield 2.2 - - yield nested1() - yield nested2() - - doubles = flatten_async_iterable(source()) - - anext1 = anext(doubles) - anext2 = anext(doubles) - assert await anext1 == 1.1 - assert await anext2 == 1.2 - assert await anext(doubles) == 2.1 - assert await anext(doubles) == 2.2 - with pytest.raises(StopAsyncIteration): - assert await anext(doubles) - - @pytest.mark.asyncio() - async def closes_nested_async_iterators(): - closed = [] - - class Source: - def __init__(self): - self.counter = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - if self.counter == 2: - raise StopAsyncIteration - self.counter += 1 - return Nested(self.counter) - - async def aclose(self): - nonlocal closed - closed.append(self.counter) - - class Nested: - def __init__(self, value): - self.value = value - self.counter = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - if self.counter == 2: - raise StopAsyncIteration - self.counter += 1 - return self.value + self.counter / 10 - - async def aclose(self): - nonlocal closed - closed.append(self.value + self.counter / 10) - - doubles = flatten_async_iterable(Source()) - - result = [x async for x in doubles] - - assert result == [1.1, 1.2, 2.1, 2.2] - - assert closed == [1.2, 2.2, 2] - - 
@pytest.mark.asyncio() - async def works_with_nested_async_iterators_that_have_no_close_method(): - class Source: - def __init__(self): - self.counter = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - if self.counter == 2: - raise StopAsyncIteration - self.counter += 1 - return Nested(self.counter) - - class Nested: - def __init__(self, value): - self.value = value - self.counter = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - if self.counter == 2: - raise StopAsyncIteration - self.counter += 1 - return self.value + self.counter / 10 - - doubles = flatten_async_iterable(Source()) - - result = [x async for x in doubles] - - assert result == [1.1, 1.2, 2.1, 2.2] diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 9c133da9..fcbd13ef 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -16,7 +16,6 @@ from graphql.execution import ( ExecutionResult, create_source_event_stream, - experimental_subscribe_incrementally, subscribe, ) from graphql.language import DocumentNode, parse @@ -116,15 +115,16 @@ async def async_subject(email: Email, _info: GraphQLResolveInfo) -> str: def create_subscription( - pubsub: SimplePubSub, - variable_values: Optional[Dict[str, Any]] = None, - original_subscribe: bool = False, + pubsub: SimplePubSub, variable_values: Optional[Dict[str, Any]] = None ) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: document = parse( """ - subscription ($priority: Int = 0, - $shouldDefer: Boolean = false - $asyncResolver: Boolean = false) { + subscription ( + $priority: Int = 0 + $shouldDefer: Boolean = false + $shouldStream: Boolean = false + $asyncResolver: Boolean = false + ) { importantEmail(priority: $priority) { email { from @@ -135,6 +135,7 @@ def create_subscription( } ... 
@defer(if: $shouldDefer) { inbox { + emails @include(if: $shouldStream) @stream(if: $shouldStream) unread total } @@ -163,9 +164,7 @@ def transform(new_email): "importantEmail": pubsub.get_subscriber(transform), } - return (subscribe if original_subscribe else experimental_subscribe_incrementally)( # type: ignore - email_schema, document, data, variable_values=variable_values - ) + return subscribe(email_schema, document, data, variable_values=variable_values) DummyQueryType = GraphQLObjectType("Query", {"dummy": GraphQLField(GraphQLString)}) @@ -645,7 +644,7 @@ async def produces_a_payload_per_subscription_event(): assert await anext(subscription) @pytest.mark.asyncio() - async def produces_additional_payloads_for_subscriptions_with_defer(): + async def subscribe_function_returns_errors_with_defer(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldDefer": True}) assert isinstance(subscription, AsyncIterator) @@ -666,31 +665,22 @@ async def produces_additional_payloads_for_subscriptions_with_defer(): is True ) - # The previously waited on payload now has a value. - result = await payload - assert result.formatted == { - "data": { - "importantEmail": { - "email": { - "from": "yuzhi@graphql.org", - "subject": "Alright", - }, - }, - }, - "hasNext": True, - } - - # Wait for the next payload from @defer - result = await anext(subscription) - assert result.formatted == { - "incremental": [ + error_result = ( + {"importantEmail": None}, + [ { - "data": {"inbox": {"total": 2, "unread": 1}}, + "message": "`@defer` directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(8, 11)], "path": ["importantEmail"], } ], - "hasNext": False, - } + ) + + # The previously waited on payload now has a value. + result = await payload + assert result == error_result # Another new email arrives, # after all incrementally delivered payloads are received. 
@@ -708,59 +698,8 @@ async def produces_additional_payloads_for_subscriptions_with_defer(): # The next waited on payload will have a value. result = await anext(subscription) - assert result.formatted == { - "data": { - "importantEmail": { - "email": { - "from": "hyo@graphql.org", - "subject": "Tools", - }, - }, - }, - "hasNext": True, - } - - # Another new email arrives, - # before the incrementally delivered payloads from the last email was received. - assert ( - pubsub.emit( - { - "from": "adam@graphql.org", - "subject": "Important", - "message": "Read me please", - "unread": True, - } - ) - is True - ) + assert result == error_result - # Deferred payload from previous event is received. - result = await anext(subscription) - assert result.formatted == { - "incremental": [ - { - "data": {"inbox": {"total": 3, "unread": 2}}, - "path": ["importantEmail"], - } - ], - "hasNext": False, - } - - # Next payload from last event - result = await anext(subscription) - assert result.formatted == { - "data": { - "importantEmail": { - "email": { - "from": "adam@graphql.org", - "subject": "Important", - }, - }, - }, - "hasNext": True, - } - - # The client disconnects before the deferred payload is consumed. with suppress(RuntimeError): # suppress error for Python < 3.8 await subscription.aclose() # type: ignore @@ -769,9 +708,9 @@ async def produces_additional_payloads_for_subscriptions_with_defer(): assert await anext(subscription) @pytest.mark.asyncio() - async def original_subscribe_function_returns_errors_with_defer(): + async def subscribe_function_returns_errors_with_stream(): pubsub = SimplePubSub() - subscription = create_subscription(pubsub, {"shouldDefer": True}, True) + subscription = create_subscription(pubsub, {"shouldStream": True}) assert isinstance(subscription, AsyncIterator) # Wait for the next subscription payload. 
@@ -790,23 +729,25 @@ async def original_subscribe_function_returns_errors_with_defer(): is True ) - error_payload = ( - None, + # The previously waited on payload now has a value. + assert await payload == ( + { + "importantEmail": { + "email": {"from": "yuzhi@graphql.org", "subject": "Alright"}, + "inbox": {"emails": None, "unread": 1, "total": 2}, + } + }, [ { - "message": "Executing this GraphQL operation would unexpectedly" - " produce multiple payloads" - " (due to @defer or @stream directive)", + "message": "`@stream` directive not supported" + " on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(18, 17)], + "path": ["importantEmail", "inbox", "emails"], } ], ) - # The previously waited on payload now has a value. - assert await payload == error_payload - - # Wait for the next payload from @defer - assert await anext(subscription) == error_payload - # Another new email arrives, # after all incrementally delivered payloads are received. assert ( @@ -822,10 +763,23 @@ async def original_subscribe_function_returns_errors_with_defer(): ) # The next waited on payload will have a value. - assert await anext(subscription) == error_payload - - # The next waited on payload will have a value. - assert await anext(subscription) == error_payload + assert await anext(subscription) == ( + { + "importantEmail": { + "email": {"from": "hyo@graphql.org", "subject": "Tools"}, + "inbox": {"emails": None, "unread": 2, "total": 3}, + } + }, + [ + { + "message": "`@stream` directive not supported" + " on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(18, 17)], + "path": ["importantEmail", "inbox", "emails"], + } + ], + ) # The client disconnects before the deferred payload is consumed. 
await subscription.aclose() # type: ignore diff --git a/tests/validation/test_defer_stream_directive_on_valid_operations.py b/tests/validation/test_defer_stream_directive_on_valid_operations.py new file mode 100644 index 00000000..7d33fd2b --- /dev/null +++ b/tests/validation/test_defer_stream_directive_on_valid_operations.py @@ -0,0 +1,395 @@ +from functools import partial + +from graphql.utilities import build_schema +from graphql.validation import DeferStreamDirectiveOnValidOperationsRule + +from .harness import assert_validation_errors + +schema = build_schema( + """ + type Message { + body: String + sender: String + } + + type SubscriptionRoot { + subscriptionField: Message + subscriptionListField: [Message] + } + + type MutationRoot { + mutationField: Message + mutationListField: [Message] + } + + type QueryRoot { + message: Message + messages: [Message] + } + + schema { + query: QueryRoot + mutation: MutationRoot + subscription: SubscriptionRoot + } + """ +) + +assert_errors = partial( + assert_validation_errors, DeferStreamDirectiveOnValidOperationsRule, schema=schema +) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_defer_stream_directive_on_valid_operations(): + def defer_fragment_spread_nested_in_query_operation(): + assert_valid( + """ + { + message { + ...myFragment @defer + } + } + fragment myFragment on Message { + message { + body + } + } + """ + ) + + def defer_inline_fragment_spread_in_query_operation(): + assert_valid( + """ + { + ... @defer { + message { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """ + ) + + def defer_inline_fragment_spread_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ... 
@defer { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_subscription_field(): + assert_errors( + """ + subscription { + subscriptionField { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def defer_fragment_spread_with_boolean_true_if_argument(): + assert_errors( + """ + subscription { + subscriptionField { + ...myFragment @defer(if: true) + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def defer_fragment_spread_with_boolean_false_if_argument(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @defer(if: false) + } + } + fragment myFragment on Message { + body + } + """ + ) + + def defer_fragment_spread_on_query_in_multi_operation_document(): + assert_valid( + """ + subscription MySubscription { + subscriptionField { + ...myFragment + } + } + query MyQuery { + message { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """ + ) + + def defer_fragment_spread_on_subscription_in_multi_operation_document(): + assert_errors( + """ + subscription MySubscription { + subscriptionField { + ...myFragment @defer + } + } + query MyQuery { + message { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." 
+ " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def defer_fragment_spread_with_invalid_if_argument(): + assert_errors( + """ + subscription MySubscription { + subscriptionField { + ...myFragment @defer(if: "Oops") + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def stream_on_query_field(): + assert_valid( + """ + { + messages @stream { + name + } + } + """ + ) + + def stream_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + messages @stream + } + } + """ + ) + + def stream_on_fragment_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ...myFragment + } + } + fragment myFragment on Message { + messages @stream + } + """ + ) + + def stream_on_subscription_field(): + assert_errors( + """ + subscription { + subscriptionField { + messages @stream + } + } + """, + [ + { + "message": "Stream directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 26)], + }, + ], + ) + + def stream_on_fragment_on_subscription_field(): + assert_errors( + """ + subscription { + subscriptionField { + ...myFragment + } + } + fragment myFragment on Message { + messages @stream + } + """, + [ + { + "message": "Stream directive not supported" + " on subscription operations." 
+ " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(8, 24)], + }, + ], + ) + + def stream_on_fragment_on_query_in_multi_operation_document(): + assert_valid( + """ + subscription MySubscription { + subscriptionField { + message + } + } + query MyQuery { + message { + ...myFragment + } + } + fragment myFragment on Message { + messages @stream + } + """ + ) + + def stream_on_subscription_in_multi_operation_document(): + assert_errors( + """ + query MyQuery { + message { + ...myFragment + } + } + subscription MySubscription { + subscriptionField { + message { + ...myFragment + } + } + } + fragment myFragment on Message { + messages @stream + } + """, + [ + { + "message": "Stream directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(15, 24)], + }, + ], + ) + + def stream_with_boolean_false_if_argument(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @stream(if:false) + } + } + """ + ) + + def stream_with_two_arguments(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @stream(foo:false,if:false) + } + } + """ + ) + + def stream_with_variable_argument(): + assert_valid( + """ + subscription ($stream: boolean!) 
{ + subscriptionField { + ...myFragment @stream(if:$stream) + } + } + """ + ) + + def other_directive_on_subscription_field(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @foo + } + } + """ + ) From ae91327cc7f9e5e382b9dc9ec00a56939a085b09 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 5 Apr 2024 18:52:22 +0200 Subject: [PATCH 140/230] Minor simplification --- src/graphql/execution/execute.py | 37 ++++++++++++++------------------ 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 6310d33b..58488f8f 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1709,7 +1709,6 @@ async def await_data( awaitable: Awaitable[Dict[str, Any]], ) -> Optional[Dict[str, Any]]: # noinspection PyShadowingNames - try: return await awaitable except GraphQLError as error: @@ -2607,16 +2606,14 @@ async def wait(self) -> Optional[Dict[str, Any]]: if self.parent_context: await self.parent_context.completed.wait() _data = self._data - try: - data = ( - await _data # type: ignore - if self._context.is_awaitable(_data) - else _data - ) - finally: - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.data = data - self.completed.set() + data = ( + await _data # type: ignore + if self._context.is_awaitable(_data) + else _data + ) + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.completed.set() + self.data = data return data def add_data(self, data: AwaitableOrValue[Optional[Dict[str, Any]]]) -> None: @@ -2680,16 +2677,14 @@ async def wait(self) -> Optional[List[str]]: if self.parent_context: await self.parent_context.completed.wait() _items = self._items - try: - items = ( - await _items # type: ignore - if self._context.is_awaitable(_items) - else _items - ) - finally: - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.items = items - self.completed.set() + items = 
( + await _items # type: ignore + if self._context.is_awaitable(_items) + else _items + ) + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.items = items + self.completed.set() return items def add_items(self, items: AwaitableOrValue[Optional[List[Any]]]) -> None: From 891586dd3583d9676363a6d7d8ce957e35b16393 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 5 Apr 2024 20:46:05 +0200 Subject: [PATCH 141/230] Original `execute` should throw if defer/stream directives are present Replicates graphql/graphql-js@522f4950cea3bff53c919e0b3bca295c5696a618 --- src/graphql/execution/execute.py | 19 ++++++++++++------ tests/execution/test_defer.py | 15 +++++--------- tests/execution/test_executor.py | 34 ++++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 16 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 58488f8f..35bddba4 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1984,6 +1984,13 @@ async def yield_subsequent_payloads( break +UNEXPECTED_EXPERIMENTAL_DIRECTIVES = ( + "The provided schema unexpectedly contains experimental directives" + " (@defer or @stream). These directives may only be utilized" + " if experimental execution features are explicitly enabled." +) + + UNEXPECTED_MULTIPLE_PAYLOADS = ( "Executing this GraphQL operation would unexpectedly produce multiple payloads" " (due to @defer or @stream directive)" @@ -2016,10 +2023,12 @@ def execute( This function does not support incremental delivery (`@defer` and `@stream`). If an operation that defers or streams data is executed with this function, - it will throw or resolve to an object containing an error instead. - Use `experimental_execute_incrementally` if you want to support incremental - delivery. + it will throw an error instead. Use `experimental_execute_incrementally` if + you want to support incremental delivery. 
""" + if schema.get_directive("defer") or schema.get_directive("stream"): + raise GraphQLError(UNEXPECTED_EXPERIMENTAL_DIRECTIVES) + result = experimental_execute_incrementally( schema, document, @@ -2043,9 +2052,7 @@ async def await_result() -> Any: awaited_result = await result if isinstance(awaited_result, ExecutionResult): return awaited_result - return ExecutionResult( - None, errors=[GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)] - ) + raise GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS) return await_result() diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 866a1c13..ff17c9f0 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -962,15 +962,10 @@ async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync """ ) - result = await execute(schema, document, {}) # type: ignore + with pytest.raises(GraphQLError) as exc_info: + await execute(schema, document, {}) # type: ignore - assert result == ( - None, - [ - { - "message": "Executing this GraphQL operation would unexpectedly" - " produce multiple payloads" - " (due to @defer or @stream directive)" - } - ], + assert str(exc_info.value) == ( + "Executing this GraphQL operation would unexpectedly produce" + " multiple payloads (due to @defer or @stream directive)" ) diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 61f4ba62..fd80051b 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -9,6 +9,7 @@ from graphql.type import ( GraphQLArgument, GraphQLBoolean, + GraphQLDeferDirective, GraphQLField, GraphQLInt, GraphQLInterfaceType, @@ -18,6 +19,7 @@ GraphQLResolveInfo, GraphQLScalarType, GraphQLSchema, + GraphQLStreamDirective, GraphQLString, GraphQLUnionType, ResponsePath, @@ -786,6 +788,38 @@ class Data: result = execute_sync(schema, document, Data(), operation_name="S") assert result == ({"a": "b"}, None) + def 
errors_when_using_original_execute_with_schemas_including_experimental_defer(): + schema = GraphQLSchema( + query=GraphQLObjectType("Q", {"a": GraphQLField(GraphQLString)}), + directives=[GraphQLDeferDirective], + ) + document = parse("query Q { a }") + + with pytest.raises(GraphQLError) as exc_info: + execute(schema, document) + + assert str(exc_info.value) == ( + "The provided schema unexpectedly contains experimental directives" + " (@defer or @stream). These directives may only be utilized" + " if experimental execution features are explicitly enabled." + ) + + def errors_when_using_original_execute_with_schemas_including_experimental_stream(): + schema = GraphQLSchema( + query=GraphQLObjectType("Q", {"a": GraphQLField(GraphQLString)}), + directives=[GraphQLStreamDirective], + ) + document = parse("query Q { a }") + + with pytest.raises(GraphQLError) as exc_info: + execute(schema, document) + + assert str(exc_info.value) == ( + "The provided schema unexpectedly contains experimental directives" + " (@defer or @stream). These directives may only be utilized" + " if experimental execution features are explicitly enabled." 
+ ) + def resolves_to_an_error_if_schema_does_not_support_operation(): schema = GraphQLSchema(assume_valid=True) From ae0aff36121bd84091cf320a886b56036b859fe4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 5 Apr 2024 21:54:50 +0200 Subject: [PATCH 142/230] Fix ambiguity around when schema definition may be omitted Replicates graphql/graphql-js@f201681bf806a2c46c4ee8b2533287421327a302 --- src/graphql/utilities/print_schema.py | 74 +++++++++++++----------- tests/utilities/test_build_ast_schema.py | 17 +++++- tests/utilities/test_print_schema.py | 6 +- tests/utils/__init__.py | 4 ++ tests/utils/viral_schema.py | 34 +++++++++++ tests/utils/viral_sdl.py | 21 +++++++ 6 files changed, 121 insertions(+), 35 deletions(-) create mode 100644 tests/utils/viral_schema.py create mode 100644 tests/utils/viral_sdl.py diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index b3a5ba23..a5d2dfc7 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -70,51 +70,59 @@ def print_filtered_schema( def print_schema_definition(schema: GraphQLSchema) -> Optional[str]: """Print GraphQL schema definitions.""" - if schema.description is None and is_schema_of_common_names(schema): - return None - - operation_types = [] - query_type = schema.query_type - if query_type: - operation_types.append(f" query: {query_type.name}") - mutation_type = schema.mutation_type - if mutation_type: - operation_types.append(f" mutation: {mutation_type.name}") - subscription_type = schema.subscription_type - if subscription_type: - operation_types.append(f" subscription: {subscription_type.name}") - return print_description(schema) + "schema {\n" + "\n".join(operation_types) + "\n}" + # Special case: When a schema has no root operation types, no valid schema + # definition can be printed. 
+ if not query_type and not mutation_type and not subscription_type: + return None + + # Only print a schema definition if there is a description or if it should + # not be omitted because of having default type names. + if schema.description or not has_default_root_operation_types(schema): + return ( + print_description(schema) + + "schema {\n" + + (f" query: {query_type.name}\n" if query_type else "") + + (f" mutation: {mutation_type.name}\n" if mutation_type else "") + + ( + f" subscription: {subscription_type.name}\n" + if subscription_type + else "" + ) + + "}" + ) + + return None -def is_schema_of_common_names(schema: GraphQLSchema) -> bool: - """Check whether this schema uses the common naming convention. +def has_default_root_operation_types(schema: GraphQLSchema) -> bool: + """Check whether a schema uses the default root operation type names. GraphQL schema define root types for each type of operation. These types are the same as any other type and can be named in any manner, however there is a common - naming convention: + naming convention:: - schema { - query: Query - mutation: Mutation - subscription: Subscription - } + schema { + query: Query + mutation: Mutation + subscription: Subscription + } - When using this naming convention, the schema description can be omitted. - """ - query_type = schema.query_type - if query_type and query_type.name != "Query": - return False - - mutation_type = schema.mutation_type - if mutation_type and mutation_type.name != "Mutation": - return False + When using this naming convention, the schema description can be omitted so + long as these names are only used for operation types. - subscription_type = schema.subscription_type - return not subscription_type or subscription_type.name == "Subscription" + Note however that if any of these default names are used elsewhere in the + schema but not as a root operation type, the schema definition must still + be printed to avoid ambiguity. 
+ """ + return ( + schema.query_type is schema.get_type("Query") + and schema.mutation_type is schema.get_type("Mutation") + and schema.subscription_type is schema.get_type("Subscription") + ) def print_type(type_: GraphQLNamedType) -> str: diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index 2d65d858..816a3898 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -38,7 +38,7 @@ from ..fixtures import big_schema_sdl # noqa: F401 from ..star_wars_schema import star_wars_schema -from ..utils import dedent +from ..utils import dedent, viral_sdl try: from typing import TypeAlias @@ -1188,6 +1188,21 @@ def throws_on_unknown_types(): build_schema(sdl, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") + def correctly_processes_viral_schema(): + schema = build_schema(viral_sdl) + query_type = schema.query_type + assert isinstance(query_type, GraphQLNamedType) + assert query_type.name == "Query" + virus_type = schema.get_type("Virus") + assert isinstance(virus_type, GraphQLNamedType) + assert virus_type.name == "Virus" + mutation_type = schema.get_type("Mutation") + assert isinstance(mutation_type, GraphQLNamedType) + assert mutation_type.name == "Mutation" + # Though the viral schema has a 'Mutation' type, it is not used for the + # 'mutation' operation. 
+ assert schema.mutation_type is None + def describe_deepcopy_and_pickle(): # pragma: no cover sdl = print_schema(star_wars_schema) diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index ac3cbc42..34258d49 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -27,7 +27,7 @@ print_value, ) -from ..utils import dedent +from ..utils import dedent, viral_schema, viral_sdl def expect_printed_schema(schema: GraphQLSchema) -> str: @@ -865,6 +865,10 @@ def prints_introspection_schema(): ''' # noqa: E501 ) + def prints_viral_schema_correctly(): + printed = print_schema(viral_schema) + assert printed == viral_sdl + def describe_print_value(): def print_value_convenience_function(): diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 80f3620c..6ae4a6e5 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -4,10 +4,14 @@ from .assert_matching_values import assert_matching_values from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings +from .viral_schema import viral_schema +from .viral_sdl import viral_sdl __all__ = [ "assert_matching_values", "assert_equal_awaitables_or_values", "dedent", "gen_fuzz_strings", + "viral_schema", + "viral_sdl", ] diff --git a/tests/utils/viral_schema.py b/tests/utils/viral_schema.py new file mode 100644 index 00000000..57ebf703 --- /dev/null +++ b/tests/utils/viral_schema.py @@ -0,0 +1,34 @@ +from graphql import GraphQLSchema +from graphql.type import ( + GraphQLField, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLString, +) + +__all__ = ["viral_schema"] + +Mutation = GraphQLObjectType( + "Mutation", + { + "name": GraphQLField(GraphQLNonNull(GraphQLString)), + "geneSequence": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + +Virus = GraphQLObjectType( + "Virus", + { + "name": GraphQLField(GraphQLNonNull(GraphQLString)), + "knownMutations": GraphQLField( + 
GraphQLNonNull(GraphQLList(GraphQLNonNull(Mutation))) + ), + }, +) + +Query = GraphQLObjectType( + "Query", {"viruses": GraphQLField(GraphQLList(GraphQLNonNull(Virus)))} +) + +viral_schema = GraphQLSchema(Query) diff --git a/tests/utils/viral_sdl.py b/tests/utils/viral_sdl.py new file mode 100644 index 00000000..dd7afc84 --- /dev/null +++ b/tests/utils/viral_sdl.py @@ -0,0 +1,21 @@ +__all__ = ["viral_sdl"] + +viral_sdl = """ +schema { + query: Query +} + +type Query { + viruses: [Virus!] +} + +type Virus { + name: String! + knownMutations: [Mutation!]! +} + +type Mutation { + name: String! + geneSequence: String! +} +""".strip() From 066617bde8d9dabf403063f05b842a988d03cecd Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 6 Apr 2024 22:48:37 +0200 Subject: [PATCH 143/230] Enforce ruff rules UP006 and UP007 --- pyproject.toml | 1 - src/graphql/execution/execute.py | 461 ++++++++++++------------ src/graphql/language/ast.py | 146 ++++---- src/graphql/pyutils/path.py | 12 +- src/graphql/pyutils/simple_pub_sub.py | 8 +- src/graphql/pyutils/undefined.py | 3 +- src/graphql/type/definition.py | 317 ++++++++-------- src/graphql/type/directives.py | 32 +- src/graphql/type/schema.py | 82 ++--- src/graphql/utilities/type_info.py | 40 +- tests/execution/test_union_interface.py | 22 +- 11 files changed, 555 insertions(+), 569 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2e407b6e..12d48c10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -154,7 +154,6 @@ ignore = [ "PLR2004", # allow some "magic" values "PYI034", # do not check return value of new method "TID252", # allow relative imports - "UP006", "UP007", # use old type annotations (for now) "TRY003", # allow specific messages outside the exception class ] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 35bddba4..ead2b520 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -12,7 +12,6 @@ AsyncIterator, Awaitable, 
Callable, - Dict, Generator, Iterable, Iterator, @@ -20,9 +19,7 @@ NamedTuple, Optional, Sequence, - Set, Tuple, - Type, Union, cast, ) @@ -145,9 +142,9 @@ async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 class FormattedExecutionResult(TypedDict, total=False): """Formatted execution result""" - data: Optional[Dict[str, Any]] - errors: List[GraphQLFormattedError] - extensions: Dict[str, Any] + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + extensions: dict[str, Any] class ExecutionResult: @@ -160,15 +157,15 @@ class ExecutionResult: __slots__ = "data", "errors", "extensions" - data: Optional[Dict[str, Any]] - errors: Optional[List[GraphQLError]] - extensions: Optional[Dict[str, Any]] + data: dict[str, Any] | None + errors: list[GraphQLError] | None + extensions: dict[str, Any] | None def __init__( self, - data: Optional[Dict[str, Any]] = None, - errors: Optional[List[GraphQLError]] = None, - extensions: Optional[Dict[str, Any]] = None, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors @@ -219,31 +216,31 @@ def __ne__(self, other: object) -> bool: class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" - data: Optional[Dict[str, Any]] - errors: List[GraphQLFormattedError] - path: List[Union[str, int]] + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + path: list[str | int] label: str - extensions: Dict[str, Any] + extensions: dict[str, Any] class IncrementalDeferResult: """Incremental deferred execution result""" - data: Optional[Dict[str, Any]] - errors: Optional[List[GraphQLError]] - path: Optional[List[Union[str, int]]] - label: Optional[str] - extensions: Optional[Dict[str, Any]] + data: dict[str, Any] | None + errors: list[GraphQLError] | None + path: list[str | int] | None + label: str | None + extensions: dict[str, Any] | 
None __slots__ = "data", "errors", "path", "label", "extensions" def __init__( self, - data: Optional[Dict[str, Any]] = None, - errors: Optional[List[GraphQLError]] = None, - path: Optional[List[Union[str, int]]] = None, - label: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + path: list[str | int] | None = None, + label: str | None = None, + extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors @@ -253,7 +250,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: List[str] = [f"data={self.data!r}, errors={self.errors!r}"] + args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] if self.path: args.append(f"path={self.path!r}") if self.label: @@ -312,31 +309,31 @@ def __ne__(self, other: object) -> bool: class FormattedIncrementalStreamResult(TypedDict, total=False): """Formatted incremental stream execution result""" - items: Optional[List[Any]] - errors: List[GraphQLFormattedError] - path: List[Union[str, int]] + items: list[Any] | None + errors: list[GraphQLFormattedError] + path: list[str | int] label: str - extensions: Dict[str, Any] + extensions: dict[str, Any] class IncrementalStreamResult: """Incremental streamed execution result""" - items: Optional[List[Any]] - errors: Optional[List[GraphQLError]] - path: Optional[List[Union[str, int]]] - label: Optional[str] - extensions: Optional[Dict[str, Any]] + items: list[Any] | None + errors: list[GraphQLError] | None + path: list[str | int] | None + label: str | None + extensions: dict[str, Any] | None __slots__ = "items", "errors", "path", "label", "extensions" def __init__( self, - items: Optional[List[Any]] = None, - errors: Optional[List[GraphQLError]] = None, - path: Optional[List[Union[str, int]]] = None, - label: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, + items: list[Any] | None = None, + errors: 
list[GraphQLError] | None = None, + path: list[str | int] | None = None, + label: str | None = None, + extensions: dict[str, Any] | None = None, ) -> None: self.items = items self.errors = errors @@ -346,7 +343,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: List[str] = [f"items={self.items!r}, errors={self.errors!r}"] + args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"] if self.path: args.append(f"path={self.path!r}") if self.label: @@ -412,11 +409,11 @@ def __ne__(self, other: object) -> bool: class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): """Formatted initial incremental execution result""" - data: Optional[Dict[str, Any]] - errors: List[GraphQLFormattedError] + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] hasNext: bool - incremental: List[FormattedIncrementalResult] - extensions: Dict[str, Any] + incremental: list[FormattedIncrementalResult] + extensions: dict[str, Any] class InitialIncrementalExecutionResult: @@ -426,21 +423,21 @@ class InitialIncrementalExecutionResult: - ``incremental`` is a list of the results from defer/stream directives. 
""" - data: Optional[Dict[str, Any]] - errors: Optional[List[GraphQLError]] - incremental: Optional[Sequence[IncrementalResult]] + data: dict[str, Any] | None + errors: list[GraphQLError] | None + incremental: Sequence[IncrementalResult] | None has_next: bool - extensions: Optional[Dict[str, Any]] + extensions: dict[str, Any] | None __slots__ = "data", "errors", "has_next", "incremental", "extensions" def __init__( self, - data: Optional[Dict[str, Any]] = None, - errors: Optional[List[GraphQLError]] = None, - incremental: Optional[Sequence[IncrementalResult]] = None, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + incremental: Sequence[IncrementalResult] | None = None, has_next: bool = False, - extensions: Optional[Dict[str, Any]] = None, + extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors @@ -450,7 +447,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: List[str] = [f"data={self.data!r}, errors={self.errors!r}"] + args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] if self.incremental: args.append(f"incremental[{len(self.incremental)}]") if self.has_next: @@ -515,9 +512,9 @@ def __ne__(self, other: object) -> bool: class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): """Formatted subsequent incremental execution result""" - incremental: List[FormattedIncrementalResult] + incremental: list[FormattedIncrementalResult] hasNext: bool - extensions: Dict[str, Any] + extensions: dict[str, Any] class SubsequentIncrementalExecutionResult: @@ -529,15 +526,15 @@ class SubsequentIncrementalExecutionResult: __slots__ = "has_next", "incremental", "extensions" - incremental: Optional[Sequence[IncrementalResult]] + incremental: Sequence[IncrementalResult] | None has_next: bool - extensions: Optional[Dict[str, Any]] + extensions: dict[str, Any] | None def __init__( self, - incremental: Optional[Sequence[IncrementalResult]] = None, + 
incremental: Sequence[IncrementalResult] | None = None, has_next: bool = False, - extensions: Optional[Dict[str, Any]] = None, + extensions: dict[str, Any] | None = None, ) -> None: self.incremental = incremental self.has_next = has_next @@ -545,7 +542,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: List[str] = [] + args: list[str] = [] if self.incremental: args.append(f"incremental[{len(self.incremental)}]") if self.has_next: @@ -600,7 +597,7 @@ class StreamArguments(NamedTuple): """Arguments of the stream directive""" initial_count: int - label: Optional[str] + label: str | None class ExperimentalIncrementalExecutionResults(NamedTuple): @@ -621,17 +618,17 @@ class ExecutionContext: """ schema: GraphQLSchema - fragments: Dict[str, FragmentDefinitionNode] + fragments: dict[str, FragmentDefinitionNode] root_value: Any context_value: Any operation: OperationDefinitionNode - variable_values: Dict[str, Any] + variable_values: dict[str, Any] field_resolver: GraphQLFieldResolver type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver - errors: List[GraphQLError] - subsequent_payloads: Dict[AsyncPayloadRecord, None] # used as ordered set - middleware_manager: Optional[MiddlewareManager] + errors: list[GraphQLError] + subsequent_payloads: dict[AsyncPayloadRecord, None] # used as ordered set + middleware_manager: MiddlewareManager | None is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( default_is_awaitable # type: ignore @@ -640,18 +637,18 @@ class ExecutionContext: def __init__( self, schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], + fragments: dict[str, FragmentDefinitionNode], root_value: Any, context_value: Any, operation: OperationDefinitionNode, - variable_values: Dict[str, Any], + variable_values: dict[str, Any], field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, - subsequent_payloads: 
Dict[AsyncPayloadRecord, None], - errors: List[GraphQLError], - middleware_manager: Optional[MiddlewareManager], - is_awaitable: Optional[Callable[[Any], bool]], + subsequent_payloads: dict[AsyncPayloadRecord, None], + errors: list[GraphQLError], + middleware_manager: MiddlewareManager | None, + is_awaitable: Callable[[Any], bool] | None, ) -> None: self.schema = schema self.fragments = fragments @@ -667,8 +664,8 @@ def __init__( self.middleware_manager = middleware_manager if is_awaitable: self.is_awaitable = is_awaitable - self._canceled_iterators: Set[AsyncIterator] = set() - self._subfields_cache: Dict[Tuple, FieldsAndPatches] = {} + self._canceled_iterators: set[AsyncIterator] = set() + self._subfields_cache: dict[tuple, FieldsAndPatches] = {} @classmethod def build( @@ -677,14 +674,14 @@ def build( document: DocumentNode, root_value: Any = None, context_value: Any = None, - raw_variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - middleware: Optional[Middleware] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, - ) -> Union[List[GraphQLError], ExecutionContext]: + raw_variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + middleware: Middleware | None = None, + is_awaitable: Callable[[Any], bool] | None = None, + ) -> list[GraphQLError] | ExecutionContext: """Build an execution context Constructs a ExecutionContext object from the arguments passed to execute, which @@ -697,9 +694,9 @@ def build( # If the schema used for execution is invalid, raise an error. 
assert_valid_schema(schema) - operation: Optional[OperationDefinitionNode] = None - fragments: Dict[str, FragmentDefinitionNode] = {} - middleware_manager: Optional[MiddlewareManager] = None + operation: OperationDefinitionNode | None = None + fragments: dict[str, FragmentDefinitionNode] = {} + middleware_manager: MiddlewareManager | None = None if middleware is not None: if isinstance(middleware, (list, tuple)): middleware_manager = MiddlewareManager(*middleware) @@ -762,7 +759,7 @@ def build( @staticmethod def build_response( - data: Optional[Dict[str, Any]], errors: List[GraphQLError] + data: dict[str, Any] | None, errors: list[GraphQLError] ) -> ExecutionResult: """Build response. @@ -796,7 +793,7 @@ def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: self.is_awaitable, ) - def execute_operation(self) -> AwaitableOrValue[Dict[str, Any]]: + def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: """Execute an operation. Implements the "Executing operations" section of the spec. @@ -839,9 +836,9 @@ def execute_fields_serially( self, parent_type: GraphQLObjectType, source_value: Any, - path: Optional[Path], - fields: Dict[str, List[FieldNode]], - ) -> AwaitableOrValue[Dict[str, Any]]: + path: Path | None, + fields: dict[str, list[FieldNode]], + ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. 
Implements the "Executing selection sets" section of the spec @@ -850,8 +847,8 @@ def execute_fields_serially( is_awaitable = self.is_awaitable def reducer( - results: Dict[str, Any], field_item: Tuple[str, List[FieldNode]] - ) -> AwaitableOrValue[Dict[str, Any]]: + results: dict[str, Any], field_item: tuple[str, list[FieldNode]] + ) -> AwaitableOrValue[dict[str, Any]]: response_name, field_nodes = field_item field_path = Path(path, response_name, parent_type.name) result = self.execute_field( @@ -864,7 +861,7 @@ def reducer( async def set_result( response_name: str, awaitable_result: Awaitable, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: results[response_name] = await awaitable_result return results @@ -879,10 +876,10 @@ def execute_fields( self, parent_type: GraphQLObjectType, source_value: Any, - path: Optional[Path], - fields: Dict[str, List[FieldNode]], - async_payload_record: Optional[AsyncPayloadRecord] = None, - ) -> AwaitableOrValue[Dict[str, Any]]: + path: Path | None, + fields: dict[str, list[FieldNode]], + async_payload_record: AsyncPayloadRecord | None = None, + ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. Implements the "Executing selection sets" section of the spec @@ -890,7 +887,7 @@ def execute_fields( """ results = {} is_awaitable = self.is_awaitable - awaitable_fields: List[str] = [] + awaitable_fields: list[str] = [] append_awaitable = awaitable_fields.append for response_name, field_nodes in fields.items(): field_path = Path(path, response_name, parent_type.name) @@ -910,7 +907,7 @@ def execute_fields( # field, which is possibly a coroutine object. Return a coroutine object that # will yield this same map, but with any coroutines awaited in parallel and # replaced with the values they yielded. - async def get_results() -> Dict[str, Any]: + async def get_results() -> dict[str, Any]: if len(awaitable_fields) == 1: # If there is only one field, avoid the overhead of parallelization. 
field = awaitable_fields[0] @@ -930,9 +927,9 @@ def execute_field( self, parent_type: GraphQLObjectType, source: Any, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], path: Path, - async_payload_record: Optional[AsyncPayloadRecord] = None, + async_payload_record: AsyncPayloadRecord | None = None, ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. @@ -999,7 +996,7 @@ async def await_completed() -> Any: def build_resolve_info( self, field_def: GraphQLField, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], parent_type: GraphQLObjectType, path: Path, ) -> GraphQLResolveInfo: @@ -1027,11 +1024,11 @@ def build_resolve_info( def complete_value( self, return_type: GraphQLOutputType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: Optional[AsyncPayloadRecord], + async_payload_record: AsyncPayloadRecord | None, ) -> AwaitableOrValue[Any]: """Complete a value. @@ -1116,11 +1113,11 @@ def complete_value( async def complete_awaitable_value( self, return_type: GraphQLOutputType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: Optional[AsyncPayloadRecord] = None, + async_payload_record: AsyncPayloadRecord | None = None, ) -> Any: """Complete an awaitable value.""" try: @@ -1146,8 +1143,8 @@ async def complete_awaitable_value( return completed def get_stream_values( - self, field_nodes: List[FieldNode], path: Path - ) -> Optional[StreamArguments]: + self, field_nodes: list[FieldNode], path: Path + ) -> StreamArguments | None: """Get stream values. 
Returns an object containing the `@stream` arguments if a field should be @@ -1185,12 +1182,12 @@ def get_stream_values( async def complete_async_iterator_value( self, item_type: GraphQLOutputType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, path: Path, iterator: AsyncIterator[Any], - async_payload_record: Optional[AsyncPayloadRecord], - ) -> List[Any]: + async_payload_record: AsyncPayloadRecord | None, + ) -> list[Any]: """Complete an async iterator. Complete an async iterator value by completing the result and calling @@ -1199,9 +1196,9 @@ async def complete_async_iterator_value( errors = async_payload_record.errors if async_payload_record else self.errors stream = self.get_stream_values(field_nodes, path) complete_list_item_value = self.complete_list_item_value - awaitable_indices: List[int] = [] + awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append - completed_results: List[Any] = [] + completed_results: list[Any] = [] index = 0 while True: if ( @@ -1272,12 +1269,12 @@ async def complete_async_iterator_value( def complete_list_value( self, return_type: GraphQLList[GraphQLOutputType], - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, path: Path, - result: Union[AsyncIterable[Any], Iterable[Any]], - async_payload_record: Optional[AsyncPayloadRecord], - ) -> AwaitableOrValue[List[Any]]: + result: AsyncIterable[Any] | Iterable[Any], + async_payload_record: AsyncPayloadRecord | None, + ) -> AwaitableOrValue[list[Any]]: """Complete a list value. Complete a list value by completing each item in the list with the inner type. @@ -1305,10 +1302,10 @@ def complete_list_value( # the list contains no coroutine objects by avoiding creating another coroutine # object. 
complete_list_item_value = self.complete_list_item_value - awaitable_indices: List[int] = [] + awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append previous_async_payload_record = async_payload_record - completed_results: List[Any] = [] + completed_results: list[Any] = [] for index, item in enumerate(result): # No need to modify the info object containing the path, since from here on # it is not ever accessed by resolver functions. @@ -1347,7 +1344,7 @@ def complete_list_value( return completed_results # noinspection PyShadowingNames - async def get_completed_results() -> List[Any]: + async def get_completed_results() -> list[Any]: if len(awaitable_indices) == 1: # If there is only one index, avoid the overhead of parallelization. index = awaitable_indices[0] @@ -1367,13 +1364,13 @@ async def get_completed_results() -> List[Any]: def complete_list_item_value( self, item: Any, - complete_results: List[Any], - errors: List[GraphQLError], + complete_results: list[Any], + errors: list[GraphQLError], item_type: GraphQLOutputType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, item_path: Path, - async_payload_record: Optional[AsyncPayloadRecord], + async_payload_record: AsyncPayloadRecord | None, ) -> bool: """Complete a list item value by adding it to the completed results. @@ -1445,11 +1442,11 @@ def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: def complete_abstract_value( self, return_type: GraphQLAbstractType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: Optional[AsyncPayloadRecord], + async_payload_record: AsyncPayloadRecord | None, ) -> AwaitableOrValue[Any]: """Complete an abstract value. 
@@ -1499,7 +1496,7 @@ def ensure_valid_runtime_type( self, runtime_type_name: Any, return_type: GraphQLAbstractType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, result: Any, ) -> GraphQLObjectType: @@ -1560,12 +1557,12 @@ def ensure_valid_runtime_type( def complete_object_value( self, return_type: GraphQLObjectType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: Optional[AsyncPayloadRecord], - ) -> AwaitableOrValue[Dict[str, Any]]: + async_payload_record: AsyncPayloadRecord | None, + ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current # result. If `is_type_of()` returns False, then raise an error rather than @@ -1575,7 +1572,7 @@ def complete_object_value( if self.is_awaitable(is_type_of): - async def execute_subfields_async() -> Dict[str, Any]: + async def execute_subfields_async() -> dict[str, Any]: if not await is_type_of: # type: ignore raise invalid_return_type_error( return_type, result, field_nodes @@ -1596,11 +1593,11 @@ async def execute_subfields_async() -> Dict[str, Any]: def collect_and_execute_subfields( self, return_type: GraphQLObjectType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], path: Path, result: Any, - async_payload_record: Optional[AsyncPayloadRecord], - ) -> AwaitableOrValue[Dict[str, Any]]: + async_payload_record: AsyncPayloadRecord | None, + ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" sub_field_nodes, sub_patches = self.collect_subfields(return_type, field_nodes) @@ -1622,7 +1619,7 @@ def collect_and_execute_subfields( return sub_fields def collect_subfields( - self, return_type: GraphQLObjectType, field_nodes: List[FieldNode] + self, return_type: GraphQLObjectType, field_nodes: list[FieldNode] ) -> 
FieldsAndPatches: """Collect subfields. @@ -1658,8 +1655,8 @@ def collect_subfields( return sub_fields_and_patches def map_source_to_response( - self, result_or_stream: Union[ExecutionResult, AsyncIterable[Any]] - ) -> Union[AsyncGenerator[ExecutionResult, None], ExecutionResult]: + self, result_or_stream: ExecutionResult | AsyncIterable[Any] + ) -> AsyncGenerator[ExecutionResult, None] | ExecutionResult: """Map source result to response. For each payload yielded from a subscription, @@ -1691,10 +1688,10 @@ def execute_deferred_fragment( self, parent_type: GraphQLObjectType, source_value: Any, - fields: Dict[str, List[FieldNode]], - label: Optional[str] = None, - path: Optional[Path] = None, - parent_context: Optional[AsyncPayloadRecord] = None, + fields: dict[str, list[FieldNode]], + label: str | None = None, + path: Path | None = None, + parent_context: AsyncPayloadRecord | None = None, ) -> None: """Execute deferred fragment.""" async_payload_record = DeferredFragmentRecord(label, path, parent_context, self) @@ -1706,8 +1703,8 @@ def execute_deferred_fragment( if self.is_awaitable(awaitable_or_data): async def await_data( - awaitable: Awaitable[Dict[str, Any]], - ) -> Optional[Dict[str, Any]]: + awaitable: Awaitable[dict[str, Any]], + ) -> dict[str, Any] | None: # noinspection PyShadowingNames try: return await awaitable @@ -1727,11 +1724,11 @@ def execute_stream_field( path: Path, item_path: Path, item: AwaitableOrValue[Any], - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, item_type: GraphQLOutputType, - label: Optional[str] = None, - parent_context: Optional[AsyncPayloadRecord] = None, + label: str | None = None, + parent_context: AsyncPayloadRecord | None = None, ) -> AsyncPayloadRecord: """Execute stream field.""" is_awaitable = self.is_awaitable @@ -1742,7 +1739,7 @@ def execute_stream_field( if is_awaitable(item): # noinspection PyShadowingNames - async def await_completed_items() -> Optional[List[Any]]: + async 
def await_completed_items() -> list[Any] | None: try: return [ await self.complete_awaitable_value( @@ -1777,7 +1774,7 @@ async def await_completed_items() -> Optional[List[Any]]: if is_awaitable(completed_item): # noinspection PyShadowingNames - async def await_completed_items() -> Optional[List[Any]]: + async def await_completed_items() -> list[Any] | None: # noinspection PyShadowingNames try: try: @@ -1820,7 +1817,7 @@ async def await_completed_items() -> Optional[List[Any]]: async def execute_stream_iterator_item( self, iterator: AsyncIterator[Any], - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], info: GraphQLResolveInfo, item_type: GraphQLOutputType, async_payload_record: StreamRecord, @@ -1854,12 +1851,12 @@ async def execute_stream_iterator( self, initial_index: int, iterator: AsyncIterator[Any], - field_modes: List[FieldNode], + field_modes: list[FieldNode], info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, - label: Optional[str] = None, - parent_context: Optional[AsyncPayloadRecord] = None, + label: str | None = None, + parent_context: AsyncPayloadRecord | None = None, ) -> None: """Execute stream iterator.""" index = initial_index @@ -1906,7 +1903,7 @@ async def execute_stream_iterator( def filter_subsequent_payloads( self, null_path: Path, - current_async_record: Optional[AsyncPayloadRecord] = None, + current_async_record: AsyncPayloadRecord | None = None, ) -> None: """Filter subsequent payloads.""" null_path_list = null_path.as_list() @@ -1922,9 +1919,9 @@ def filter_subsequent_payloads( self._canceled_iterators.add(async_record.iterator) del self.subsequent_payloads[async_record] - def get_completed_incremental_results(self) -> List[IncrementalResult]: + def get_completed_incremental_results(self) -> list[IncrementalResult]: """Get completed incremental results.""" - incremental_results: List[IncrementalResult] = [] + incremental_results: list[IncrementalResult] = [] append_result = incremental_results.append 
subsequent_payloads = list(self.subsequent_payloads) for async_payload_record in subsequent_payloads: @@ -2002,14 +1999,14 @@ def execute( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, + is_awaitable: Callable[[Any], bool] | None = None, ) -> AwaitableOrValue[ExecutionResult]: """Execute a GraphQL operation. 
@@ -2062,15 +2059,15 @@ def experimental_execute_incrementally( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, -) -> AwaitableOrValue[Union[ExecutionResult, ExperimentalIncrementalExecutionResults]]: + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, + is_awaitable: Callable[[Any], bool] | None = None, +) -> AwaitableOrValue[ExecutionResult | ExperimentalIncrementalExecutionResults]: """Execute GraphQL operation incrementally (internal implementation). Implements the "Executing requests" section of the GraphQL specification, @@ -2109,7 +2106,7 @@ def experimental_execute_incrementally( def execute_impl( context: ExecutionContext, -) -> AwaitableOrValue[Union[ExecutionResult, ExperimentalIncrementalExecutionResults]]: +) -> AwaitableOrValue[ExecutionResult | ExperimentalIncrementalExecutionResults]: """Execute GraphQL operation (internal implementation).""" # Return a possible coroutine object that will eventually yield the data described # by the "Response" section of the GraphQL specification. 
@@ -2177,12 +2174,12 @@ def execute_sync( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, check_sync: bool = False, ) -> ExecutionResult: """Execute a GraphQL operation synchronously. @@ -2228,7 +2225,7 @@ def execute_sync( def handle_field_error( - error: GraphQLError, return_type: GraphQLOutputType, errors: List[GraphQLError] + error: GraphQLError, return_type: GraphQLOutputType, errors: list[GraphQLError] ) -> None: """Handle error properly according to the field type.""" # If the field type is non-nullable, then it is resolved without any protection @@ -2241,7 +2238,7 @@ def handle_field_error( def invalid_return_type_error( - return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode] + return_type: GraphQLObjectType, result: Any, field_nodes: list[FieldNode] ) -> GraphQLError: """Create a GraphQLError for an invalid return type.""" return GraphQLError( @@ -2250,7 +2247,7 @@ def invalid_return_type_error( ) -def get_typename(value: Any) -> Optional[str]: +def get_typename(value: Any) -> str | None: """Get the ``__typename`` property of the given value.""" if isinstance(value, Mapping): return value.get("__typename") @@ -2264,7 +2261,7 @@ def get_typename(value: Any) -> Optional[str]: def default_type_resolver( value: Any, info: GraphQLResolveInfo, abstract_type: GraphQLAbstractType -) -> AwaitableOrValue[Optional[str]]: 
+) -> AwaitableOrValue[str | None]: """Default type resolver function. If a resolve_type function is not given, then a default resolve behavior is used @@ -2285,9 +2282,9 @@ def default_type_resolver( # Otherwise, test each possible type. possible_types = info.schema.get_possible_types(abstract_type) is_awaitable = info.is_awaitable - awaitable_is_type_of_results: List[Awaitable] = [] + awaitable_is_type_of_results: list[Awaitable] = [] append_awaitable_results = awaitable_is_type_of_results.append - awaitable_types: List[GraphQLObjectType] = [] + awaitable_types: list[GraphQLObjectType] = [] append_awaitable_types = awaitable_types.append for type_ in possible_types: @@ -2302,7 +2299,7 @@ def default_type_resolver( if awaitable_is_type_of_results: # noinspection PyShadowingNames - async def get_type() -> Optional[str]: + async def get_type() -> str | None: is_type_of_results = await gather(*awaitable_is_type_of_results) for is_type_of_result, type_ in zip(is_type_of_results, awaitable_types): if is_type_of_result: @@ -2342,13 +2339,13 @@ def subscribe( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + execution_context_class: type[ExecutionContext] | None = None, +) -> AwaitableOrValue[AsyncIterator[ExecutionResult] | ExecutionResult]: """Create a GraphQL subscription. 
Implements the "Subscribe" algorithm described in the GraphQL spec. @@ -2416,13 +2413,13 @@ def create_source_event_stream( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, -) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + execution_context_class: type[ExecutionContext] | None = None, +) -> AwaitableOrValue[AsyncIterable[Any] | ExecutionResult]: """Create source event stream Implements the "CreateSourceEventStream" algorithm described in the GraphQL @@ -2469,7 +2466,7 @@ def create_source_event_stream( def create_source_event_stream_impl( context: ExecutionContext, -) -> AwaitableOrValue[Union[AsyncIterable[Any], ExecutionResult]]: +) -> AwaitableOrValue[AsyncIterable[Any] | ExecutionResult]: """Create source event stream (internal implementation).""" try: event_stream = execute_subscription(context) @@ -2480,7 +2477,7 @@ def create_source_event_stream_impl( awaitable_event_stream = cast(Awaitable, event_stream) # noinspection PyShadowingNames - async def await_event_stream() -> Union[AsyncIterable[Any], ExecutionResult]: + async def await_event_stream() -> AsyncIterable[Any] | ExecutionResult: try: return await awaitable_event_stream except GraphQLError as error: @@ -2567,21 +2564,21 @@ def assert_event_stream(result: Any) -> AsyncIterable: class DeferredFragmentRecord: """A record collecting data marked with the defer directive""" - 
errors: List[GraphQLError] - label: Optional[str] - path: List[Union[str, int]] - data: Optional[Dict[str, Any]] - parent_context: Optional[AsyncPayloadRecord] + errors: list[GraphQLError] + label: str | None + path: list[str | int] + data: dict[str, Any] | None + parent_context: AsyncPayloadRecord | None completed: Event _context: ExecutionContext - _data: AwaitableOrValue[Optional[Dict[str, Any]]] + _data: AwaitableOrValue[dict[str, Any] | None] _data_added: Event def __init__( self, - label: Optional[str], - path: Optional[Path], - parent_context: Optional[AsyncPayloadRecord], + label: str | None, + path: Path | None, + parent_context: AsyncPayloadRecord | None, context: ExecutionContext, ) -> None: self.label = label @@ -2596,7 +2593,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: List[str] = [f"path={self.path!r}"] + args: list[str] = [f"path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") if self.parent_context: @@ -2605,10 +2602,10 @@ def __repr__(self) -> str: args.append("data") return f"{name}({', '.join(args)})" - def __await__(self) -> Generator[Any, None, Optional[Dict[str, Any]]]: + def __await__(self) -> Generator[Any, None, dict[str, Any] | None]: return self.wait().__await__() - async def wait(self) -> Optional[Dict[str, Any]]: + async def wait(self) -> dict[str, Any] | None: """Wait until data is ready.""" if self.parent_context: await self.parent_context.completed.wait() @@ -2623,7 +2620,7 @@ async def wait(self) -> Optional[Dict[str, Any]]: self.data = data return data - def add_data(self, data: AwaitableOrValue[Optional[Dict[str, Any]]]) -> None: + def add_data(self, data: AwaitableOrValue[dict[str, Any] | None]) -> None: """Add data to the record.""" self._data = data self._data_added.set() @@ -2632,24 +2629,24 @@ def add_data(self, data: AwaitableOrValue[Optional[Dict[str, Any]]]) -> None: class StreamRecord: """A record collecting items marked with the stream directive""" - errors: 
List[GraphQLError] - label: Optional[str] - path: List[Union[str, int]] - items: Optional[List[str]] - parent_context: Optional[AsyncPayloadRecord] - iterator: Optional[AsyncIterator[Any]] + errors: list[GraphQLError] + label: str | None + path: list[str | int] + items: list[str] | None + parent_context: AsyncPayloadRecord | None + iterator: AsyncIterator[Any] | None is_completed_iterator: bool completed: Event _context: ExecutionContext - _items: AwaitableOrValue[Optional[List[Any]]] + _items: AwaitableOrValue[list[Any] | None] _items_added: Event def __init__( self, - label: Optional[str], - path: Optional[Path], - iterator: Optional[AsyncIterator[Any]], - parent_context: Optional[AsyncPayloadRecord], + label: str | None, + path: Path | None, + iterator: AsyncIterator[Any] | None, + parent_context: AsyncPayloadRecord | None, context: ExecutionContext, ) -> None: self.label = label @@ -2666,7 +2663,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: List[str] = [f"path={self.path!r}"] + args: list[str] = [f"path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") if self.parent_context: @@ -2675,10 +2672,10 @@ def __repr__(self) -> str: args.append("items") return f"{name}({', '.join(args)})" - def __await__(self) -> Generator[Any, None, Optional[List[str]]]: + def __await__(self) -> Generator[Any, None, list[str] | None]: return self.wait().__await__() - async def wait(self) -> Optional[List[str]]: + async def wait(self) -> list[str] | None: """Wait until data is ready.""" await self._items_added.wait() if self.parent_context: @@ -2694,7 +2691,7 @@ async def wait(self) -> Optional[List[str]]: self.completed.set() return items - def add_items(self, items: AwaitableOrValue[Optional[List[Any]]]) -> None: + def add_items(self, items: AwaitableOrValue[list[Any] | None]) -> None: """Add items to the record.""" self._items = items self._items_added.set() diff --git a/src/graphql/language/ast.py 
b/src/graphql/language/ast.py index 35a06f11..b1df369b 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -4,7 +4,7 @@ from copy import copy, deepcopy from enum import Enum -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Union try: from typing import TypeAlias @@ -103,11 +103,11 @@ class Token: line: int # the 1-indexed line number on which this Token appears column: int # the 1-indexed column number at which this Token begins # for non-punctuation tokens, represents the interpreted value of the token: - value: Optional[str] + value: str | None # Tokens exist as nodes in a double-linked-list amongst all tokens including # ignored tokens. is always the first node and the last. - prev: Optional[Token] - next: Optional[Token] + prev: Token | None + next: Token | None def __init__( self, @@ -116,7 +116,7 @@ def __init__( end: int, line: int, column: int, - value: Optional[str] = None, + value: str | None = None, ) -> None: self.kind = kind self.start, self.end = start, end @@ -166,11 +166,11 @@ def __copy__(self) -> Token: token.prev = self.prev return token - def __deepcopy__(self, memo: Dict) -> Token: + def __deepcopy__(self, memo: dict) -> Token: """Allow only shallow copies to avoid recursion.""" return copy(self) - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Remove the links when pickling. Keeping the links would make pickling a schema too expensive. 
@@ -181,7 +181,7 @@ def __getstate__(self) -> Dict[str, Any]: if key not in {"prev", "next"} } - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Reset the links when un-pickling.""" for key, value in state.items(): setattr(self, key, value) @@ -253,7 +253,7 @@ class OperationType(Enum): # Default map from node kinds to their node attributes (internal) -QUERY_DOCUMENT_KEYS: Dict[str, Tuple[str, ...]] = { +QUERY_DOCUMENT_KEYS: dict[str, tuple[str, ...]] = { "name": (), "document": ("definitions",), "operation_definition": ( @@ -347,10 +347,10 @@ class Node: # allow custom attributes and weak references (not used internally) __slots__ = "__dict__", "__weakref__", "loc", "_hash" - loc: Optional[Location] + loc: Location | None kind: str = "ast" # the kind of the node as a snake_case string - keys: Tuple[str, ...] = ("loc",) # the names of the attributes of this node + keys: tuple[str, ...] = ("loc",) # the names of the attributes of this node def __init__(self, **kwargs: Any) -> None: """Initialize the node with the given keyword arguments.""" @@ -402,7 +402,7 @@ def __copy__(self) -> Node: """Create a shallow copy of the node.""" return self.__class__(**{key: getattr(self, key) for key in self.keys}) - def __deepcopy__(self, memo: Dict) -> Node: + def __deepcopy__(self, memo: dict) -> Node: """Create a deep copy of the node""" # noinspection PyArgumentList return self.__class__( @@ -420,14 +420,14 @@ def __init_subclass__(cls) -> None: if name.endswith("Node"): name = name[:-4] cls.kind = camel_to_snake(name) - keys: List[str] = [] + keys: list[str] = [] for base in cls.__bases__: # noinspection PyUnresolvedReferences keys.extend(base.keys) # type: ignore keys.extend(cls.__slots__) cls.keys = tuple(keys) - def to_dict(self, locations: bool = False) -> Dict: + def to_dict(self, locations: bool = False) -> dict: """Concert node to a dictionary.""" from ..utilities import ast_to_dict @@ -449,7 +449,7 @@ 
class NameNode(Node): class DocumentNode(Node): __slots__ = ("definitions",) - definitions: Tuple[DefinitionNode, ...] + definitions: tuple[DefinitionNode, ...] class DefinitionNode(Node): @@ -459,9 +459,9 @@ class DefinitionNode(Node): class ExecutableDefinitionNode(DefinitionNode): __slots__ = "name", "directives", "variable_definitions", "selection_set" - name: Optional[NameNode] - directives: Tuple[DirectiveNode, ...] - variable_definitions: Tuple[VariableDefinitionNode, ...] + name: NameNode | None + directives: tuple[DirectiveNode, ...] + variable_definitions: tuple[VariableDefinitionNode, ...] selection_set: SelectionSetNode @@ -476,37 +476,37 @@ class VariableDefinitionNode(Node): variable: VariableNode type: TypeNode - default_value: Optional[ConstValueNode] - directives: Tuple[ConstDirectiveNode, ...] + default_value: ConstValueNode | None + directives: tuple[ConstDirectiveNode, ...] class SelectionSetNode(Node): __slots__ = ("selections",) - selections: Tuple[SelectionNode, ...] + selections: tuple[SelectionNode, ...] class SelectionNode(Node): __slots__ = ("directives",) - directives: Tuple[DirectiveNode, ...] + directives: tuple[DirectiveNode, ...] class FieldNode(SelectionNode): __slots__ = "alias", "name", "arguments", "nullability_assertion", "selection_set" - alias: Optional[NameNode] + alias: NameNode | None name: NameNode - arguments: Tuple[ArgumentNode, ...] + arguments: tuple[ArgumentNode, ...] # Note: Client Controlled Nullability is experimental # and may be changed or removed in the future. 
nullability_assertion: NullabilityAssertionNode - selection_set: Optional[SelectionSetNode] + selection_set: SelectionSetNode | None class NullabilityAssertionNode(Node): __slots__ = ("nullability_assertion",) - nullability_assertion: Optional[NullabilityAssertionNode] + nullability_assertion: NullabilityAssertionNode | None class ListNullabilityOperatorNode(NullabilityAssertionNode): @@ -584,7 +584,7 @@ class StringValueNode(ValueNode): __slots__ = "value", "block" value: str - block: Optional[bool] + block: bool | None class BooleanValueNode(ValueNode): @@ -606,21 +606,21 @@ class EnumValueNode(ValueNode): class ListValueNode(ValueNode): __slots__ = ("values",) - values: Tuple[ValueNode, ...] + values: tuple[ValueNode, ...] class ConstListValueNode(ListValueNode): - values: Tuple[ConstValueNode, ...] + values: tuple[ConstValueNode, ...] class ObjectValueNode(ValueNode): __slots__ = ("fields",) - fields: Tuple[ObjectFieldNode, ...] + fields: tuple[ObjectFieldNode, ...] class ConstObjectValueNode(ObjectValueNode): - fields: Tuple[ConstObjectFieldNode, ...] + fields: tuple[ConstObjectFieldNode, ...] class ObjectFieldNode(Node): @@ -653,11 +653,11 @@ class DirectiveNode(Node): __slots__ = "name", "arguments" name: NameNode - arguments: Tuple[ArgumentNode, ...] + arguments: tuple[ArgumentNode, ...] class ConstDirectiveNode(DirectiveNode): - arguments: Tuple[ConstArgumentNode, ...] + arguments: tuple[ConstArgumentNode, ...] # Type Reference @@ -682,7 +682,7 @@ class ListTypeNode(TypeNode): class NonNullTypeNode(TypeNode): __slots__ = ("type",) - type: Union[NamedTypeNode, ListTypeNode] + type: NamedTypeNode | ListTypeNode # Type System Definition @@ -695,9 +695,9 @@ class TypeSystemDefinitionNode(DefinitionNode): class SchemaDefinitionNode(TypeSystemDefinitionNode): __slots__ = "description", "directives", "operation_types" - description: Optional[StringValueNode] - directives: Tuple[ConstDirectiveNode, ...] - operation_types: Tuple[OperationTypeDefinitionNode, ...] 
+ description: StringValueNode | None + directives: tuple[ConstDirectiveNode, ...] + operation_types: tuple[OperationTypeDefinitionNode, ...] class OperationTypeDefinitionNode(Node): @@ -713,80 +713,80 @@ class OperationTypeDefinitionNode(Node): class TypeDefinitionNode(TypeSystemDefinitionNode): __slots__ = "description", "name", "directives" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[DirectiveNode, ...] + directives: tuple[DirectiveNode, ...] class ScalarTypeDefinitionNode(TypeDefinitionNode): __slots__ = () - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] class ObjectTypeDefinitionNode(TypeDefinitionNode): __slots__ = "interfaces", "fields" - interfaces: Tuple[NamedTypeNode, ...] - directives: Tuple[ConstDirectiveNode, ...] - fields: Tuple[FieldDefinitionNode, ...] + interfaces: tuple[NamedTypeNode, ...] + directives: tuple[ConstDirectiveNode, ...] + fields: tuple[FieldDefinitionNode, ...] class FieldDefinitionNode(DefinitionNode): __slots__ = "description", "name", "directives", "arguments", "type" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[ConstDirectiveNode, ...] - arguments: Tuple[InputValueDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + arguments: tuple[InputValueDefinitionNode, ...] type: TypeNode class InputValueDefinitionNode(DefinitionNode): __slots__ = "description", "name", "directives", "type", "default_value" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] type: TypeNode - default_value: Optional[ConstValueNode] + default_value: ConstValueNode | None class InterfaceTypeDefinitionNode(TypeDefinitionNode): __slots__ = "fields", "interfaces" - fields: Tuple[FieldDefinitionNode, ...] 
- directives: Tuple[ConstDirectiveNode, ...] - interfaces: Tuple[NamedTypeNode, ...] + fields: tuple[FieldDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + interfaces: tuple[NamedTypeNode, ...] class UnionTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("types",) - directives: Tuple[ConstDirectiveNode, ...] - types: Tuple[NamedTypeNode, ...] + directives: tuple[ConstDirectiveNode, ...] + types: tuple[NamedTypeNode, ...] class EnumTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("values",) - directives: Tuple[ConstDirectiveNode, ...] - values: Tuple[EnumValueDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + values: tuple[EnumValueDefinitionNode, ...] class EnumValueDefinitionNode(DefinitionNode): __slots__ = "description", "name", "directives" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] class InputObjectTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("fields",) - directives: Tuple[ConstDirectiveNode, ...] - fields: Tuple[InputValueDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + fields: tuple[InputValueDefinitionNode, ...] # Directive Definitions @@ -795,11 +795,11 @@ class InputObjectTypeDefinitionNode(TypeDefinitionNode): class DirectiveDefinitionNode(TypeSystemDefinitionNode): __slots__ = "description", "name", "arguments", "repeatable", "locations" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - arguments: Tuple[InputValueDefinitionNode, ...] + arguments: tuple[InputValueDefinitionNode, ...] repeatable: bool - locations: Tuple[NameNode, ...] + locations: tuple[NameNode, ...] # Type System Extensions @@ -808,8 +808,8 @@ class DirectiveDefinitionNode(TypeSystemDefinitionNode): class SchemaExtensionNode(Node): __slots__ = "directives", "operation_types" - directives: Tuple[ConstDirectiveNode, ...] 
- operation_types: Tuple[OperationTypeDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + operation_types: tuple[OperationTypeDefinitionNode, ...] # Type Extensions @@ -819,7 +819,7 @@ class TypeExtensionNode(TypeSystemDefinitionNode): __slots__ = "name", "directives" name: NameNode - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] TypeSystemExtensionNode: TypeAlias = Union[SchemaExtensionNode, TypeExtensionNode] @@ -832,30 +832,30 @@ class ScalarTypeExtensionNode(TypeExtensionNode): class ObjectTypeExtensionNode(TypeExtensionNode): __slots__ = "interfaces", "fields" - interfaces: Tuple[NamedTypeNode, ...] - fields: Tuple[FieldDefinitionNode, ...] + interfaces: tuple[NamedTypeNode, ...] + fields: tuple[FieldDefinitionNode, ...] class InterfaceTypeExtensionNode(TypeExtensionNode): __slots__ = "interfaces", "fields" - interfaces: Tuple[NamedTypeNode, ...] - fields: Tuple[FieldDefinitionNode, ...] + interfaces: tuple[NamedTypeNode, ...] + fields: tuple[FieldDefinitionNode, ...] class UnionTypeExtensionNode(TypeExtensionNode): __slots__ = ("types",) - types: Tuple[NamedTypeNode, ...] + types: tuple[NamedTypeNode, ...] class EnumTypeExtensionNode(TypeExtensionNode): __slots__ = ("values",) - values: Tuple[EnumValueDefinitionNode, ...] + values: tuple[EnumValueDefinitionNode, ...] class InputObjectTypeExtensionNode(TypeExtensionNode): __slots__ = ("fields",) - fields: Tuple[InputValueDefinitionNode, ...] + fields: tuple[InputValueDefinitionNode, ...] 
diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py index f2212dd3..ff71af4d 100644 --- a/src/graphql/pyutils/path.py +++ b/src/graphql/pyutils/path.py @@ -2,7 +2,7 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, List, NamedTuple, Optional, Union +from typing import Any, NamedTuple __all__ = ["Path"] @@ -12,18 +12,18 @@ class Path(NamedTuple): prev: Any # Optional['Path'] (python/mypy/issues/731) """path with the previous indices""" - key: Union[str, int] + key: str | int """current index in the path (string or integer)""" - typename: Optional[str] + typename: str | None """name of the parent type to avoid path ambiguity""" - def add_key(self, key: Union[str, int], typename: Optional[str] = None) -> Path: + def add_key(self, key: str | int, typename: str | None = None) -> Path: """Return a new Path containing the given key.""" return Path(self, key, typename) - def as_list(self) -> List[Union[str, int]]: + def as_list(self) -> list[str | int]: """Return a list of the path keys.""" - flattened: List[Union[str, int]] = [] + flattened: list[str | int] = [] append = flattened.append curr: Path = self while curr: diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 4b8b0795..b8648165 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -3,7 +3,7 @@ from __future__ import annotations # Python < 3.10 from asyncio import Future, Queue, create_task, get_running_loop, sleep -from typing import Any, AsyncIterator, Callable, Optional, Set +from typing import Any, AsyncIterator, Callable from .is_awaitable import is_awaitable @@ -18,7 +18,7 @@ class SimplePubSub: Useful for mocking a PubSub system for tests. 
""" - subscribers: Set[Callable] + subscribers: set[Callable] def __init__(self) -> None: self.subscribers = set() @@ -32,7 +32,7 @@ def emit(self, event: Any) -> bool: return bool(self.subscribers) def get_subscriber( - self, transform: Optional[Callable] = None + self, transform: Callable | None = None ) -> SimplePubSubIterator: """Return subscriber iterator""" return SimplePubSubIterator(self, transform) @@ -41,7 +41,7 @@ def get_subscriber( class SimplePubSubIterator(AsyncIterator): """Async iterator used for subscriptions.""" - def __init__(self, pubsub: SimplePubSub, transform: Optional[Callable]) -> None: + def __init__(self, pubsub: SimplePubSub, transform: Callable | None) -> None: self.pubsub = pubsub self.transform = transform self.pull_queue: Queue[Future] = Queue() diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index 00382867..d1e21071 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -3,7 +3,6 @@ from __future__ import annotations # Python < 3.10 import warnings -from typing import Optional __all__ = ["Undefined", "UndefinedType"] @@ -11,7 +10,7 @@ class UndefinedType: """Auxiliary class for creating the Undefined singleton.""" - _instance: Optional[UndefinedType] = None + _instance: UndefinedType | None = None def __new__(cls) -> UndefinedType: """Create the Undefined singleton.""" diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 212ab4e6..9551735d 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -10,12 +10,9 @@ Collection, Dict, Generic, - List, Mapping, NamedTuple, Optional, - Tuple, - Type, TypeVar, Union, cast, @@ -224,22 +221,22 @@ class GraphQLNamedTypeKwargs(TypedDict, total=False): """Arguments for GraphQL named types""" name: str - description: Optional[str] - extensions: Dict[str, Any] + description: str | None + extensions: dict[str, Any] # unfortunately, we cannot make the following more specific, 
because they are # used by subclasses with different node types and typed dicts cannot be refined - ast_node: Optional[Any] - extension_ast_nodes: Tuple[Any, ...] + ast_node: Any | None + extension_ast_nodes: tuple[Any, ...] class GraphQLNamedType(GraphQLType): """Base class for all GraphQL named types""" name: str - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[TypeDefinitionNode] - extension_ast_nodes: Tuple[TypeExtensionNode, ...] + description: str | None + extensions: dict[str, Any] + ast_node: TypeDefinitionNode | None + extension_ast_nodes: tuple[TypeExtensionNode, ...] reserved_types: Mapping[str, GraphQLNamedType] = {} @@ -250,11 +247,11 @@ def __new__(cls, name: str, *_args: Any, **_kwargs: Any) -> GraphQLNamedType: raise TypeError(msg) return super().__new__(cls) - def __reduce__(self) -> Tuple[Callable, Tuple]: + def __reduce__(self) -> tuple[Callable, tuple]: return self._get_instance, (self.name, tuple(self.to_kwargs().items())) @classmethod - def _get_instance(cls, name: str, args: Tuple) -> GraphQLNamedType: + def _get_instance(cls, name: str, args: tuple) -> GraphQLNamedType: try: return cls.reserved_types[name] except KeyError: @@ -263,10 +260,10 @@ def _get_instance(cls, name: str, args: Tuple) -> GraphQLNamedType: def __init__( self, name: str, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[TypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[TypeExtensionNode]] = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: TypeDefinitionNode | None = None, + extension_ast_nodes: Collection[TypeExtensionNode] | None = None, ) -> None: assert_name(name) self.name = name @@ -323,10 +320,10 @@ def resolve_thunk(thunk: Thunk[T]) -> T: class GraphQLScalarTypeKwargs(GraphQLNamedTypeKwargs, total=False): """Arguments for GraphQL scalar types""" - serialize: Optional[GraphQLScalarSerializer] - parse_value: 
Optional[GraphQLScalarValueParser] - parse_literal: Optional[GraphQLScalarLiteralParser] - specified_by_url: Optional[str] + serialize: GraphQLScalarSerializer | None + parse_value: GraphQLScalarValueParser | None + parse_literal: GraphQLScalarLiteralParser | None + specified_by_url: str | None class GraphQLScalarType(GraphQLNamedType): @@ -357,21 +354,21 @@ def serialize_odd(value: Any) -> int: """ - specified_by_url: Optional[str] - ast_node: Optional[ScalarTypeDefinitionNode] - extension_ast_nodes: Tuple[ScalarTypeExtensionNode, ...] + specified_by_url: str | None + ast_node: ScalarTypeDefinitionNode | None + extension_ast_nodes: tuple[ScalarTypeExtensionNode, ...] def __init__( self, name: str, - serialize: Optional[GraphQLScalarSerializer] = None, - parse_value: Optional[GraphQLScalarValueParser] = None, - parse_literal: Optional[GraphQLScalarLiteralParser] = None, - description: Optional[str] = None, - specified_by_url: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[ScalarTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[ScalarTypeExtensionNode]] = None, + serialize: GraphQLScalarSerializer | None = None, + parse_value: GraphQLScalarValueParser | None = None, + parse_literal: GraphQLScalarLiteralParser | None = None, + description: str | None = None, + specified_by_url: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: ScalarTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[ScalarTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -420,7 +417,7 @@ def parse_value(value: Any) -> Any: return value def parse_literal( - self, node: ValueNode, variables: Optional[Dict[str, Any]] = None + self, node: ValueNode, variables: dict[str, Any] | None = None ) -> Any: """Parses an externally provided literal value to use as an input. 
@@ -471,13 +468,13 @@ class GraphQLFieldKwargs(TypedDict, total=False): """Arguments for GraphQL fields""" type_: GraphQLOutputType - args: Optional[GraphQLArgumentMap] - resolve: Optional[GraphQLFieldResolver] - subscribe: Optional[GraphQLFieldResolver] - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[FieldDefinitionNode] + args: GraphQLArgumentMap | None + resolve: GraphQLFieldResolver | None + subscribe: GraphQLFieldResolver | None + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: FieldDefinitionNode | None class GraphQLField: @@ -485,23 +482,23 @@ class GraphQLField: type: GraphQLOutputType args: GraphQLArgumentMap - resolve: Optional[GraphQLFieldResolver] - subscribe: Optional[GraphQLFieldResolver] - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[FieldDefinitionNode] + resolve: GraphQLFieldResolver | None + subscribe: GraphQLFieldResolver | None + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: FieldDefinitionNode | None def __init__( self, type_: GraphQLOutputType, - args: Optional[GraphQLArgumentMap] = None, - resolve: Optional[GraphQLFieldResolver] = None, - subscribe: Optional[GraphQLFieldResolver] = None, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[FieldDefinitionNode] = None, + args: GraphQLArgumentMap | None = None, + resolve: GraphQLFieldResolver | None = None, + subscribe: GraphQLFieldResolver | None = None, + description: str | None = None, + deprecation_reason: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: FieldDefinitionNode | None = None, ) -> None: if args: args = { @@ -570,15 +567,15 @@ class GraphQLResolveInfo(NamedTuple, Generic[TContext]): """ field_name: str - field_nodes: 
List[FieldNode] + field_nodes: list[FieldNode] return_type: GraphQLOutputType parent_type: GraphQLObjectType path: Path schema: GraphQLSchema - fragments: Dict[str, FragmentDefinitionNode] + fragments: dict[str, FragmentDefinitionNode] root_value: Any operation: OperationDefinitionNode - variable_values: Dict[str, Any] + variable_values: dict[str, Any] context: TContext is_awaitable: Callable[[Any], bool] except TypeError as error: # pragma: no cover @@ -596,15 +593,15 @@ class GraphQLResolveInfo(NamedTuple): # type: ignore[no-redef] """ field_name: str - field_nodes: List[FieldNode] + field_nodes: list[FieldNode] return_type: GraphQLOutputType parent_type: GraphQLObjectType path: Path schema: GraphQLSchema - fragments: Dict[str, FragmentDefinitionNode] + fragments: dict[str, FragmentDefinitionNode] root_value: Any operation: OperationDefinitionNode - variable_values: Dict[str, Any] + variable_values: dict[str, Any] context: Any is_awaitable: Callable[[Any], bool] @@ -638,11 +635,11 @@ class GraphQLArgumentKwargs(TypedDict, total=False): type_: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None class GraphQLArgument: @@ -650,21 +647,21 @@ class GraphQLArgument: type: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] # for transforming names (extension of GraphQL.js) - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None # for transforming names (extension of GraphQL.js) + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None def __init__( self, type_: GraphQLInputType, 
default_value: Any = Undefined, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - out_name: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InputValueDefinitionNode] = None, + description: str | None = None, + deprecation_reason: str | None = None, + out_name: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InputValueDefinitionNode | None = None, ) -> None: self.type = type_ self.default_value = default_value @@ -710,8 +707,8 @@ class GraphQLObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): """Arguments for GraphQL object types""" fields: GraphQLFieldMap - interfaces: Tuple[GraphQLInterfaceType, ...] - is_type_of: Optional[GraphQLIsTypeOfFn] + interfaces: tuple[GraphQLInterfaceType, ...] + is_type_of: GraphQLIsTypeOfFn | None class GraphQLObjectType(GraphQLNamedType): @@ -742,20 +739,20 @@ class GraphQLObjectType(GraphQLNamedType): """ - is_type_of: Optional[GraphQLIsTypeOfFn] - ast_node: Optional[ObjectTypeDefinitionNode] - extension_ast_nodes: Tuple[ObjectTypeExtensionNode, ...] + is_type_of: GraphQLIsTypeOfFn | None + ast_node: ObjectTypeDefinitionNode | None + extension_ast_nodes: tuple[ObjectTypeExtensionNode, ...] 
def __init__( self, name: str, fields: ThunkMapping[GraphQLField], - interfaces: Optional[ThunkCollection[GraphQLInterfaceType]] = None, - is_type_of: Optional[GraphQLIsTypeOfFn] = None, - extensions: Optional[Dict[str, Any]] = None, - description: Optional[str] = None, - ast_node: Optional[ObjectTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[ObjectTypeExtensionNode]] = None, + interfaces: ThunkCollection[GraphQLInterfaceType] | None = None, + is_type_of: GraphQLIsTypeOfFn | None = None, + extensions: dict[str, Any] | None = None, + description: str | None = None, + ast_node: ObjectTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[ObjectTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -798,7 +795,7 @@ def fields(self) -> GraphQLFieldMap: } @cached_property - def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: + def interfaces(self) -> tuple[GraphQLInterfaceType, ...]: """Get provided interfaces.""" try: interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( @@ -828,8 +825,8 @@ class GraphQLInterfaceTypeKwargs(GraphQLNamedTypeKwargs, total=False): """Arguments for GraphQL interface types""" fields: GraphQLFieldMap - interfaces: Tuple[GraphQLInterfaceType, ...] - resolve_type: Optional[GraphQLTypeResolver] + interfaces: tuple[GraphQLInterfaceType, ...] + resolve_type: GraphQLTypeResolver | None class GraphQLInterfaceType(GraphQLNamedType): @@ -847,20 +844,20 @@ class GraphQLInterfaceType(GraphQLNamedType): }) """ - resolve_type: Optional[GraphQLTypeResolver] - ast_node: Optional[InterfaceTypeDefinitionNode] - extension_ast_nodes: Tuple[InterfaceTypeExtensionNode, ...] + resolve_type: GraphQLTypeResolver | None + ast_node: InterfaceTypeDefinitionNode | None + extension_ast_nodes: tuple[InterfaceTypeExtensionNode, ...] 
def __init__( self, name: str, fields: ThunkMapping[GraphQLField], - interfaces: Optional[ThunkCollection[GraphQLInterfaceType]] = None, - resolve_type: Optional[GraphQLTypeResolver] = None, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InterfaceTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[InterfaceTypeExtensionNode]] = None, + interfaces: ThunkCollection[GraphQLInterfaceType] | None = None, + resolve_type: GraphQLTypeResolver | None = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InterfaceTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[InterfaceTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -903,7 +900,7 @@ def fields(self) -> GraphQLFieldMap: } @cached_property - def interfaces(self) -> Tuple[GraphQLInterfaceType, ...]: + def interfaces(self) -> tuple[GraphQLInterfaceType, ...]: """Get provided interfaces.""" try: interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( @@ -932,8 +929,8 @@ def assert_interface_type(type_: Any) -> GraphQLInterfaceType: class GraphQLUnionTypeKwargs(GraphQLNamedTypeKwargs, total=False): """Arguments for GraphQL union types""" - types: Tuple[GraphQLObjectType, ...] - resolve_type: Optional[GraphQLTypeResolver] + types: tuple[GraphQLObjectType, ...] + resolve_type: GraphQLTypeResolver | None class GraphQLUnionType(GraphQLNamedType): @@ -954,19 +951,19 @@ def resolve_type(obj, _info, _type): PetType = GraphQLUnionType('Pet', [DogType, CatType], resolve_type) """ - resolve_type: Optional[GraphQLTypeResolver] - ast_node: Optional[UnionTypeDefinitionNode] - extension_ast_nodes: Tuple[UnionTypeExtensionNode, ...] + resolve_type: GraphQLTypeResolver | None + ast_node: UnionTypeDefinitionNode | None + extension_ast_nodes: tuple[UnionTypeExtensionNode, ...] 
def __init__( self, name: str, types: ThunkCollection[GraphQLObjectType], - resolve_type: Optional[GraphQLTypeResolver] = None, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[UnionTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[UnionTypeExtensionNode]] = None, + resolve_type: GraphQLTypeResolver | None = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: UnionTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[UnionTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -989,7 +986,7 @@ def __copy__(self) -> GraphQLUnionType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property - def types(self) -> Tuple[GraphQLObjectType, ...]: + def types(self) -> tuple[GraphQLObjectType, ...]: """Get provided types.""" try: types: Collection[GraphQLObjectType] = resolve_thunk(self._types) @@ -1020,7 +1017,7 @@ class GraphQLEnumTypeKwargs(GraphQLNamedTypeKwargs, total=False): """Arguments for GraphQL enum types""" values: GraphQLEnumValueMap - names_as_values: Optional[bool] + names_as_values: bool | None class GraphQLEnumType(GraphQLNamedType): @@ -1058,18 +1055,18 @@ class RGBEnum(enum.Enum): """ values: GraphQLEnumValueMap - ast_node: Optional[EnumTypeDefinitionNode] - extension_ast_nodes: Tuple[EnumTypeExtensionNode, ...] + ast_node: EnumTypeDefinitionNode | None + extension_ast_nodes: tuple[EnumTypeExtensionNode, ...] 
def __init__( self, name: str, - values: Union[GraphQLEnumValueMap, Mapping[str, Any], Type[Enum]], - names_as_values: Optional[bool] = False, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[EnumTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[EnumTypeExtensionNode]] = None, + values: GraphQLEnumValueMap | Mapping[str, Any] | type[Enum], + names_as_values: bool | None = False, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: EnumTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[EnumTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -1118,9 +1115,9 @@ def __copy__(self) -> GraphQLEnumType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property - def _value_lookup(self) -> Dict[Any, str]: + def _value_lookup(self) -> dict[Any, str]: # use first value or name as lookup - lookup: Dict[Any, str] = {} + lookup: dict[Any, str] = {} for name, enum_value in self.values.items(): value = enum_value.value if value is None or value is Undefined: @@ -1165,7 +1162,7 @@ def parse_value(self, input_value: str) -> Any: raise GraphQLError(msg) def parse_literal( - self, value_node: ValueNode, _variables: Optional[Dict[str, Any]] = None + self, value_node: ValueNode, _variables: dict[str, Any] | None = None ) -> Any: """Parse literal value.""" # Note: variables will be resolved before calling this method. 
@@ -1211,28 +1208,28 @@ class GraphQLEnumValueKwargs(TypedDict, total=False): """Arguments for GraphQL enum values""" value: Any - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[EnumValueDefinitionNode] + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: EnumValueDefinitionNode | None class GraphQLEnumValue: """A GraphQL enum value.""" value: Any - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[EnumValueDefinitionNode] + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: EnumValueDefinitionNode | None def __init__( self, value: Any = None, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[EnumValueDefinitionNode] = None, + description: str | None = None, + deprecation_reason: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: EnumValueDefinitionNode | None = None, ) -> None: self.value = value self.description = description @@ -1271,7 +1268,7 @@ class GraphQLInputObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): """Arguments for GraphQL input object types""" fields: GraphQLInputFieldMap - out_type: Optional[GraphQLInputFieldOutType] + out_type: GraphQLInputFieldOutType | None class GraphQLInputObjectType(GraphQLNamedType): @@ -1299,18 +1296,18 @@ class GeoPoint(GraphQLInputObjectType): converted to other types by specifying an ``out_type`` function or class. """ - ast_node: Optional[InputObjectTypeDefinitionNode] - extension_ast_nodes: Tuple[InputObjectTypeExtensionNode, ...] + ast_node: InputObjectTypeDefinitionNode | None + extension_ast_nodes: tuple[InputObjectTypeExtensionNode, ...] 
def __init__( self, name: str, fields: ThunkMapping[GraphQLInputField], - description: Optional[str] = None, - out_type: Optional[GraphQLInputFieldOutType] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InputObjectTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[InputObjectTypeExtensionNode]] = None, + description: str | None = None, + out_type: GraphQLInputFieldOutType | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InputObjectTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[InputObjectTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -1324,7 +1321,7 @@ def __init__( self.out_type = out_type # type: ignore @staticmethod - def out_type(value: Dict[str, Any]) -> Any: + def out_type(value: dict[str, Any]) -> Any: """Transform outbound values (this is an extension of GraphQL.js). This default implementation passes values unaltered as dictionaries. @@ -1380,11 +1377,11 @@ class GraphQLInputFieldKwargs(TypedDict, total=False): type_: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None class GraphQLInputField: @@ -1392,21 +1389,21 @@ class GraphQLInputField: type: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] # for transforming names (extension of GraphQL.js) - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None # for transforming names (extension of GraphQL.js) + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None def __init__( self, type_: 
GraphQLInputType, default_value: Any = Undefined, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - out_name: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InputValueDefinitionNode] = None, + description: str | None = None, + deprecation_reason: str | None = None, + out_name: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InputValueDefinitionNode | None = None, ) -> None: self.type = type_ self.default_value = default_value @@ -1656,8 +1653,8 @@ def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: def get_nullable_type( - type_: Optional[Union[GraphQLNullableType, GraphQLNonNull]], -) -> Optional[GraphQLNullableType]: + type_: GraphQLNullableType | GraphQLNonNull | None, +) -> GraphQLNullableType | None: """Unwrap possible non-null type""" if is_non_null_type(type_): type_ = type_.of_type @@ -1702,7 +1699,7 @@ def get_named_type(type_: GraphQLType) -> GraphQLNamedType: ... -def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: +def get_named_type(type_: GraphQLType | None) -> GraphQLNamedType | None: """Unwrap possible wrapping type""" if type_: unwrapped_type = type_ diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 7966f377..b8068d0c 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -2,7 +2,7 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, Collection, Dict, Optional, Tuple, cast +from typing import Any, Collection, cast from ..language import DirectiveLocation, ast from ..pyutils import inspect @@ -41,12 +41,12 @@ class GraphQLDirectiveKwargs(TypedDict, total=False): """Arguments for GraphQL directives""" name: str - locations: Tuple[DirectiveLocation, ...] - args: Dict[str, GraphQLArgument] + locations: tuple[DirectiveLocation, ...] 
+ args: dict[str, GraphQLArgument] is_repeatable: bool - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.DirectiveDefinitionNode] + description: str | None + extensions: dict[str, Any] + ast_node: ast.DirectiveDefinitionNode | None class GraphQLDirective: @@ -57,22 +57,22 @@ class GraphQLDirective: """ name: str - locations: Tuple[DirectiveLocation, ...] + locations: tuple[DirectiveLocation, ...] is_repeatable: bool - args: Dict[str, GraphQLArgument] - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.DirectiveDefinitionNode] + args: dict[str, GraphQLArgument] + description: str | None + extensions: dict[str, Any] + ast_node: ast.DirectiveDefinitionNode | None def __init__( self, name: str, locations: Collection[DirectiveLocation], - args: Optional[Dict[str, GraphQLArgument]] = None, + args: dict[str, GraphQLArgument] | None = None, is_repeatable: bool = False, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[ast.DirectiveDefinitionNode] = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: ast.DirectiveDefinitionNode | None = None, ) -> None: assert_name(name) try: @@ -261,7 +261,7 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Exposes a URL that specifies the behaviour of this scalar.", ) -specified_directives: Tuple[GraphQLDirective, ...] = ( +specified_directives: tuple[GraphQLDirective, ...] 
= ( GraphQLIncludeDirective, GraphQLSkipDirective, GraphQLDeprecatedDirective, diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 4fa7d233..47155ed8 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -8,11 +8,7 @@ Any, Collection, Dict, - List, NamedTuple, - Optional, - Set, - Tuple, cast, ) @@ -59,22 +55,22 @@ class InterfaceImplementations(NamedTuple): - objects: List[GraphQLObjectType] - interfaces: List[GraphQLInterfaceType] + objects: list[GraphQLObjectType] + interfaces: list[GraphQLInterfaceType] class GraphQLSchemaKwargs(TypedDict, total=False): """Arguments for GraphQL schemas""" - query: Optional[GraphQLObjectType] - mutation: Optional[GraphQLObjectType] - subscription: Optional[GraphQLObjectType] - types: Optional[Tuple[GraphQLNamedType, ...]] - directives: Tuple[GraphQLDirective, ...] - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.SchemaDefinitionNode] - extension_ast_nodes: Tuple[ast.SchemaExtensionNode, ...] + query: GraphQLObjectType | None + mutation: GraphQLObjectType | None + subscription: GraphQLObjectType | None + types: tuple[GraphQLNamedType, ...] | None + directives: tuple[GraphQLDirective, ...] + description: str | None + extensions: dict[str, Any] + ast_node: ast.SchemaDefinitionNode | None + extension_ast_nodes: tuple[ast.SchemaExtensionNode, ...] assume_valid: bool @@ -128,31 +124,31 @@ class GraphQLSchema: directives=specified_directives + [my_custom_directive]) """ - query_type: Optional[GraphQLObjectType] - mutation_type: Optional[GraphQLObjectType] - subscription_type: Optional[GraphQLObjectType] + query_type: GraphQLObjectType | None + mutation_type: GraphQLObjectType | None + subscription_type: GraphQLObjectType | None type_map: TypeMap - directives: Tuple[GraphQLDirective, ...] - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.SchemaDefinitionNode] - extension_ast_nodes: Tuple[ast.SchemaExtensionNode, ...] 
+ directives: tuple[GraphQLDirective, ...] + description: str | None + extensions: dict[str, Any] + ast_node: ast.SchemaDefinitionNode | None + extension_ast_nodes: tuple[ast.SchemaExtensionNode, ...] - _implementations_map: Dict[str, InterfaceImplementations] - _sub_type_map: Dict[str, Set[str]] - _validation_errors: Optional[List[GraphQLError]] + _implementations_map: dict[str, InterfaceImplementations] + _sub_type_map: dict[str, set[str]] + _validation_errors: list[GraphQLError] | None def __init__( self, - query: Optional[GraphQLObjectType] = None, - mutation: Optional[GraphQLObjectType] = None, - subscription: Optional[GraphQLObjectType] = None, - types: Optional[Collection[GraphQLNamedType]] = None, - directives: Optional[Collection[GraphQLDirective]] = None, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[ast.SchemaDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[ast.SchemaExtensionNode]] = None, + query: GraphQLObjectType | None = None, + mutation: GraphQLObjectType | None = None, + subscription: GraphQLObjectType | None = None, + types: Collection[GraphQLNamedType] | None = None, + directives: Collection[GraphQLDirective] | None = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: ast.SchemaDefinitionNode | None = None, + extension_ast_nodes: Collection[ast.SchemaExtensionNode] | None = None, assume_valid: bool = False, ) -> None: """Initialize GraphQL schema. @@ -212,7 +208,7 @@ def __init__( self._sub_type_map = {} # Keep track of all implementations by interface name. 
- implementations_map: Dict[str, InterfaceImplementations] = {} + implementations_map: dict[str, InterfaceImplementations] = {} self._implementations_map = implementations_map for named_type in all_referenced_types: @@ -278,7 +274,7 @@ def to_kwargs(self) -> GraphQLSchemaKwargs: def __copy__(self) -> GraphQLSchema: # pragma: no cover return self.__class__(**self.to_kwargs()) - def __deepcopy__(self, memo_: Dict) -> GraphQLSchema: + def __deepcopy__(self, memo_: dict) -> GraphQLSchema: from ..type import ( is_introspection_type, is_specified_directive, @@ -312,17 +308,17 @@ def __deepcopy__(self, memo_: Dict) -> GraphQLSchema: assume_valid=True, ) - def get_root_type(self, operation: OperationType) -> Optional[GraphQLObjectType]: + def get_root_type(self, operation: OperationType) -> GraphQLObjectType | None: """Get the root type.""" return getattr(self, f"{operation.value}_type") - def get_type(self, name: str) -> Optional[GraphQLNamedType]: + def get_type(self, name: str) -> GraphQLNamedType | None: """Get the type with the given name.""" return self.type_map.get(name) def get_possible_types( self, abstract_type: GraphQLAbstractType - ) -> List[GraphQLObjectType]: + ) -> list[GraphQLObjectType]: """Get list of all possible concrete types for given abstract type.""" return ( abstract_type.types @@ -364,7 +360,7 @@ def is_sub_type( self._sub_type_map[abstract_type.name] = types return maybe_sub_type.name in types - def get_directive(self, name: str) -> Optional[GraphQLDirective]: + def get_directive(self, name: str) -> GraphQLDirective | None: """Get the directive with the given name.""" for directive in self.directives: if directive.name == name: @@ -373,7 +369,7 @@ def get_directive(self, name: str) -> Optional[GraphQLDirective]: def get_field( self, parent_type: GraphQLCompositeType, field_name: str - ) -> Optional[GraphQLField]: + ) -> GraphQLField | None: """Get field of a given type with the given name. 
This method looks up the field on the given type definition. @@ -401,7 +397,7 @@ def get_field( return None @property - def validation_errors(self) -> Optional[List[GraphQLError]]: + def validation_errors(self) -> list[GraphQLError] | None: """Get validation errors.""" return self._validation_errors diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 2057c87f..2926112a 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -2,7 +2,7 @@ from __future__ import annotations # Python < 3.10 -from typing import Any, Callable, List, Optional +from typing import Any, Callable, Optional from ..language import ( ArgumentNode, @@ -67,8 +67,8 @@ class TypeInfo: def __init__( self, schema: GraphQLSchema, - initial_type: Optional[GraphQLType] = None, - get_field_def_fn: Optional[GetFieldDefFn] = None, + initial_type: GraphQLType | None = None, + get_field_def_fn: GetFieldDefFn | None = None, ) -> None: """Initialize the TypeInfo for the given GraphQL schema. @@ -78,14 +78,14 @@ def __init__( The optional last parameter is deprecated and will be removed in v3.3. 
""" self._schema = schema - self._type_stack: List[Optional[GraphQLOutputType]] = [] - self._parent_type_stack: List[Optional[GraphQLCompositeType]] = [] - self._input_type_stack: List[Optional[GraphQLInputType]] = [] - self._field_def_stack: List[Optional[GraphQLField]] = [] - self._default_value_stack: List[Any] = [] - self._directive: Optional[GraphQLDirective] = None - self._argument: Optional[GraphQLArgument] = None - self._enum_value: Optional[GraphQLEnumValue] = None + self._type_stack: list[GraphQLOutputType | None] = [] + self._parent_type_stack: list[GraphQLCompositeType | None] = [] + self._input_type_stack: list[GraphQLInputType | None] = [] + self._field_def_stack: list[GraphQLField | None] = [] + self._default_value_stack: list[Any] = [] + self._directive: GraphQLDirective | None = None + self._argument: GraphQLArgument | None = None + self._enum_value: GraphQLEnumValue | None = None self._get_field_def: GetFieldDefFn = get_field_def_fn or get_field_def if initial_type: if is_input_type(initial_type): @@ -95,27 +95,27 @@ def __init__( if is_output_type(initial_type): self._type_stack.append(initial_type) - def get_type(self) -> Optional[GraphQLOutputType]: + def get_type(self) -> GraphQLOutputType | None: if self._type_stack: return self._type_stack[-1] return None - def get_parent_type(self) -> Optional[GraphQLCompositeType]: + def get_parent_type(self) -> GraphQLCompositeType | None: if self._parent_type_stack: return self._parent_type_stack[-1] return None - def get_input_type(self) -> Optional[GraphQLInputType]: + def get_input_type(self) -> GraphQLInputType | None: if self._input_type_stack: return self._input_type_stack[-1] return None - def get_parent_input_type(self) -> Optional[GraphQLInputType]: + def get_parent_input_type(self) -> GraphQLInputType | None: if len(self._input_type_stack) > 1: return self._input_type_stack[-2] return None - def get_field_def(self) -> Optional[GraphQLField]: + def get_field_def(self) -> GraphQLField | None: if 
self._field_def_stack: return self._field_def_stack[-1] return None @@ -125,13 +125,13 @@ def get_default_value(self) -> Any: return self._default_value_stack[-1] return None - def get_directive(self) -> Optional[GraphQLDirective]: + def get_directive(self) -> GraphQLDirective | None: return self._directive - def get_argument(self) -> Optional[GraphQLArgument]: + def get_argument(self) -> GraphQLArgument | None: return self._argument - def get_enum_value(self) -> Optional[GraphQLEnumValue]: + def get_enum_value(self) -> GraphQLEnumValue | None: return self._enum_value def enter(self, node: Node) -> None: @@ -262,7 +262,7 @@ def leave_enum_value(self) -> None: def get_field_def( schema: GraphQLSchema, parent_type: GraphQLCompositeType, field_node: FieldNode -) -> Optional[GraphQLField]: +) -> GraphQLField | None: return schema.get_field(parent_type, field_node.name.value) diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py index efccd669..1adcd8af 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -1,7 +1,5 @@ from __future__ import annotations # Python < 3.10 -from typing import List, Optional, Union - from graphql.execution import execute_sync from graphql.language import parse from graphql.type import ( @@ -19,9 +17,9 @@ class Dog: name: str barks: bool - mother: Optional[Dog] - father: Optional[Dog] - progeny: List[Dog] + mother: Dog | None + father: Dog | None + progeny: list[Dog] def __init__(self, name: str, barks: bool): self.name = name @@ -34,9 +32,9 @@ def __init__(self, name: str, barks: bool): class Cat: name: str meows: bool - mother: Optional[Cat] - father: Optional[Cat] - progeny: List[Cat] + mother: Cat | None + father: Cat | None + progeny: list[Cat] def __init__(self, name: str, meows: bool): self.name = name @@ -48,14 +46,14 @@ def __init__(self, name: str, meows: bool): class Person: name: str - pets: Optional[List[Union[Dog, Cat]]] - friends: 
Optional[List[Union[Dog, Cat, Person]]] + pets: list[Dog | Cat] | None + friends: list[Dog | Cat | Person] | None def __init__( self, name: str, - pets: Optional[List[Union[Dog, Cat]]] = None, - friends: Optional[List[Union[Dog, Cat, Person]]] = None, + pets: list[Dog | Cat] | None = None, + friends: list[Dog | Cat | Person] | None = None, ): self.name = name self.pets = pets From 10a2d8a936bd1a65dc521158760f22d81a3a6b50 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 6 Apr 2024 23:10:08 +0200 Subject: [PATCH 144/230] More modernization of typing annotations in src --- src/graphql/error/graphql_error.py | 36 +++--- src/graphql/error/located_error.py | 8 +- src/graphql/error/syntax_error.py | 2 +- src/graphql/execution/async_iterables.py | 2 +- src/graphql/execution/collect_fields.py | 58 ++++----- src/graphql/execution/execute.py | 2 +- src/graphql/execution/middleware.py | 10 +- src/graphql/execution/values.py | 32 ++--- src/graphql/graphql.py | 50 ++++---- src/graphql/language/ast.py | 2 +- src/graphql/language/block_string.py | 6 +- src/graphql/language/lexer.py | 12 +- src/graphql/language/location.py | 2 +- src/graphql/language/parser.py | 66 +++++----- src/graphql/language/predicates.py | 4 +- src/graphql/language/print_location.py | 12 +- src/graphql/language/printer.py | 16 ++- src/graphql/language/source.py | 2 + src/graphql/language/visitor.py | 36 +++--- src/graphql/pyutils/async_reduce.py | 8 +- src/graphql/pyutils/awaitable_or_value.py | 2 + src/graphql/pyutils/cached_property.py | 2 + src/graphql/pyutils/description.py | 6 +- src/graphql/pyutils/did_you_mean.py | 6 +- src/graphql/pyutils/format_list.py | 2 + src/graphql/pyutils/group_by.py | 8 +- src/graphql/pyutils/identity_func.py | 2 + src/graphql/pyutils/inspect.py | 8 +- src/graphql/pyutils/is_awaitable.py | 2 + src/graphql/pyutils/is_iterable.py | 2 + src/graphql/pyutils/merge_kwargs.py | 2 + src/graphql/pyutils/natural_compare.py | 5 +- src/graphql/pyutils/path.py | 2 +- 
src/graphql/pyutils/print_path_list.py | 5 +- src/graphql/pyutils/simple_pub_sub.py | 2 +- src/graphql/pyutils/suggestion_list.py | 12 +- src/graphql/pyutils/undefined.py | 2 +- src/graphql/type/definition.py | 2 +- src/graphql/type/directives.py | 2 +- src/graphql/type/introspection.py | 2 + src/graphql/type/scalars.py | 2 + src/graphql/type/schema.py | 2 +- src/graphql/type/validate.py | 50 ++++---- src/graphql/utilities/ast_from_value.py | 6 +- src/graphql/utilities/ast_to_dict.py | 16 +-- src/graphql/utilities/build_ast_schema.py | 6 +- src/graphql/utilities/build_client_schema.py | 23 ++-- src/graphql/utilities/coerce_input_value.py | 12 +- src/graphql/utilities/concat_ast.py | 2 + src/graphql/utilities/extend_schema.py | 117 ++++++++---------- .../utilities/find_breaking_changes.py | 66 +++++----- .../utilities/get_introspection_query.py | 49 ++++---- src/graphql/utilities/get_operation_ast.py | 6 +- .../utilities/introspection_from_schema.py | 8 +- .../utilities/lexicographic_sort_schema.py | 32 ++--- src/graphql/utilities/print_schema.py | 28 ++--- src/graphql/utilities/separate_operations.py | 18 +-- src/graphql/utilities/sort_value_node.py | 5 +- .../utilities/strip_ignored_characters.py | 6 +- src/graphql/utilities/type_from_ast.py | 16 +-- src/graphql/utilities/type_info.py | 2 +- src/graphql/utilities/value_from_ast.py | 14 ++- .../utilities/value_from_ast_untyped.py | 44 ++++--- .../validation/rules/custom/no_deprecated.py | 8 +- .../rules/custom/no_schema_introspection.py | 8 +- .../defer_stream_directive_on_root_field.py | 10 +- ...ream_directive_on_valid_operations_rule.py | 8 +- .../rules/executable_definitions.py | 2 + .../rules/fields_on_correct_type.py | 20 +-- .../rules/fragments_on_composite_types.py | 2 + .../validation/rules/known_argument_names.py | 10 +- .../validation/rules/known_directives.py | 16 +-- .../validation/rules/known_fragment_names.py | 8 +- .../validation/rules/known_type_names.py | 12 +- 
.../rules/lone_anonymous_operation.py | 2 + .../rules/lone_schema_definition.py | 8 +- .../validation/rules/no_fragment_cycles.py | 10 +- .../rules/no_undefined_variables.py | 10 +- .../validation/rules/no_unused_fragments.py | 8 +- .../validation/rules/no_unused_variables.py | 12 +- .../rules/overlapping_fields_can_be_merged.py | 84 +++++++------ .../rules/possible_fragment_spreads.py | 10 +- .../rules/possible_type_extensions.py | 6 +- .../rules/provided_required_arguments.py | 12 +- src/graphql/validation/rules/scalar_leafs.py | 8 +- .../rules/single_field_subscriptions.py | 8 +- .../rules/stream_directive_on_list_field.py | 10 +- .../rules/unique_argument_definition_names.py | 2 + .../validation/rules/unique_argument_names.py | 8 +- .../rules/unique_directive_names.py | 6 +- .../rules/unique_directives_per_location.py | 14 ++- .../rules/unique_enum_value_names.py | 6 +- .../rules/unique_field_definition_names.py | 6 +- .../validation/rules/unique_fragment_names.py | 6 +- .../rules/unique_input_field_names.py | 12 +- .../rules/unique_operation_names.py | 6 +- .../rules/unique_operation_types.py | 12 +- .../validation/rules/unique_type_names.py | 6 +- .../validation/rules/unique_variable_names.py | 8 +- .../rules/values_of_correct_type.py | 2 + .../rules/variables_are_input_types.py | 2 + .../rules/variables_in_allowed_position.py | 8 +- src/graphql/validation/specified_rules.py | 11 +- src/graphql/validation/validate.py | 26 ++-- src/graphql/validation/validation_context.py | 83 +++++++------ src/graphql/version.py | 2 +- tests/execution/test_schema.py | 2 +- tests/execution/test_union_interface.py | 2 +- 108 files changed, 856 insertions(+), 638 deletions(-) diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index 2f530660..ff128748 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -1,7 +1,9 @@ """GraphQL Error""" +from __future__ import annotations + from sys import exc_info -from 
typing import TYPE_CHECKING, Any, Collection, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Collection, Dict try: from typing import TypedDict @@ -39,12 +41,12 @@ class GraphQLFormattedError(TypedDict, total=False): message: str # If an error can be associated to a particular point in the requested # GraphQL document, it should contain a list of locations. - locations: List["FormattedSourceLocation"] + locations: list[FormattedSourceLocation] # If an error can be associated to a particular field in the GraphQL result, # it _must_ contain an entry with the key `path` that details the path of # the response field which experienced the error. This allows clients to # identify whether a null result is intentional or caused by a runtime error. - path: List[Union[str, int]] + path: list[str | int] # Reserved for implementors to extend the protocol however they see fit, # and hence there are no additional restrictions on its contents. extensions: GraphQLErrorExtensions @@ -62,7 +64,7 @@ class GraphQLError(Exception): message: str """A message describing the Error for debugging purposes""" - locations: Optional[List["SourceLocation"]] + locations: list[SourceLocation] | None """Source locations A list of (line, column) locations within the source GraphQL document which @@ -73,7 +75,7 @@ class GraphQLError(Exception): the field which produced the error. """ - path: Optional[List[Union[str, int]]] + path: list[str | int] | None """ A list of field names and array indexes describing the JSON-path into the execution @@ -82,27 +84,27 @@ class GraphQLError(Exception): Only included for errors during execution. 
""" - nodes: Optional[List["Node"]] + nodes: list[Node] | None """A list of GraphQL AST Nodes corresponding to this error""" - source: Optional["Source"] + source: Source | None """The source GraphQL document for the first location of this error Note that if this Error represents more than one node, the source may not represent nodes after the first node. """ - positions: Optional[Collection[int]] + positions: Collection[int] | None """Error positions A list of character offsets within the source GraphQL document which correspond to this error. """ - original_error: Optional[Exception] + original_error: Exception | None """The original error thrown from a field resolver during execution""" - extensions: Optional[GraphQLErrorExtensions] + extensions: GraphQLErrorExtensions | None """Extension fields to add to the formatted error""" __slots__ = ( @@ -121,12 +123,12 @@ class GraphQLError(Exception): def __init__( self, message: str, - nodes: Union[Collection["Node"], "Node", None] = None, - source: Optional["Source"] = None, - positions: Optional[Collection[int]] = None, - path: Optional[Collection[Union[str, int]]] = None, - original_error: Optional[Exception] = None, - extensions: Optional[GraphQLErrorExtensions] = None, + nodes: Collection[Node] | Node | None = None, + source: Source | None = None, + positions: Collection[int] | None = None, + path: Collection[str | int] | None = None, + original_error: Exception | None = None, + extensions: GraphQLErrorExtensions | None = None, ) -> None: """Initialize a GraphQLError.""" super().__init__(message) @@ -155,7 +157,7 @@ def __init__( positions = [loc.start for loc in node_locations] self.positions = positions or None if positions and source: - locations: Optional[List[SourceLocation]] = [ + locations: list[SourceLocation] | None = [ source.get_location(pos) for pos in positions ] else: diff --git a/src/graphql/error/located_error.py b/src/graphql/error/located_error.py index 690bcddf..ab665787 100644 --- 
a/src/graphql/error/located_error.py +++ b/src/graphql/error/located_error.py @@ -1,7 +1,9 @@ """Located GraphQL Error""" +from __future__ import annotations + from contextlib import suppress -from typing import TYPE_CHECKING, Collection, Optional, Union +from typing import TYPE_CHECKING, Collection from ..pyutils import inspect from .graphql_error import GraphQLError @@ -14,8 +16,8 @@ def located_error( original_error: Exception, - nodes: Optional[Union[None, Collection["Node"]]] = None, - path: Optional[Collection[Union[str, int]]] = None, + nodes: None | Collection[Node] = None, + path: Collection[str | int] | None = None, ) -> GraphQLError: """Located GraphQL Error diff --git a/src/graphql/error/syntax_error.py b/src/graphql/error/syntax_error.py index 97b61d83..10b6b3df 100644 --- a/src/graphql/error/syntax_error.py +++ b/src/graphql/error/syntax_error.py @@ -1,6 +1,6 @@ """GraphQL Syntax Error""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from typing import TYPE_CHECKING diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py index 305b495f..83d902c0 100644 --- a/src/graphql/execution/async_iterables.py +++ b/src/graphql/execution/async_iterables.py @@ -1,6 +1,6 @@ """Helpers for async iterables""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from contextlib import AbstractAsyncContextManager from typing import ( diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index e7d64fe8..de19aaec 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -1,7 +1,9 @@ """Collect fields""" +from __future__ import annotations + from collections import defaultdict -from typing import Any, Dict, List, NamedTuple, Optional, Set, Union +from typing import Any, NamedTuple from ..language import ( FieldNode, @@ -29,21 +31,21 @@ class PatchFields(NamedTuple): 
"""Optionally labelled set of fields to be used as a patch.""" - label: Optional[str] - fields: Dict[str, List[FieldNode]] + label: str | None + fields: dict[str, list[FieldNode]] class FieldsAndPatches(NamedTuple): """Tuple of collected fields and patches to be applied.""" - fields: Dict[str, List[FieldNode]] - patches: List[PatchFields] + fields: dict[str, list[FieldNode]] + patches: list[PatchFields] def collect_fields( schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], - variable_values: Dict[str, Any], + fragments: dict[str, FragmentDefinitionNode], + variable_values: dict[str, Any], runtime_type: GraphQLObjectType, operation: OperationDefinitionNode, ) -> FieldsAndPatches: @@ -57,8 +59,8 @@ def collect_fields( For internal use only. """ - fields: Dict[str, List[FieldNode]] = defaultdict(list) - patches: List[PatchFields] = [] + fields: dict[str, list[FieldNode]] = defaultdict(list) + patches: list[PatchFields] = [] collect_fields_impl( schema, fragments, @@ -75,11 +77,11 @@ def collect_fields( def collect_subfields( schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], - variable_values: Dict[str, Any], + fragments: dict[str, FragmentDefinitionNode], + variable_values: dict[str, Any], operation: OperationDefinitionNode, return_type: GraphQLObjectType, - field_nodes: List[FieldNode], + field_nodes: list[FieldNode], ) -> FieldsAndPatches: """Collect subfields. @@ -92,10 +94,10 @@ def collect_subfields( For internal use only. 
""" - sub_field_nodes: Dict[str, List[FieldNode]] = defaultdict(list) - visited_fragment_names: Set[str] = set() + sub_field_nodes: dict[str, list[FieldNode]] = defaultdict(list) + visited_fragment_names: set[str] = set() - sub_patches: List[PatchFields] = [] + sub_patches: list[PatchFields] = [] sub_fields_and_patches = FieldsAndPatches(sub_field_nodes, sub_patches) for node in field_nodes: @@ -116,17 +118,17 @@ def collect_subfields( def collect_fields_impl( schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], - variable_values: Dict[str, Any], + fragments: dict[str, FragmentDefinitionNode], + variable_values: dict[str, Any], operation: OperationDefinitionNode, runtime_type: GraphQLObjectType, selection_set: SelectionSetNode, - fields: Dict[str, List[FieldNode]], - patches: List[PatchFields], - visited_fragment_names: Set[str], + fields: dict[str, list[FieldNode]], + patches: list[PatchFields], + visited_fragment_names: set[str], ) -> None: """Collect fields (internal implementation).""" - patch_fields: Dict[str, List[FieldNode]] + patch_fields: dict[str, list[FieldNode]] for selection in selection_set.selections: if isinstance(selection, FieldNode): @@ -216,14 +218,14 @@ def collect_fields_impl( class DeferValues(NamedTuple): """Values of an active defer directive.""" - label: Optional[str] + label: str | None def get_defer_values( operation: OperationDefinitionNode, - variable_values: Dict[str, Any], - node: Union[FragmentSpreadNode, InlineFragmentNode], -) -> Optional[DeferValues]: + variable_values: dict[str, Any], + node: FragmentSpreadNode | InlineFragmentNode, +) -> DeferValues | None: """Get values of defer directive if active. 
Returns an object containing the `@defer` arguments if a field should be @@ -246,8 +248,8 @@ def get_defer_values( def should_include_node( - variable_values: Dict[str, Any], - node: Union[FragmentSpreadNode, FieldNode, InlineFragmentNode], + variable_values: dict[str, Any], + node: FragmentSpreadNode | FieldNode | InlineFragmentNode, ) -> bool: """Check if node should be included @@ -267,7 +269,7 @@ def should_include_node( def does_fragment_condition_match( schema: GraphQLSchema, - fragment: Union[FragmentDefinitionNode, InlineFragmentNode], + fragment: FragmentDefinitionNode | InlineFragmentNode, type_: GraphQLObjectType, ) -> bool: """Determine if a fragment is applicable to the given type.""" diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ead2b520..c28338e6 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1,6 +1,6 @@ """GraphQL execution""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from asyncio import Event, as_completed, ensure_future, gather, shield, sleep, wait_for from collections.abc import Mapping diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index 4a90be68..de99e12b 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -1,8 +1,10 @@ """Middleware manager""" +from __future__ import annotations + from functools import partial, reduce from inspect import isfunction -from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple +from typing import Any, Callable, Iterator try: from typing import TypeAlias @@ -30,8 +32,8 @@ class MiddlewareManager: # allow custom attributes (not used internally) __slots__ = "__dict__", "middlewares", "_middleware_resolvers", "_cached_resolvers" - _cached_resolvers: Dict[GraphQLFieldResolver, GraphQLFieldResolver] - _middleware_resolvers: Optional[List[Callable]] + _cached_resolvers: dict[GraphQLFieldResolver, 
GraphQLFieldResolver] + _middleware_resolvers: list[Callable] | None def __init__(self, *middlewares: Any) -> None: self.middlewares = middlewares @@ -59,7 +61,7 @@ def get_field_resolver( return self._cached_resolvers[field_resolver] -def get_middleware_resolvers(middlewares: Tuple[Any, ...]) -> Iterator[Callable]: +def get_middleware_resolvers(middlewares: tuple[Any, ...]) -> Iterator[Callable]: """Get a list of resolver functions from a list of classes or functions.""" for middleware in middlewares: if isfunction(middleware): diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 640f9ea9..4810a8bd 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -1,6 +1,8 @@ """Helpers for handling values""" -from typing import Any, Callable, Collection, Dict, List, Optional, Union +from __future__ import annotations + +from typing import Any, Callable, Collection, Dict, List, Union from ..error import GraphQLError from ..language import ( @@ -44,8 +46,8 @@ def get_variable_values( schema: GraphQLSchema, var_def_nodes: Collection[VariableDefinitionNode], - inputs: Dict[str, Any], - max_errors: Optional[int] = None, + inputs: dict[str, Any], + max_errors: int | None = None, ) -> CoercedVariableValues: """Get coerced variable values based on provided definitions. @@ -53,7 +55,7 @@ def get_variable_values( variable definitions and arbitrary input. If the input cannot be parsed to match the variable definitions, a GraphQLError will be raised. 
""" - errors: List[GraphQLError] = [] + errors: list[GraphQLError] = [] def on_error(error: GraphQLError) -> None: if max_errors is not None and len(errors) >= max_errors: @@ -77,10 +79,10 @@ def on_error(error: GraphQLError) -> None: def coerce_variable_values( schema: GraphQLSchema, var_def_nodes: Collection[VariableDefinitionNode], - inputs: Dict[str, Any], + inputs: dict[str, Any], on_error: Callable[[GraphQLError], None], -) -> Dict[str, Any]: - coerced_values: Dict[str, Any] = {} +) -> dict[str, Any]: + coerced_values: dict[str, Any] = {} for var_def_node in var_def_nodes: var_name = var_def_node.variable.name.value var_type = type_from_ast(schema, var_def_node.type) @@ -126,7 +128,7 @@ def coerce_variable_values( continue def on_input_value_error( - path: List[Union[str, int]], invalid_value: Any, error: GraphQLError + path: list[str | int], invalid_value: Any, error: GraphQLError ) -> None: invalid_str = inspect(invalid_value) prefix = f"Variable '${var_name}' got invalid value {invalid_str}" # noqa: B023 @@ -148,16 +150,16 @@ def on_input_value_error( def get_argument_values( - type_def: Union[GraphQLField, GraphQLDirective], - node: Union[FieldNode, DirectiveNode], - variable_values: Optional[Dict[str, Any]] = None, -) -> Dict[str, Any]: + type_def: GraphQLField | GraphQLDirective, + node: FieldNode | DirectiveNode, + variable_values: dict[str, Any] | None = None, +) -> dict[str, Any]: """Get coerced argument values based on provided definitions and nodes. Prepares a dict of argument values given a list of argument definitions and list of argument AST nodes. 
""" - coerced_values: Dict[str, Any] = {} + coerced_values: dict[str, Any] = {} arg_node_map = {arg.name.value: arg for arg in node.arguments or []} for name, arg_def in type_def.args.items(): @@ -224,8 +226,8 @@ def get_argument_values( def get_directive_values( directive_def: GraphQLDirective, node: NodeWithDirective, - variable_values: Optional[Dict[str, Any]] = None, -) -> Optional[Dict[str, Any]]: + variable_values: dict[str, Any] | None = None, +) -> dict[str, Any] | None: """Get coerced argument values based on provided nodes. Prepares a dict of argument values given a directive definition and an AST node diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index b1460fd2..aacc7326 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -1,7 +1,9 @@ """Execute a GraphQL operation""" +from __future__ import annotations + from asyncio import ensure_future -from typing import Any, Awaitable, Callable, Dict, Optional, Type, Union, cast +from typing import Any, Awaitable, Callable, cast from .error import GraphQLError from .execution import ExecutionContext, ExecutionResult, Middleware, execute @@ -20,16 +22,16 @@ async def graphql( schema: GraphQLSchema, - source: Union[str, Source], + source: str | Source, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, + is_awaitable: 
Callable[[Any], bool] | None = None, ) -> ExecutionResult: """Execute a GraphQL operation asynchronously. @@ -106,15 +108,15 @@ def assume_not_awaitable(_value: Any) -> bool: def graphql_sync( schema: GraphQLSchema, - source: Union[str, Source], + source: str | Source, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, check_sync: bool = False, ) -> ExecutionResult: """Execute a GraphQL operation synchronously. 
@@ -156,16 +158,16 @@ def graphql_sync( def graphql_impl( schema: GraphQLSchema, - source: Union[str, Source], + source: str | Source, root_value: Any, context_value: Any, - variable_values: Optional[Dict[str, Any]], - operation_name: Optional[str], - field_resolver: Optional[GraphQLFieldResolver], - type_resolver: Optional[GraphQLTypeResolver], - middleware: Optional[Middleware], - execution_context_class: Optional[Type[ExecutionContext]], - is_awaitable: Optional[Callable[[Any], bool]], + variable_values: dict[str, Any] | None, + operation_name: str | None, + field_resolver: GraphQLFieldResolver | None, + type_resolver: GraphQLTypeResolver | None, + middleware: Middleware | None, + execution_context_class: type[ExecutionContext] | None, + is_awaitable: Callable[[Any], bool] | None, ) -> AwaitableOrValue[ExecutionResult]: """Execute a query, return asynchronously only if necessary.""" # Validate Schema diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index b1df369b..5b61767d 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -1,6 +1,6 @@ """GraphQL Abstract Syntax Tree""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from copy import copy, deepcopy from enum import Enum diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index e3b8511e..ef5e1ccf 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -1,7 +1,9 @@ """Helpers for block strings""" +from __future__ import annotations + from sys import maxsize -from typing import Collection, List +from typing import Collection __all__ = [ "dedent_block_string_lines", @@ -10,7 +12,7 @@ ] -def dedent_block_string_lines(lines: Collection[str]) -> List[str]: +def dedent_block_string_lines(lines: Collection[str]) -> list[str]: """Produce the value of a block string from its parsed raw value. 
This function works similar to CoffeeScript's block string, diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index 5c54abbc..2d42f346 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -1,14 +1,18 @@ """GraphQL Lexer""" -from typing import List, NamedTuple, Optional +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple from ..error import GraphQLSyntaxError from .ast import Token from .block_string import dedent_block_string_lines from .character_classes import is_digit, is_name_continue, is_name_start -from .source import Source from .token_kind import TokenKind +if TYPE_CHECKING: + from .source import Source + __all__ = ["Lexer", "is_punctuator_token_kind"] @@ -84,7 +88,7 @@ def print_code_point_at(self, location: int) -> str: return f"U+{point:04X}" def create_token( - self, kind: TokenKind, start: int, end: int, value: Optional[str] = None + self, kind: TokenKind, start: int, end: int, value: str | None = None ) -> Token: """Create a token with line and column location information.""" line = self.line @@ -265,7 +269,7 @@ def read_string(self, start: int) -> Token: body_length = len(body) position = start + 1 chunk_start = position - value: List[str] = [] + value: list[str] = [] append = value.append while position < body_length: diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 6f191964..8b1ee38d 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -1,6 +1,6 @@ """Source locations""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from typing import TYPE_CHECKING, NamedTuple diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 23a69b4a..78d308d0 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -1,7 +1,9 @@ """GraphQL parser""" +from __future__ import annotations + from functools import partial -from 
typing import Callable, List, Mapping, Optional, TypeVar, Union, cast +from typing import Callable, List, Mapping, TypeVar, Union, cast from ..error import GraphQLError, GraphQLSyntaxError from .ast import ( @@ -85,7 +87,7 @@ def parse( source: SourceType, no_location: bool = False, - max_tokens: Optional[int] = None, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, experimental_client_controlled_nullability: bool = False, ) -> DocumentNode: @@ -149,7 +151,7 @@ def parse( def parse_value( source: SourceType, no_location: bool = False, - max_tokens: Optional[int] = None, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, ) -> ValueNode: """Parse the AST for a given string containing a GraphQL value. @@ -177,7 +179,7 @@ def parse_value( def parse_const_value( source: SourceType, no_location: bool = False, - max_tokens: Optional[int] = None, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, ) -> ConstValueNode: """Parse the AST for a given string containing a GraphQL constant value. @@ -200,7 +202,7 @@ def parse_const_value( def parse_type( source: SourceType, no_location: bool = False, - max_tokens: Optional[int] = None, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, ) -> TypeNode: """Parse the AST for a given string containing a GraphQL Type. 
@@ -238,7 +240,7 @@ class Parser: """ _no_location: bool - _max_tokens: Optional[int] + _max_tokens: int | None _allow_legacy_fragment_variables: bool _experimental_client_controlled_nullability: bool _lexer: Lexer @@ -248,7 +250,7 @@ def __init__( self, source: SourceType, no_location: bool = False, - max_tokens: Optional[int] = None, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, experimental_client_controlled_nullability: bool = False, ) -> None: @@ -371,7 +373,7 @@ def parse_operation_type(self) -> OperationType: except ValueError as error: raise self.unexpected(operation_token) from error - def parse_variable_definitions(self) -> List[VariableDefinitionNode]: + def parse_variable_definitions(self) -> list[VariableDefinitionNode]: """VariableDefinitions: (VariableDefinition+)""" return self.optional_many( TokenKind.PAREN_L, self.parse_variable_definition, TokenKind.PAREN_R @@ -417,7 +419,7 @@ def parse_field(self) -> FieldNode: start = self._lexer.token name_or_alias = self.parse_name() if self.expect_optional_token(TokenKind.COLON): - alias: Optional[NameNode] = name_or_alias + alias: NameNode | None = name_or_alias name = self.parse_name() else: alias = None @@ -436,7 +438,7 @@ def parse_field(self) -> FieldNode: loc=self.loc(start), ) - def parse_nullability_assertion(self) -> Optional[NullabilityAssertionNode]: + def parse_nullability_assertion(self) -> NullabilityAssertionNode | None: """NullabilityAssertion (grammar not yet finalized) # Note: Client Controlled Nullability is experimental and may be changed or @@ -446,7 +448,7 @@ def parse_nullability_assertion(self) -> Optional[NullabilityAssertionNode]: return None start = self._lexer.token - nullability_assertion: Optional[NullabilityAssertionNode] = None + nullability_assertion: NullabilityAssertionNode | None = None if self.expect_optional_token(TokenKind.BRACKET_L): inner_modifier = self.parse_nullability_assertion() @@ -466,7 +468,7 @@ def 
parse_nullability_assertion(self) -> Optional[NullabilityAssertionNode]: return nullability_assertion - def parse_arguments(self, is_const: bool) -> List[ArgumentNode]: + def parse_arguments(self, is_const: bool) -> list[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument item = cast(Callable[[], ArgumentNode], item) @@ -488,7 +490,7 @@ def parse_const_argument(self) -> ConstArgumentNode: # Implement the parsing rules in the Fragments section. - def parse_fragment(self) -> Union[FragmentSpreadNode, InlineFragmentNode]: + def parse_fragment(self) -> FragmentSpreadNode | InlineFragmentNode: """Corresponds to both FragmentSpread and InlineFragment in the spec. FragmentSpread: ... FragmentName Directives? @@ -642,15 +644,15 @@ def parse_const_value_literal(self) -> ConstValueNode: # Implement the parsing rules in the Directives section. - def parse_directives(self, is_const: bool) -> List[DirectiveNode]: + def parse_directives(self, is_const: bool) -> list[DirectiveNode]: """Directives[Const]: Directive[?Const]+""" - directives: List[DirectiveNode] = [] + directives: list[DirectiveNode] = [] append = directives.append while self.peek(TokenKind.AT): append(self.parse_directive(is_const)) return directives - def parse_const_directives(self) -> List[ConstDirectiveNode]: + def parse_const_directives(self) -> list[ConstDirectiveNode]: return cast(List[ConstDirectiveNode], self.parse_directives(True)) def parse_directive(self, is_const: bool) -> DirectiveNode: @@ -710,7 +712,7 @@ def parse_type_system_extension(self) -> TypeSystemExtensionNode: def peek_description(self) -> bool: return self.peek(TokenKind.STRING) or self.peek(TokenKind.BLOCK_STRING) - def parse_description(self) -> Optional[StringValueNode]: + def parse_description(self) -> StringValueNode | None: """Description: StringValue""" if self.peek_description(): return self.parse_string_literal() @@ -774,7 +776,7 @@ def 
parse_object_type_definition(self) -> ObjectTypeDefinitionNode: loc=self.loc(start), ) - def parse_implements_interfaces(self) -> List[NamedTypeNode]: + def parse_implements_interfaces(self) -> list[NamedTypeNode]: """ImplementsInterfaces""" return ( self.delimited_many(TokenKind.AMP, self.parse_named_type) @@ -782,7 +784,7 @@ def parse_implements_interfaces(self) -> List[NamedTypeNode]: else [] ) - def parse_fields_definition(self) -> List[FieldDefinitionNode]: + def parse_fields_definition(self) -> list[FieldDefinitionNode]: """FieldsDefinition: {FieldDefinition+}""" return self.optional_many( TokenKind.BRACE_L, self.parse_field_definition, TokenKind.BRACE_R @@ -806,7 +808,7 @@ def parse_field_definition(self) -> FieldDefinitionNode: loc=self.loc(start), ) - def parse_argument_defs(self) -> List[InputValueDefinitionNode]: + def parse_argument_defs(self) -> list[InputValueDefinitionNode]: """ArgumentsDefinition: (InputValueDefinition+)""" return self.optional_many( TokenKind.PAREN_L, self.parse_input_value_def, TokenKind.PAREN_R @@ -868,7 +870,7 @@ def parse_union_type_definition(self) -> UnionTypeDefinitionNode: loc=self.loc(start), ) - def parse_union_member_types(self) -> List[NamedTypeNode]: + def parse_union_member_types(self) -> list[NamedTypeNode]: """UnionMemberTypes""" return ( self.delimited_many(TokenKind.PIPE, self.parse_named_type) @@ -892,7 +894,7 @@ def parse_enum_type_definition(self) -> EnumTypeDefinitionNode: loc=self.loc(start), ) - def parse_enum_values_definition(self) -> List[EnumValueDefinitionNode]: + def parse_enum_values_definition(self) -> list[EnumValueDefinitionNode]: """EnumValuesDefinition: {EnumValueDefinition+}""" return self.optional_many( TokenKind.BRACE_L, self.parse_enum_value_definition, TokenKind.BRACE_R @@ -938,7 +940,7 @@ def parse_input_object_type_definition(self) -> InputObjectTypeDefinitionNode: loc=self.loc(start), ) - def parse_input_fields_definition(self) -> List[InputValueDefinitionNode]: + def 
parse_input_fields_definition(self) -> list[InputValueDefinitionNode]: """InputFieldsDefinition: {InputValueDefinition+}""" return self.optional_many( TokenKind.BRACE_L, self.parse_input_value_def, TokenKind.BRACE_R @@ -1072,7 +1074,7 @@ def parse_directive_definition(self) -> DirectiveDefinitionNode: loc=self.loc(start), ) - def parse_directive_locations(self) -> List[NameNode]: + def parse_directive_locations(self) -> list[NameNode]: """DirectiveLocations""" return self.delimited_many(TokenKind.PIPE, self.parse_directive_location) @@ -1086,7 +1088,7 @@ def parse_directive_location(self) -> NameNode: # Core parsing utility functions - def loc(self, start_token: Token) -> Optional[Location]: + def loc(self, start_token: Token) -> Location | None: """Return a location object. Used to identify the place in the source that created a given parsed object. @@ -1160,7 +1162,7 @@ def expect_optional_keyword(self, value: str) -> bool: return False - def unexpected(self, at_token: Optional[Token] = None) -> GraphQLError: + def unexpected(self, at_token: Token | None = None) -> GraphQLError: """Create an error when an unexpected lexed token is encountered.""" token = at_token or self._lexer.token return GraphQLSyntaxError( @@ -1169,7 +1171,7 @@ def unexpected(self, at_token: Optional[Token] = None) -> GraphQLError: def any( self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind - ) -> List[T]: + ) -> list[T]: """Fetch any matching nodes, possibly none. Returns a possibly empty list of parse nodes, determined by the ``parse_fn``. @@ -1178,7 +1180,7 @@ def any( token. 
""" self.expect_token(open_kind) - nodes: List[T] = [] + nodes: list[T] = [] append = nodes.append expect_optional_token = partial(self.expect_optional_token, close_kind) while not expect_optional_token(): @@ -1187,7 +1189,7 @@ def any( def optional_many( self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind - ) -> List[T]: + ) -> list[T]: """Fetch matching nodes, maybe none. Returns a list of parse nodes, determined by the ``parse_fn``. It can be empty @@ -1207,7 +1209,7 @@ def optional_many( def many( self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind - ) -> List[T]: + ) -> list[T]: """Fetch matching nodes, at least one. Returns a non-empty list of parse nodes, determined by the ``parse_fn``. This @@ -1225,7 +1227,7 @@ def many( def delimited_many( self, delimiter_kind: TokenKind, parse_fn: Callable[[], T] - ) -> List[T]: + ) -> list[T]: """Fetch many delimited nodes. Returns a non-empty list of parse nodes, determined by the ``parse_fn``. 
This @@ -1235,7 +1237,7 @@ def delimited_many( """ expect_optional_token = partial(self.expect_optional_token, delimiter_kind) expect_optional_token() - nodes: List[T] = [] + nodes: list[T] = [] append = nodes.append while True: append(parse_fn()) diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index 2b483ec9..b65b1982 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -1,6 +1,6 @@ """Predicates for GraphQL nodes""" -from typing import Union +from __future__ import annotations from .ast import ( DefinitionNode, @@ -93,7 +93,7 @@ def is_type_definition_node(node: Node) -> TypeGuard[TypeDefinitionNode]: def is_type_system_extension_node( node: Node, -) -> TypeGuard[Union[SchemaExtensionNode, TypeExtensionNode]]: +) -> TypeGuard[SchemaExtensionNode | TypeExtensionNode]: """Check whether the given node represents a type system extension.""" return isinstance(node, (SchemaExtensionNode, TypeExtensionNode)) diff --git a/src/graphql/language/print_location.py b/src/graphql/language/print_location.py index e0ae5de5..03509732 100644 --- a/src/graphql/language/print_location.py +++ b/src/graphql/language/print_location.py @@ -1,11 +1,15 @@ """Print location in GraphQL source""" +from __future__ import annotations + import re -from typing import Optional, Tuple, cast +from typing import TYPE_CHECKING, Tuple, cast -from .ast import Location from .location import SourceLocation, get_location -from .source import Source + +if TYPE_CHECKING: + from .ast import Location + from .source import Source __all__ = ["print_location", "print_source_location"] @@ -66,7 +70,7 @@ def print_source_location(source: Source, source_location: SourceLocation) -> st ) -def print_prefixed_lines(*lines: Tuple[str, Optional[str]]) -> str: +def print_prefixed_lines(*lines: tuple[str, str | None]) -> str: """Print lines specified like this: ("prefix", "string")""" existing_lines = [ cast(Tuple[str, str], line) for line in 
lines if line[1] is not None diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 7170ca5f..7062b5c8 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -1,12 +1,16 @@ """Print AST""" -from typing import Any, Collection, Optional +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Collection -from ..language.ast import Node, OperationType from .block_string import print_block_string from .print_string import print_string from .visitor import Visitor, visit +if TYPE_CHECKING: + from ..language.ast import Node, OperationType + try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -414,7 +418,7 @@ def leave_input_object_type_extension(node: PrintedNode, *_args: Any) -> str: ) -def join(strings: Optional[Strings], separator: str = "") -> str: +def join(strings: Strings | None, separator: str = "") -> str: """Join strings in a given collection. Return an empty string if it is None or empty, otherwise join all items together @@ -423,7 +427,7 @@ def join(strings: Optional[Strings], separator: str = "") -> str: return separator.join(s for s in strings if s) if strings else "" -def block(strings: Optional[Strings]) -> str: +def block(strings: Strings | None) -> str: """Return strings inside a block. Given a collection of strings, return a string with each item on its own line, @@ -432,7 +436,7 @@ def block(strings: Optional[Strings]) -> str: return wrap("{\n", indent(join(strings, "\n")), "\n}") -def wrap(start: str, string: Optional[str], end: str = "") -> str: +def wrap(start: str, string: str | None, end: str = "") -> str: """Wrap string inside other strings at start and end. 
If the string is not None or empty, then wrap with start and end, otherwise return @@ -455,6 +459,6 @@ def is_multiline(string: str) -> bool: return "\n" in string -def has_multiline_items(strings: Optional[Strings]) -> bool: +def has_multiline_items(strings: Strings | None) -> bool: """Check whether one of the items in the list has multiple lines.""" return any(is_multiline(item) for item in strings) if strings else False diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index bd2c635d..01bb013f 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -1,5 +1,7 @@ """GraphQL source input""" +from __future__ import annotations + from typing import Any from .location import SourceLocation diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index a7dccaeb..0538c2e2 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -1,5 +1,7 @@ """AST Visitor""" +from __future__ import annotations + from copy import copy from enum import Enum from typing import ( @@ -7,11 +9,9 @@ Callable, Collection, Dict, - List, NamedTuple, Optional, Tuple, - Union, ) from ..pyutils import inspect, snake_to_camel @@ -64,8 +64,8 @@ class VisitorActionEnum(Enum): class EnterLeaveVisitor(NamedTuple): """Visitor with functions for entering and leaving.""" - enter: Optional[Callable[..., Optional[VisitorAction]]] - leave: Optional[Callable[..., Optional[VisitorAction]]] + enter: Callable[..., VisitorAction | None] | None + leave: Callable[..., VisitorAction | None] | None class Visitor: @@ -112,7 +112,7 @@ def leave(self, node, key, parent, path, ancestors): # Provide special return values as attributes BREAK, SKIP, REMOVE, IDLE = BREAK, SKIP, REMOVE, IDLE - enter_leave_map: Dict[str, EnterLeaveVisitor] + enter_leave_map: dict[str, EnterLeaveVisitor] def __init_subclass__(cls) -> None: """Verify that all defined handlers are valid.""" @@ -122,7 +122,7 @@ def __init_subclass__(cls) -> 
None: continue attr_kind = attr.split("_", 1) if len(attr_kind) < 2: - kind: Optional[str] = None + kind: str | None = None else: attr, kind = attr_kind # noqa: PLW2901 if attr in ("enter", "leave") and kind: @@ -160,13 +160,13 @@ class Stack(NamedTuple): in_array: bool idx: int - keys: Tuple[Node, ...] - edits: List[Tuple[Union[int, str], Node]] + keys: tuple[Node, ...] + edits: list[tuple[int | str, Node]] prev: Any # 'Stack' (python/mypy/issues/731) def visit( - root: Node, visitor: Visitor, visitor_keys: Optional[VisitorKeyMap] = None + root: Node, visitor: Visitor, visitor_keys: VisitorKeyMap | None = None ) -> Any: """Visit each node in an AST. @@ -197,16 +197,16 @@ def visit( stack: Any = None in_array = False - keys: Tuple[Node, ...] = (root,) + keys: tuple[Node, ...] = (root,) idx = -1 - edits: List[Any] = [] + edits: list[Any] = [] node: Any = root key: Any = None parent: Any = None - path: List[Any] = [] + path: list[Any] = [] path_append = path.append path_pop = path.pop - ancestors: List[Any] = [] + ancestors: list[Any] = [] ancestors_append = ancestors.append ancestors_pop = ancestors.pop @@ -317,7 +317,7 @@ def __init__(self, visitors: Collection[Visitor]) -> None: """Create a new visitor from the given list of parallel visitors.""" super().__init__() self.visitors = visitors - self.skipping: List[Any] = [None] * len(visitors) + self.skipping: list[Any] = [None] * len(visitors) def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: """Given a node kind, return the EnterLeaveVisitor for that kind.""" @@ -325,8 +325,8 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: return self.enter_leave_map[kind] except KeyError: has_visitor = False - enter_list: List[Optional[Callable[..., Optional[VisitorAction]]]] = [] - leave_list: List[Optional[Callable[..., Optional[VisitorAction]]]] = [] + enter_list: list[Callable[..., VisitorAction | None] | None] = [] + leave_list: list[Callable[..., VisitorAction | None] | None] = [] for 
visitor in self.visitors: enter, leave = visitor.get_enter_leave_for_kind(kind) if not has_visitor and (enter or leave): @@ -336,7 +336,7 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: if has_visitor: - def enter(node: Node, *args: Any) -> Optional[VisitorAction]: + def enter(node: Node, *args: Any) -> VisitorAction | None: skipping = self.skipping for i, fn in enumerate(enter_list): if not skipping[i] and fn: @@ -349,7 +349,7 @@ def enter(node: Node, *args: Any) -> Optional[VisitorAction]: return result return None - def leave(node: Node, *args: Any) -> Optional[VisitorAction]: + def leave(node: Node, *args: Any) -> VisitorAction | None: skipping = self.skipping for i, fn in enumerate(leave_list): if not skipping[i]: diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py index 2ffa3c82..33d97f9c 100644 --- a/src/graphql/pyutils/async_reduce.py +++ b/src/graphql/pyutils/async_reduce.py @@ -1,10 +1,14 @@ """Reduce awaitable values""" -from typing import Any, Awaitable, Callable, Collection, TypeVar, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Collection, TypeVar, cast -from .awaitable_or_value import AwaitableOrValue from .is_awaitable import is_awaitable as default_is_awaitable +if TYPE_CHECKING: + from .awaitable_or_value import AwaitableOrValue + __all__ = ["async_reduce"] T = TypeVar("T") diff --git a/src/graphql/pyutils/awaitable_or_value.py b/src/graphql/pyutils/awaitable_or_value.py index c1b888d1..7348db9b 100644 --- a/src/graphql/pyutils/awaitable_or_value.py +++ b/src/graphql/pyutils/awaitable_or_value.py @@ -1,5 +1,7 @@ """Awaitable or value type""" +from __future__ import annotations + from typing import Awaitable, TypeVar, Union try: diff --git a/src/graphql/pyutils/cached_property.py b/src/graphql/pyutils/cached_property.py index d55e7427..fcd49a10 100644 --- a/src/graphql/pyutils/cached_property.py +++ 
b/src/graphql/pyutils/cached_property.py @@ -1,5 +1,7 @@ """Cached properties""" +from __future__ import annotations + from typing import TYPE_CHECKING, Any, Callable if TYPE_CHECKING: diff --git a/src/graphql/pyutils/description.py b/src/graphql/pyutils/description.py index d7e9d37d..812d61fe 100644 --- a/src/graphql/pyutils/description.py +++ b/src/graphql/pyutils/description.py @@ -1,6 +1,8 @@ """Human-readable descriptions""" -from typing import Any, Tuple, Union +from __future__ import annotations + +from typing import Any __all__ = [ "Description", @@ -19,7 +21,7 @@ class Description: If you register(object), any object will be allowed as description. """ - bases: Union[type, Tuple[type, ...]] = str + bases: type | tuple[type, ...] = str @classmethod def isinstance(cls, obj: Any) -> bool: diff --git a/src/graphql/pyutils/did_you_mean.py b/src/graphql/pyutils/did_you_mean.py index de29e9e2..ae2022b5 100644 --- a/src/graphql/pyutils/did_you_mean.py +++ b/src/graphql/pyutils/did_you_mean.py @@ -1,6 +1,8 @@ """Generating suggestions""" -from typing import Optional, Sequence +from __future__ import annotations + +from typing import Sequence from .format_list import or_list @@ -9,7 +11,7 @@ MAX_LENGTH = 5 -def did_you_mean(suggestions: Sequence[str], sub_message: Optional[str] = None) -> str: +def did_you_mean(suggestions: Sequence[str], sub_message: str | None = None) -> str: """Given [ A, B, C ] return ' Did you mean A, B, or C?'""" if not suggestions or not MAX_LENGTH: return "" diff --git a/src/graphql/pyutils/format_list.py b/src/graphql/pyutils/format_list.py index b564e592..87184728 100644 --- a/src/graphql/pyutils/format_list.py +++ b/src/graphql/pyutils/format_list.py @@ -1,5 +1,7 @@ """List formatting""" +from __future__ import annotations + from typing import Sequence __all__ = ["or_list", "and_list"] diff --git a/src/graphql/pyutils/group_by.py b/src/graphql/pyutils/group_by.py index d765d9e7..60c77b30 100644 --- a/src/graphql/pyutils/group_by.py +++ 
b/src/graphql/pyutils/group_by.py @@ -1,7 +1,9 @@ """Grouping function""" +from __future__ import annotations + from collections import defaultdict -from typing import Callable, Collection, Dict, List, TypeVar +from typing import Callable, Collection, TypeVar __all__ = ["group_by"] @@ -9,9 +11,9 @@ T = TypeVar("T") -def group_by(items: Collection[T], key_fn: Callable[[T], K]) -> Dict[K, List[T]]: +def group_by(items: Collection[T], key_fn: Callable[[T], K]) -> dict[K, list[T]]: """Group an unsorted collection of items by a key derived via a function.""" - result: Dict[K, List[T]] = defaultdict(list) + result: dict[K, list[T]] = defaultdict(list) for item in items: key = key_fn(item) result[key].append(item) diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index 21c6ae28..2876c570 100644 --- a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -1,5 +1,7 @@ """Identity function""" +from __future__ import annotations + from typing import Any, TypeVar, cast from .undefined import Undefined diff --git a/src/graphql/pyutils/inspect.py b/src/graphql/pyutils/inspect.py index 305b697e..ed4920be 100644 --- a/src/graphql/pyutils/inspect.py +++ b/src/graphql/pyutils/inspect.py @@ -1,5 +1,7 @@ """Value inspection for error messages""" +from __future__ import annotations + from inspect import ( isasyncgen, isasyncgenfunction, @@ -11,7 +13,7 @@ isgeneratorfunction, ismethod, ) -from typing import Any, List +from typing import Any from .undefined import Undefined @@ -36,7 +38,7 @@ def inspect(value: Any) -> str: return inspect_recursive(value, []) -def inspect_recursive(value: Any, seen_values: List) -> str: +def inspect_recursive(value: Any, seen_values: list) -> str: if value is None or value is Undefined or isinstance(value, (bool, float, complex)): return repr(value) if isinstance(value, (int, str, bytes, bytearray)): @@ -164,7 +166,7 @@ def trunc_str(s: str) -> str: return s -def trunc_list(s: List) -> 
List: +def trunc_list(s: list) -> list: """Truncate lists to maximum length.""" if len(s) > max_list_size: i = max_list_size // 2 diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index 3d450b82..ce8c93c0 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -1,5 +1,7 @@ """Check whether objects are awaitable""" +from __future__ import annotations + import inspect from types import CoroutineType, GeneratorType from typing import Any, Awaitable diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index 802aef8f..3ec027bb 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -1,5 +1,7 @@ """Check whether objects are iterable""" +from __future__ import annotations + from array import array from typing import Any, Collection, Iterable, Mapping, ValuesView diff --git a/src/graphql/pyutils/merge_kwargs.py b/src/graphql/pyutils/merge_kwargs.py index 726d0dd6..c7cace3e 100644 --- a/src/graphql/pyutils/merge_kwargs.py +++ b/src/graphql/pyutils/merge_kwargs.py @@ -1,5 +1,7 @@ """Merge arguments""" +from __future__ import annotations + from typing import Any, Dict, TypeVar, cast T = TypeVar("T") diff --git a/src/graphql/pyutils/natural_compare.py b/src/graphql/pyutils/natural_compare.py index 1e8310e8..9c357cc6 100644 --- a/src/graphql/pyutils/natural_compare.py +++ b/src/graphql/pyutils/natural_compare.py @@ -1,15 +1,16 @@ """Natural sort order""" +from __future__ import annotations + import re from itertools import cycle -from typing import Tuple __all__ = ["natural_comparison_key"] _re_digits = re.compile(r"(\d+)") -def natural_comparison_key(key: str) -> Tuple: +def natural_comparison_key(key: str) -> tuple: """Comparison key function for sorting strings by natural sort order. 
See: https://en.wikipedia.org/wiki/Natural_sort_order diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py index ff71af4d..089f5970 100644 --- a/src/graphql/pyutils/path.py +++ b/src/graphql/pyutils/path.py @@ -1,6 +1,6 @@ """Path of indices""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from typing import Any, NamedTuple diff --git a/src/graphql/pyutils/print_path_list.py b/src/graphql/pyutils/print_path_list.py index dadbfac9..37dca741 100644 --- a/src/graphql/pyutils/print_path_list.py +++ b/src/graphql/pyutils/print_path_list.py @@ -1,9 +1,10 @@ """Path printing""" +from __future__ import annotations -from typing import Collection, Union +from typing import Collection -def print_path_list(path: Collection[Union[str, int]]) -> str: +def print_path_list(path: Collection[str | int]) -> str: """Build a string describing the path.""" return "".join(f"[{key}]" if isinstance(key, int) else f".{key}" for key in path) diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index b8648165..6b040ef3 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,6 +1,6 @@ """Simple public-subscribe system""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from asyncio import Future, Queue, create_task, get_running_loop, sleep from typing import Any, AsyncIterator, Callable diff --git a/src/graphql/pyutils/suggestion_list.py b/src/graphql/pyutils/suggestion_list.py index 16526b34..6abeefed 100644 --- a/src/graphql/pyutils/suggestion_list.py +++ b/src/graphql/pyutils/suggestion_list.py @@ -1,13 +1,15 @@ """List with suggestions""" -from typing import Collection, List, Optional +from __future__ import annotations + +from typing import Collection from .natural_compare import natural_comparison_key __all__ = ["suggestion_list"] -def suggestion_list(input_: str, options: Collection[str]) -> List[str]: +def 
suggestion_list(input_: str, options: Collection[str]) -> list[str]: """Get list with suggestions for a given input. Given an invalid input string and list of valid options, returns a filtered list @@ -44,8 +46,8 @@ class LexicalDistance: _input: str _input_lower_case: str - _input_list: List[int] - _rows: List[List[int]] + _input_list: list[int] + _rows: list[list[int]] def __init__(self, input_: str) -> None: self._input = input_ @@ -55,7 +57,7 @@ def __init__(self, input_: str) -> None: self._rows = [[0] * row_size, [0] * row_size, [0] * row_size] - def measure(self, option: str, threshold: int) -> Optional[int]: + def measure(self, option: str, threshold: int) -> int | None: if self._input == option: return 0 diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index d1e21071..10e2c69e 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -1,6 +1,6 @@ """The Undefined value""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations import warnings diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 9551735d..6307eee6 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1,6 +1,6 @@ """GraphQL type definitions.""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from enum import Enum from typing import ( diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index b8068d0c..17e8083c 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -1,6 +1,6 @@ """GraphQL directives""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from typing import Any, Collection, cast diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 1edbdd9f..866a0499 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -1,5 +1,7 @@ """GraphQL 
introspection""" +from __future__ import annotations + from enum import Enum from typing import Mapping diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index e9fbbdaa..22669c80 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -1,5 +1,7 @@ """GraphQL scalar types""" +from __future__ import annotations + from math import isfinite from typing import Any, Mapping diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 47155ed8..4da894c1 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -1,6 +1,6 @@ """GraphQL schemas""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from copy import copy, deepcopy from typing import ( diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 505cebde..daf9935a 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -1,8 +1,10 @@ """Schema validation""" +from __future__ import annotations + from collections import defaultdict from operator import attrgetter, itemgetter -from typing import Any, Collection, Dict, List, Optional, Set, Tuple, Union, cast +from typing import Any, Collection, Optional, cast from ..error import GraphQLError from ..language import ( @@ -42,7 +44,7 @@ __all__ = ["validate_schema", "assert_valid_schema"] -def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: +def validate_schema(schema: GraphQLSchema) -> list[GraphQLError]: """Validate a GraphQL schema. 
Implements the "Type Validation" sub-sections of the specification's "Type System" @@ -85,7 +87,7 @@ def assert_valid_schema(schema: GraphQLSchema) -> None: class SchemaValidationContext: """Utility class providing a context for schema validation.""" - errors: List[GraphQLError] + errors: list[GraphQLError] schema: GraphQLSchema def __init__(self, schema: GraphQLSchema) -> None: @@ -95,7 +97,7 @@ def __init__(self, schema: GraphQLSchema) -> None: def report_error( self, message: str, - nodes: Union[Optional[Node], Collection[Optional[Node]]] = None, + nodes: Node | None | Collection[Node | None] = None, ) -> None: if nodes and not isinstance(nodes, Node): nodes = [node for node in nodes if node] @@ -106,7 +108,7 @@ def validate_root_types(self) -> None: schema = self.schema if not schema.query_type: self.report_error("Query root type must be provided.", schema.ast_node) - root_types_map: Dict[GraphQLObjectType, List[OperationType]] = defaultdict(list) + root_types_map: dict[GraphQLObjectType, list[OperationType]] = defaultdict(list) for operation_type in OperationType: root_type = schema.get_root_type(operation_type) @@ -176,7 +178,7 @@ def validate_directives(self) -> None: ], ) - def validate_name(self, node: Any, name: Optional[str] = None) -> None: + def validate_name(self, node: Any, name: str | None = None) -> None: # Ensure names are valid, however introspection types opt out. 
try: if not name: @@ -234,7 +236,7 @@ def validate_types(self) -> None: validate_input_object_circular_refs(type_) def validate_fields( - self, type_: Union[GraphQLObjectType, GraphQLInterfaceType] + self, type_: GraphQLObjectType | GraphQLInterfaceType ) -> None: fields = type_.fields @@ -281,9 +283,9 @@ def validate_fields( ) def validate_interfaces( - self, type_: Union[GraphQLObjectType, GraphQLInterfaceType] + self, type_: GraphQLObjectType | GraphQLInterfaceType ) -> None: - iface_type_names: Set[str] = set() + iface_type_names: set[str] = set() for iface in type_.interfaces: if not is_interface_type(iface): self.report_error( @@ -314,7 +316,7 @@ def validate_interfaces( def validate_type_implements_interface( self, - type_: Union[GraphQLObjectType, GraphQLInterfaceType], + type_: GraphQLObjectType | GraphQLInterfaceType, iface: GraphQLInterfaceType, ) -> None: type_fields, iface_fields = type_.fields, iface.fields @@ -393,7 +395,7 @@ def validate_type_implements_interface( def validate_type_implements_ancestors( self, - type_: Union[GraphQLObjectType, GraphQLInterfaceType], + type_: GraphQLObjectType | GraphQLInterfaceType, iface: GraphQLInterfaceType, ) -> None: type_interfaces, iface_interfaces = type_.interfaces, iface.interfaces @@ -418,7 +420,7 @@ def validate_union_members(self, union: GraphQLUnionType) -> None: [union.ast_node, *union.extension_ast_nodes], ) - included_type_names: Set[str] = set() + included_type_names: set[str] = set() for member_type in member_types: if is_object_type(member_type): if member_type.name in included_type_names: @@ -485,8 +487,8 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: def get_operation_type_node( schema: GraphQLSchema, operation: OperationType -) -> Optional[Node]: - ast_node: Optional[Union[SchemaDefinitionNode, SchemaExtensionNode]] +) -> Node | None: + ast_node: SchemaDefinitionNode | SchemaExtensionNode | None for ast_node in [schema.ast_node, *(schema.extension_ast_nodes or 
())]: if ast_node: operation_types = ast_node.operation_types @@ -504,11 +506,11 @@ def __init__(self, context: SchemaValidationContext) -> None: self.context = context # Tracks already visited types to maintain O(N) and to ensure that cycles # are not redundantly reported. - self.visited_types: Set[str] = set() + self.visited_types: set[str] = set() # Array of input fields used to produce meaningful errors - self.field_path: List[Tuple[str, GraphQLInputField]] = [] + self.field_path: list[tuple[str, GraphQLInputField]] = [] # Position in the type path - self.field_path_index_by_type_name: Dict[str, int] = {} + self.field_path_index_by_type_name: dict[str, int] = {} def __call__(self, input_obj: GraphQLInputObjectType) -> None: """Detect cycles recursively.""" @@ -550,13 +552,13 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: def get_all_implements_interface_nodes( - type_: Union[GraphQLObjectType, GraphQLInterfaceType], iface: GraphQLInterfaceType -) -> List[NamedTypeNode]: + type_: GraphQLObjectType | GraphQLInterfaceType, iface: GraphQLInterfaceType +) -> list[NamedTypeNode]: ast_node = type_.ast_node nodes = type_.extension_ast_nodes if ast_node is not None: nodes = [ast_node, *nodes] # type: ignore - implements_nodes: List[NamedTypeNode] = [] + implements_nodes: list[NamedTypeNode] = [] for node in nodes: iface_nodes = node.interfaces if iface_nodes: # pragma: no cover else @@ -570,12 +572,12 @@ def get_all_implements_interface_nodes( def get_union_member_type_nodes( union: GraphQLUnionType, type_name: str -) -> List[NamedTypeNode]: +) -> list[NamedTypeNode]: ast_node = union.ast_node nodes = union.extension_ast_nodes if ast_node is not None: nodes = [ast_node, *nodes] # type: ignore - member_type_nodes: List[NamedTypeNode] = [] + member_type_nodes: list[NamedTypeNode] = [] for node in nodes: type_nodes = node.types if type_nodes: # pragma: no cover else @@ -588,8 +590,8 @@ def get_union_member_type_nodes( def get_deprecated_directive_node( - 
definition_node: Optional[Union[InputValueDefinitionNode]], -) -> Optional[DirectiveNode]: + definition_node: InputValueDefinitionNode | None, +) -> DirectiveNode | None: directives = definition_node and definition_node.directives if directives: for directive in directives: diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index 2c10b4e9..99bf0769 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -1,8 +1,10 @@ """GraphQL AST creation from Python""" +from __future__ import annotations + import re from math import isfinite -from typing import Any, Mapping, Optional +from typing import Any, Mapping from ..language import ( BooleanValueNode, @@ -33,7 +35,7 @@ _re_integer_string = re.compile("^-?(?:0|[1-9][0-9]*)$") -def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: +def ast_from_value(value: Any, type_: GraphQLInputType) -> ValueNode | None: """Produce a GraphQL Value AST given a Python object. This function will match Python/JSON values to GraphQL AST schema format by using diff --git a/src/graphql/utilities/ast_to_dict.py b/src/graphql/utilities/ast_to_dict.py index a04e31a5..959a90a8 100644 --- a/src/graphql/utilities/ast_to_dict.py +++ b/src/graphql/utilities/ast_to_dict.py @@ -1,6 +1,8 @@ """Python dictionary creation from GraphQL AST""" -from typing import Any, Collection, Dict, List, Optional, overload +from __future__ import annotations + +from typing import Any, Collection, overload from ..language import Node, OperationType from ..pyutils import is_iterable @@ -10,8 +12,8 @@ @overload def ast_to_dict( - node: Node, locations: bool = False, cache: Optional[Dict[Node, Any]] = None -) -> Dict: + node: Node, locations: bool = False, cache: dict[Node, Any] | None = None +) -> dict: ... 
@@ -19,8 +21,8 @@ def ast_to_dict( def ast_to_dict( node: Collection[Node], locations: bool = False, - cache: Optional[Dict[Node, Any]] = None, -) -> List[Node]: + cache: dict[Node, Any] | None = None, +) -> list[Node]: ... @@ -28,13 +30,13 @@ def ast_to_dict( def ast_to_dict( node: OperationType, locations: bool = False, - cache: Optional[Dict[Node, Any]] = None, + cache: dict[Node, Any] | None = None, ) -> str: ... def ast_to_dict( - node: Any, locations: bool = False, cache: Optional[Dict[Node, Any]] = None + node: Any, locations: bool = False, cache: dict[Node, Any] | None = None ) -> Any: """Convert a language AST to a nested Python dictionary. diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index 4ec86f02..8736e979 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -1,6 +1,8 @@ """GraphQL Schema creation from GraphQL AST""" -from typing import Union, cast +from __future__ import annotations + +from typing import cast from ..language import DocumentNode, Source, parse from ..type import ( @@ -86,7 +88,7 @@ def build_ast_schema( def build_schema( - source: Union[str, Source], + source: str | Source, assume_valid: bool = False, assume_valid_sdl: bool = False, no_location: bool = False, diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index 65e567a7..c4d05ccc 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -1,7 +1,9 @@ """GraphQL client schema creation""" +from __future__ import annotations + from itertools import chain -from typing import Callable, Collection, Dict, List, Union, cast +from typing import Callable, Collection, cast from ..language import DirectiveLocation, parse_value from ..pyutils import Undefined, inspect @@ -152,10 +154,9 @@ def build_scalar_def( ) def build_implementations_list( - implementing_introspection: Union[ - 
IntrospectionObjectType, IntrospectionInterfaceType - ], - ) -> List[GraphQLInterfaceType]: + implementing_introspection: IntrospectionObjectType + | IntrospectionInterfaceType, + ) -> list[GraphQLInterfaceType]: maybe_interfaces = implementing_introspection.get("interfaces") if maybe_interfaces is None: # Temporary workaround until GraphQL ecosystem will fully support @@ -252,7 +253,7 @@ def build_input_object_def( ), ) - type_builders: Dict[str, Callable[[IntrospectionType], GraphQLNamedType]] = { + type_builders: dict[str, Callable[[IntrospectionType], GraphQLNamedType]] = { TypeKind.SCALAR.name: build_scalar_def, # type: ignore TypeKind.OBJECT.name: build_object_def, # type: ignore TypeKind.INTERFACE.name: build_interface_def, # type: ignore @@ -262,8 +263,8 @@ def build_input_object_def( } def build_field_def_map( - type_introspection: Union[IntrospectionObjectType, IntrospectionInterfaceType], - ) -> Dict[str, GraphQLField]: + type_introspection: IntrospectionObjectType | IntrospectionInterfaceType, + ) -> dict[str, GraphQLField]: if type_introspection.get("fields") is None: msg = f"Introspection result missing fields: {type_introspection}." @@ -300,7 +301,7 @@ def build_field(field_introspection: IntrospectionField) -> GraphQLField: def build_argument_def_map( argument_value_introspections: Collection[IntrospectionInputValue], - ) -> Dict[str, GraphQLArgument]: + ) -> dict[str, GraphQLArgument]: return { argument_introspection["name"]: build_argument(argument_introspection) for argument_introspection in argument_value_introspections @@ -333,7 +334,7 @@ def build_argument( def build_input_value_def_map( input_value_introspections: Collection[IntrospectionInputValue], - ) -> Dict[str, GraphQLInputField]: + ) -> dict[str, GraphQLInputField]: return { input_value_introspection["name"]: build_input_value( input_value_introspection @@ -395,7 +396,7 @@ def build_directive( ) # Iterate through all types, getting the type definition for each. 
- type_map: Dict[str, GraphQLNamedType] = { + type_map: dict[str, GraphQLNamedType] = { type_introspection["name"]: build_type(type_introspection) for type_introspection in schema_introspection["types"] } diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index 23883285..db74d272 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -1,6 +1,8 @@ """Input value coercion""" -from typing import Any, Callable, Dict, List, Optional, Union, cast +from __future__ import annotations + +from typing import Any, Callable, List, Union, cast from ..error import GraphQLError from ..pyutils import ( @@ -34,7 +36,7 @@ def default_on_error( - path: List[Union[str, int]], invalid_value: Any, error: GraphQLError + path: list[str | int], invalid_value: Any, error: GraphQLError ) -> None: error_prefix = "Invalid value " + inspect(invalid_value) if path: @@ -47,7 +49,7 @@ def coerce_input_value( input_value: Any, type_: GraphQLInputType, on_error: OnErrorCB = default_on_error, - path: Optional[Path] = None, + path: Path | None = None, ) -> Any: """Coerce a Python value given a GraphQL Input Type.""" if is_non_null_type(type_): @@ -69,7 +71,7 @@ def coerce_input_value( if is_list_type(type_): item_type = type_.of_type if is_iterable(input_value): - coerced_list: List[Any] = [] + coerced_list: list[Any] = [] append_item = coerced_list.append for index, item_value in enumerate(input_value): append_item( @@ -90,7 +92,7 @@ def coerce_input_value( ) return Undefined - coerced_dict: Dict[str, Any] = {} + coerced_dict: dict[str, Any] = {} fields = type_.fields for field_name, field in fields.items(): diff --git a/src/graphql/utilities/concat_ast.py b/src/graphql/utilities/concat_ast.py index 901d985e..806292f9 100644 --- a/src/graphql/utilities/concat_ast.py +++ b/src/graphql/utilities/concat_ast.py @@ -1,5 +1,7 @@ """AST concatenation""" +from __future__ import annotations + from 
itertools import chain from typing import Collection diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index ffa2420e..6c3eebc7 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -1,18 +1,14 @@ """GraphQL schema extension""" +from __future__ import annotations + from collections import defaultdict from functools import partial from typing import ( Any, Collection, - DefaultDict, - Dict, - List, Mapping, - Optional, - Tuple, TypeVar, - Union, cast, ) @@ -141,12 +137,12 @@ def extend_schema( class TypeExtensionsMap: """Mappings from types to their extensions.""" - scalar: DefaultDict[str, List[ScalarTypeExtensionNode]] - object: DefaultDict[str, List[ObjectTypeExtensionNode]] - interface: DefaultDict[str, List[InterfaceTypeExtensionNode]] - union: DefaultDict[str, List[UnionTypeExtensionNode]] - enum: DefaultDict[str, List[EnumTypeExtensionNode]] - input_object: DefaultDict[str, List[InputObjectTypeExtensionNode]] + scalar: defaultdict[str, list[ScalarTypeExtensionNode]] + object: defaultdict[str, list[ObjectTypeExtensionNode]] + interface: defaultdict[str, list[InterfaceTypeExtensionNode]] + union: defaultdict[str, list[UnionTypeExtensionNode]] + enum: defaultdict[str, list[EnumTypeExtensionNode]] + input_object: defaultdict[str, list[InputObjectTypeExtensionNode]] def __init__(self) -> None: self.scalar = defaultdict(list) @@ -156,7 +152,7 @@ def __init__(self) -> None: self.enum = defaultdict(list) self.input_object = defaultdict(list) - def for_node(self, node: TEN) -> DefaultDict[str, List[TEN]]: + def for_node(self, node: TEN) -> defaultdict[str, list[TEN]]: """Get type extensions map for the given node kind.""" kind = node.kind try: @@ -176,7 +172,7 @@ class ExtendSchemaImpl: For internal use only. 
""" - type_map: Dict[str, GraphQLNamedType] + type_map: dict[str, GraphQLNamedType] type_extensions: TypeExtensionsMap def __init__(self, type_extensions: TypeExtensionsMap) -> None: @@ -195,17 +191,17 @@ def extend_schema_args( For internal use only. """ # Collect the type definitions and extensions found in the document. - type_defs: List[TypeDefinitionNode] = [] + type_defs: list[TypeDefinitionNode] = [] type_extensions = TypeExtensionsMap() # New directives and types are separate because a directives and types can have # the same name. For example, a type named "skip". - directive_defs: List[DirectiveDefinitionNode] = [] + directive_defs: list[DirectiveDefinitionNode] = [] - schema_def: Optional[SchemaDefinitionNode] = None + schema_def: SchemaDefinitionNode | None = None # Schema extensions are collected which may add additional operation types. - schema_extensions: List[SchemaExtensionNode] = [] + schema_extensions: list[SchemaExtensionNode] = [] is_schema_changed = False for def_ in document_ast.definitions: @@ -236,7 +232,7 @@ def extend_schema_args( self.type_map[name] = std_type_map.get(name) or self.build_type(type_node) # Get the extended root operation types. - operation_types: Dict[OperationType, GraphQLNamedType] = {} + operation_types: dict[OperationType, GraphQLNamedType] = {} for operation_type in OperationType: original_type = schema_kwargs[operation_type.value] if original_type: @@ -328,7 +324,7 @@ def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: raise TypeError(msg) # pragma: no cover def extend_input_object_type_fields( - self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] + self, kwargs: dict[str, Any], extensions: tuple[Any, ...] ) -> GraphQLInputFieldMap: """Extend GraphQL input object type fields.""" return { @@ -394,8 +390,8 @@ def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: ) def extend_object_type_interfaces( - self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] 
- ) -> List[GraphQLInterfaceType]: + self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + ) -> list[GraphQLInterfaceType]: """Extend a GraphQL object type interface.""" return [ cast(GraphQLInterfaceType, self.replace_named_type(interface)) @@ -403,7 +399,7 @@ def extend_object_type_interfaces( ] + self.build_interfaces(extensions) def extend_object_type_fields( - self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] + self, kwargs: dict[str, Any], extensions: tuple[Any, ...] ) -> GraphQLFieldMap: """Extend GraphQL object type fields.""" return { @@ -432,8 +428,8 @@ def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: ) def extend_interface_type_interfaces( - self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] - ) -> List[GraphQLInterfaceType]: + self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + ) -> list[GraphQLInterfaceType]: """Extend GraphQL interface type interfaces.""" return [ cast(GraphQLInterfaceType, self.replace_named_type(interface)) @@ -441,7 +437,7 @@ def extend_interface_type_interfaces( ] + self.build_interfaces(extensions) def extend_interface_type_fields( - self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] + self, kwargs: dict[str, Any], extensions: tuple[Any, ...] ) -> GraphQLFieldMap: """Extend GraphQL interface type fields.""" return { @@ -472,8 +468,8 @@ def extend_interface_type( ) def extend_union_type_types( - self, kwargs: Dict[str, Any], extensions: Tuple[Any, ...] - ) -> List[GraphQLObjectType]: + self, kwargs: dict[str, Any], extensions: tuple[Any, ...] 
+ ) -> list[GraphQLObjectType]: """Extend types of a GraphQL union type.""" return [ cast(GraphQLObjectType, self.replace_named_type(member_type)) @@ -515,8 +511,8 @@ def extend_arg(self, arg: GraphQLArgument) -> GraphQLArgument: # noinspection PyShadowingNames def get_operation_types( - self, nodes: Collection[Union[SchemaDefinitionNode, SchemaExtensionNode]] - ) -> Dict[OperationType, GraphQLNamedType]: + self, nodes: Collection[SchemaDefinitionNode | SchemaExtensionNode] + ) -> dict[OperationType, GraphQLNamedType]: """Extend GraphQL operation types.""" # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system @@ -564,12 +560,10 @@ def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective: def build_field_map( self, nodes: Collection[ - Union[ - InterfaceTypeDefinitionNode, - InterfaceTypeExtensionNode, - ObjectTypeDefinitionNode, - ObjectTypeExtensionNode, - ] + InterfaceTypeDefinitionNode + | InterfaceTypeExtensionNode + | ObjectTypeDefinitionNode + | ObjectTypeExtensionNode ], ) -> GraphQLFieldMap: """Build a GraphQL field map.""" @@ -590,7 +584,7 @@ def build_field_map( def build_argument_map( self, - args: Optional[Collection[InputValueDefinitionNode]], + args: Collection[InputValueDefinitionNode] | None, ) -> GraphQLArgumentMap: """Build a GraphQL argument map.""" arg_map: GraphQLArgumentMap = {} @@ -610,9 +604,7 @@ def build_argument_map( def build_input_field_map( self, - nodes: Collection[ - Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] - ], + nodes: Collection[InputObjectTypeDefinitionNode | InputObjectTypeExtensionNode], ) -> GraphQLInputFieldMap: """Build a GraphQL input field map.""" input_field_map: GraphQLInputFieldMap = {} @@ -633,7 +625,7 @@ def build_input_field_map( @staticmethod def build_enum_value_map( - nodes: Collection[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]], + nodes: Collection[EnumTypeDefinitionNode | 
EnumTypeExtensionNode], ) -> GraphQLEnumValueMap: """Build a GraphQL enum value map.""" enum_value_map: GraphQLEnumValueMap = {} @@ -654,14 +646,12 @@ def build_enum_value_map( def build_interfaces( self, nodes: Collection[ - Union[ - InterfaceTypeDefinitionNode, - InterfaceTypeExtensionNode, - ObjectTypeDefinitionNode, - ObjectTypeExtensionNode, - ] + InterfaceTypeDefinitionNode + | InterfaceTypeExtensionNode + | ObjectTypeDefinitionNode + | ObjectTypeExtensionNode ], - ) -> List[GraphQLInterfaceType]: + ) -> list[GraphQLInterfaceType]: """Build GraphQL interface types for the given nodes.""" # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation @@ -674,8 +664,8 @@ def build_interfaces( def build_union_types( self, - nodes: Collection[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]], - ) -> List[GraphQLObjectType]: + nodes: Collection[UnionTypeDefinitionNode | UnionTypeExtensionNode], + ) -> list[GraphQLObjectType]: """Build GraphQL object types for the given union type nodes.""" # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation @@ -691,7 +681,7 @@ def build_object_type( ) -> GraphQLObjectType: """Build a GraphQL object type for the given object type definition node.""" extension_nodes = self.type_extensions.object[ast_node.name.value] - all_nodes: List[Union[ObjectTypeDefinitionNode, ObjectTypeExtensionNode]] = [ + all_nodes: list[ObjectTypeDefinitionNode | ObjectTypeExtensionNode] = [ ast_node, *extension_nodes, ] @@ -710,9 +700,10 @@ def build_interface_type( ) -> GraphQLInterfaceType: """Build a GraphQL interface type for the given type definition nodes.""" extension_nodes = self.type_extensions.interface[ast_node.name.value] - all_nodes: List[ - Union[InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode] - ] = [ast_node, *extension_nodes] + all_nodes: list[InterfaceTypeDefinitionNode | 
InterfaceTypeExtensionNode] = [ + ast_node, + *extension_nodes, + ] return GraphQLInterfaceType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, @@ -725,7 +716,7 @@ def build_interface_type( def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: """Build a GraphQL enum type for the given enum type definition nodes.""" extension_nodes = self.type_extensions.enum[ast_node.name.value] - all_nodes: List[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] = [ + all_nodes: list[EnumTypeDefinitionNode | EnumTypeExtensionNode] = [ ast_node, *extension_nodes, ] @@ -740,7 +731,7 @@ def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: def build_union_type(self, ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: """Build a GraphQL union type for the given union type definition nodes.""" extension_nodes = self.type_extensions.union[ast_node.name.value] - all_nodes: List[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]] = [ + all_nodes: list[UnionTypeDefinitionNode | UnionTypeExtensionNode] = [ ast_node, *extension_nodes, ] @@ -771,8 +762,8 @@ def build_input_object_type( ) -> GraphQLInputObjectType: """Build a GraphQL input object type for the given node.""" extension_nodes = self.type_extensions.input_object[ast_node.name.value] - all_nodes: List[ - Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] + all_nodes: list[ + InputObjectTypeDefinitionNode | InputObjectTypeExtensionNode ] = [ast_node, *extension_nodes] return GraphQLInputObjectType( name=ast_node.name.value, @@ -801,15 +792,15 @@ def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: return build(ast_node) -std_type_map: Mapping[str, Union[GraphQLNamedType, GraphQLObjectType]] = { +std_type_map: Mapping[str, GraphQLNamedType | GraphQLObjectType] = { **specified_scalar_types, **introspection_types, } def get_deprecation_reason( - node: Union[EnumValueDefinitionNode, 
FieldDefinitionNode, InputValueDefinitionNode], -) -> Optional[str]: + node: EnumValueDefinitionNode | FieldDefinitionNode | InputValueDefinitionNode, +) -> str | None: """Given a field or enum value node, get deprecation reason as string.""" from ..execution import get_directive_values @@ -818,8 +809,8 @@ def get_deprecation_reason( def get_specified_by_url( - node: Union[ScalarTypeDefinitionNode, ScalarTypeExtensionNode], -) -> Optional[str]: + node: ScalarTypeDefinitionNode | ScalarTypeExtensionNode, +) -> str | None: """Given a scalar node, return the string value for the specifiedByURL.""" from ..execution import get_directive_values diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index c4899f7b..c88c1265 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -1,7 +1,9 @@ """Find breaking changes between GraphQL schemas""" +from __future__ import annotations + from enum import Enum -from typing import Any, Collection, Dict, List, NamedTuple, Union +from typing import Any, Collection, NamedTuple, Union from ..language import print_ast from ..pyutils import Undefined, inspect @@ -99,7 +101,7 @@ class DangerousChange(NamedTuple): def find_breaking_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[BreakingChange]: +) -> list[BreakingChange]: """Find breaking changes. Given two schemas, returns a list containing descriptions of all the types of @@ -114,7 +116,7 @@ def find_breaking_changes( def find_dangerous_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[DangerousChange]: +) -> list[DangerousChange]: """Find dangerous changes. 
Given two schemas, returns a list containing descriptions of all the types of @@ -129,7 +131,7 @@ def find_dangerous_changes( def find_schema_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[Change]: +) -> list[Change]: return find_type_changes(old_schema, new_schema) + find_directive_changes( old_schema, new_schema ) @@ -137,8 +139,8 @@ def find_schema_changes( def find_directive_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] directives_diff = list_diff(old_schema.directives, new_schema.directives) @@ -192,8 +194,8 @@ def find_directive_changes( def find_type_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] types_diff = dict_diff(old_schema.type_map, new_schema.type_map) for type_name, old_type in types_diff.removed.items(): @@ -239,8 +241,8 @@ def find_type_changes( def find_input_object_type_changes( old_type: GraphQLInputObjectType, new_type: GraphQLInputObjectType, -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] fields_diff = dict_diff(old_type.fields, new_type.fields) for field_name, new_field in fields_diff.added.items(): @@ -287,8 +289,8 @@ def find_input_object_type_changes( def find_union_type_changes( old_type: GraphQLUnionType, new_type: GraphQLUnionType -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] possible_types_diff = list_diff(old_type.types, new_type.types) for possible_type in possible_types_diff.added: @@ -312,8 +314,8 @@ def find_union_type_changes( def find_enum_type_changes( old_type: GraphQLEnumType, new_type: GraphQLEnumType -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] 
values_diff = dict_diff(old_type.values, new_type.values) for value_name in values_diff.added: @@ -336,10 +338,10 @@ def find_enum_type_changes( def find_implemented_interfaces_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], - new_type: Union[GraphQLObjectType, GraphQLInterfaceType], -) -> List[Change]: - schema_changes: List[Change] = [] + old_type: GraphQLObjectType | GraphQLInterfaceType, + new_type: GraphQLObjectType | GraphQLInterfaceType, +) -> list[Change]: + schema_changes: list[Change] = [] interfaces_diff = list_diff(old_type.interfaces, new_type.interfaces) for interface in interfaces_diff.added: @@ -362,10 +364,10 @@ def find_implemented_interfaces_changes( def find_field_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], - new_type: Union[GraphQLObjectType, GraphQLInterfaceType], -) -> List[Change]: - schema_changes: List[Change] = [] + old_type: GraphQLObjectType | GraphQLInterfaceType, + new_type: GraphQLObjectType | GraphQLInterfaceType, +) -> list[Change]: + schema_changes: list[Change] = [] fields_diff = dict_diff(old_type.fields, new_type.fields) for field_name in fields_diff.removed: @@ -396,12 +398,12 @@ def find_field_changes( def find_arg_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], + old_type: GraphQLObjectType | GraphQLInterfaceType, field_name: str, old_field: GraphQLField, new_field: GraphQLField, -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] args_diff = dict_diff(old_field.args, new_field.args) for arg_name in args_diff.removed: @@ -578,9 +580,9 @@ def stringify_value(value: Any, type_: GraphQLInputType) -> str: class ListDiff(NamedTuple): """Tuple with added, removed and persisted list items.""" - added: List - removed: List - persisted: List + added: list + removed: list + persisted: list def list_diff(old_list: Collection, new_list: Collection) -> ListDiff: @@ -609,12 +611,12 @@ def list_diff(old_list: 
Collection, new_list: Collection) -> ListDiff: class DictDiff(NamedTuple): """Tuple with added, removed and persisted dict entries.""" - added: Dict - removed: Dict - persisted: Dict + added: dict + removed: dict + persisted: dict -def dict_diff(old_dict: Dict, new_dict: Dict) -> DictDiff: +def dict_diff(old_dict: dict, new_dict: dict) -> DictDiff: """Get differences between two dicts.""" added = {} removed = {} diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 67feb598..cffaa12d 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -1,9 +1,12 @@ """Get introspection query""" +from __future__ import annotations + from textwrap import dedent -from typing import Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Union -from ..language import DirectiveLocation +if TYPE_CHECKING: + from ..language import DirectiveLocation try: from typing import Literal, TypedDict @@ -53,7 +56,7 @@ def get_introspection_query( maybe_directive_is_repeatable = "isRepeatable" if directive_is_repeatable else "" maybe_schema_description = maybe_description if schema_description else "" - def input_deprecation(string: str) -> Optional[str]: + def input_deprecation(string: str) -> str | None: return string if input_value_deprecation else "" return dedent( @@ -168,7 +171,7 @@ def input_deprecation(string: str) -> Optional[str]: class MaybeWithDescription(TypedDict, total=False): - description: Optional[str] + description: str | None class WithName(MaybeWithDescription): @@ -176,26 +179,26 @@ class WithName(MaybeWithDescription): class MaybeWithSpecifiedByUrl(TypedDict, total=False): - specifiedByURL: Optional[str] + specifiedByURL: str | None class WithDeprecated(TypedDict): isDeprecated: bool - deprecationReason: Optional[str] + deprecationReason: str | None class MaybeWithDeprecated(TypedDict, total=False): isDeprecated: bool - 
deprecationReason: Optional[str] + deprecationReason: str | None class IntrospectionInputValue(WithName, MaybeWithDeprecated): type: SimpleIntrospectionType # should be IntrospectionInputType - defaultValue: Optional[str] + defaultValue: str | None class IntrospectionField(WithName, WithDeprecated): - args: List[IntrospectionInputValue] + args: list[IntrospectionInputValue] type: SimpleIntrospectionType # should be IntrospectionOutputType @@ -208,8 +211,8 @@ class MaybeWithIsRepeatable(TypedDict, total=False): class IntrospectionDirective(WithName, MaybeWithIsRepeatable): - locations: List[DirectiveLocation] - args: List[IntrospectionInputValue] + locations: list[DirectiveLocation] + args: list[IntrospectionInputValue] class IntrospectionScalarType(WithName, MaybeWithSpecifiedByUrl): @@ -218,30 +221,30 @@ class IntrospectionScalarType(WithName, MaybeWithSpecifiedByUrl): class IntrospectionInterfaceType(WithName): kind: Literal["interface"] - fields: List[IntrospectionField] - interfaces: List[SimpleIntrospectionType] # should be InterfaceType - possibleTypes: List[SimpleIntrospectionType] # should be NamedType + fields: list[IntrospectionField] + interfaces: list[SimpleIntrospectionType] # should be InterfaceType + possibleTypes: list[SimpleIntrospectionType] # should be NamedType class IntrospectionObjectType(WithName): kind: Literal["object"] - fields: List[IntrospectionField] - interfaces: List[SimpleIntrospectionType] # should be InterfaceType + fields: list[IntrospectionField] + interfaces: list[SimpleIntrospectionType] # should be InterfaceType class IntrospectionUnionType(WithName): kind: Literal["union"] - possibleTypes: List[SimpleIntrospectionType] # should be NamedType + possibleTypes: list[SimpleIntrospectionType] # should be NamedType class IntrospectionEnumType(WithName): kind: Literal["enum"] - enumValues: List[IntrospectionEnumValue] + enumValues: list[IntrospectionEnumValue] class IntrospectionInputObjectType(WithName): kind: 
Literal["input_object"] - inputFields: List[IntrospectionInputValue] + inputFields: list[IntrospectionInputValue] IntrospectionType: TypeAlias = Union[ @@ -285,10 +288,10 @@ class IntrospectionNonNullType(TypedDict): class IntrospectionSchema(MaybeWithDescription): queryType: IntrospectionObjectType - mutationType: Optional[IntrospectionObjectType] - subscriptionType: Optional[IntrospectionObjectType] - types: List[IntrospectionType] - directives: List[IntrospectionDirective] + mutationType: IntrospectionObjectType | None + subscriptionType: IntrospectionObjectType | None + types: list[IntrospectionType] + directives: list[IntrospectionDirective] class IntrospectionQuery(TypedDict): diff --git a/src/graphql/utilities/get_operation_ast.py b/src/graphql/utilities/get_operation_ast.py index 8a211f3d..4c88ffa8 100644 --- a/src/graphql/utilities/get_operation_ast.py +++ b/src/graphql/utilities/get_operation_ast.py @@ -1,6 +1,6 @@ """"Get operation AST node""" -from typing import Optional +from __future__ import annotations from ..language import DocumentNode, OperationDefinitionNode @@ -8,8 +8,8 @@ def get_operation_ast( - document_ast: DocumentNode, operation_name: Optional[str] = None -) -> Optional[OperationDefinitionNode]: + document_ast: DocumentNode, operation_name: str | None = None +) -> OperationDefinitionNode | None: """Get operation AST node. 
Returns an operation AST given a document AST and optionally an operation diff --git a/src/graphql/utilities/introspection_from_schema.py b/src/graphql/utilities/introspection_from_schema.py index 4b67fb8f..cc1e60ce 100644 --- a/src/graphql/utilities/introspection_from_schema.py +++ b/src/graphql/utilities/introspection_from_schema.py @@ -1,12 +1,16 @@ """Building introspection queries from GraphQL schemas""" -from typing import cast +from __future__ import annotations + +from typing import TYPE_CHECKING, cast from ..error import GraphQLError from ..language import parse -from ..type import GraphQLSchema from .get_introspection_query import IntrospectionQuery, get_introspection_query +if TYPE_CHECKING: + from ..type import GraphQLSchema + __all__ = ["introspection_from_schema"] diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index 810717de..cf0c4959 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -1,8 +1,9 @@ """Sorting GraphQL schemas""" -from typing import Collection, Dict, Optional, Tuple, Union, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection, Optional, cast -from ..language import DirectiveLocation from ..pyutils import inspect, merge_kwargs, natural_comparison_key from ..type import ( GraphQLArgument, @@ -31,6 +32,9 @@ is_union_type, ) +if TYPE_CHECKING: + from ..language import DirectiveLocation + __all__ = ["lexicographic_sort_schema"] @@ -41,8 +45,8 @@ def lexicographic_sort_schema(schema: GraphQLSchema) -> GraphQLSchema: """ def replace_type( - type_: Union[GraphQLList, GraphQLNonNull, GraphQLNamedType], - ) -> Union[GraphQLList, GraphQLNonNull, GraphQLNamedType]: + type_: GraphQLList | GraphQLNonNull | GraphQLNamedType, + ) -> GraphQLList | GraphQLNonNull | GraphQLNamedType: if is_list_type(type_): return GraphQLList(replace_type(type_.of_type)) if 
is_non_null_type(type_): @@ -53,8 +57,8 @@ def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: return type_map[type_.name] def replace_maybe_type( - maybe_type: Optional[GraphQLNamedType], - ) -> Optional[GraphQLNamedType]: + maybe_type: GraphQLNamedType | None, + ) -> GraphQLNamedType | None: return maybe_type and replace_named_type(maybe_type) def sort_directive(directive: GraphQLDirective) -> GraphQLDirective: @@ -66,7 +70,7 @@ def sort_directive(directive: GraphQLDirective) -> GraphQLDirective: ) ) - def sort_args(args_map: Dict[str, GraphQLArgument]) -> Dict[str, GraphQLArgument]: + def sort_args(args_map: dict[str, GraphQLArgument]) -> dict[str, GraphQLArgument]: args = {} for name, arg in sorted(args_map.items()): args[name] = GraphQLArgument( @@ -77,7 +81,7 @@ def sort_args(args_map: Dict[str, GraphQLArgument]) -> Dict[str, GraphQLArgument ) return args - def sort_fields(fields_map: Dict[str, GraphQLField]) -> Dict[str, GraphQLField]: + def sort_fields(fields_map: dict[str, GraphQLField]) -> dict[str, GraphQLField]: fields = {} for name, field in sorted(fields_map.items()): fields[name] = GraphQLField( @@ -90,8 +94,8 @@ def sort_fields(fields_map: Dict[str, GraphQLField]) -> Dict[str, GraphQLField]: return fields def sort_input_fields( - fields_map: Dict[str, GraphQLInputField], - ) -> Dict[str, GraphQLInputField]: + fields_map: dict[str, GraphQLInputField], + ) -> dict[str, GraphQLInputField]: return { name: GraphQLInputField( cast( @@ -104,7 +108,7 @@ def sort_input_fields( for name, field in sorted(fields_map.items()) } - def sort_types(array: Collection[GraphQLNamedType]) -> Tuple[GraphQLNamedType, ...]: + def sort_types(array: Collection[GraphQLNamedType]) -> tuple[GraphQLNamedType, ...]: return tuple( replace_named_type(type_) for type_ in sorted(array, key=sort_by_name_key) ) @@ -159,7 +163,7 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: msg = f"Unexpected type: {inspect(type_)}." 
# pragma: no cover raise TypeError(msg) # pragma: no cover - type_map: Dict[str, GraphQLNamedType] = { + type_map: dict[str, GraphQLNamedType] = { type_.name: sort_named_type(type_) for type_ in sorted(schema.type_map.values(), key=sort_by_name_key) } @@ -182,6 +186,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: def sort_by_name_key( - type_: Union[GraphQLNamedType, GraphQLDirective, DirectiveLocation], -) -> Tuple: + type_: GraphQLNamedType | GraphQLDirective | DirectiveLocation, +) -> tuple: return natural_comparison_key(type_.name) diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index a5d2dfc7..b4097b7c 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -1,6 +1,8 @@ """Printing GraphQL Schemas in SDL format""" -from typing import Any, Callable, Dict, List, Optional, Union +from __future__ import annotations + +from typing import Any, Callable from ..language import StringValueNode, print_ast from ..language.block_string import is_printable_as_block_string @@ -68,7 +70,7 @@ def print_filtered_schema( ) -def print_schema_definition(schema: GraphQLSchema) -> Optional[str]: +def print_schema_definition(schema: GraphQLSchema) -> str | None: """Print GraphQL schema definitions.""" query_type = schema.query_type mutation_type = schema.mutation_type @@ -155,7 +157,7 @@ def print_scalar(type_: GraphQLScalarType) -> str: def print_implemented_interfaces( - type_: Union[GraphQLObjectType, GraphQLInterfaceType], + type_: GraphQLObjectType | GraphQLInterfaceType, ) -> str: """Print the interfaces implemented by a GraphQL object or interface type.""" interfaces = type_.interfaces @@ -209,7 +211,7 @@ def print_input_object(type_: GraphQLInputObjectType) -> str: return print_description(type_) + f"input {type_.name}" + print_block(fields) -def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: +def print_fields(type_: GraphQLObjectType | 
GraphQLInterfaceType) -> str: """Print the fields of a GraphQL object or interface type.""" fields = [ print_description(field, " ", not i) @@ -222,12 +224,12 @@ def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: return print_block(fields) -def print_block(items: List[str]) -> str: +def print_block(items: list[str]) -> str: """Print a block with the given items.""" return " {\n" + "\n".join(items) + "\n}" if items else "" -def print_args(args: Dict[str, GraphQLArgument], indentation: str = "") -> str: +def print_args(args: dict[str, GraphQLArgument], indentation: str = "") -> str: """Print the given GraphQL arguments.""" if not args: return "" @@ -273,7 +275,7 @@ def print_directive(directive: GraphQLDirective) -> str: ) -def print_deprecated(reason: Optional[str]) -> str: +def print_deprecated(reason: str | None) -> str: """Print a deprecation reason.""" if reason is None: return "" @@ -292,13 +294,11 @@ def print_specified_by_url(scalar: GraphQLScalarType) -> str: def print_description( - def_: Union[ - GraphQLArgument, - GraphQLDirective, - GraphQLEnumValue, - GraphQLNamedType, - GraphQLSchema, - ], + def_: GraphQLArgument + | GraphQLDirective + | GraphQLEnumValue + | GraphQLNamedType + | GraphQLSchema, indentation: str = "", first_in_block: bool = True, ) -> str: diff --git a/src/graphql/utilities/separate_operations.py b/src/graphql/utilities/separate_operations.py index 864b0f4e..b6866748 100644 --- a/src/graphql/utilities/separate_operations.py +++ b/src/graphql/utilities/separate_operations.py @@ -1,6 +1,8 @@ """Separation of GraphQL operations""" -from typing import Any, Dict, List, Set +from __future__ import annotations + +from typing import Any, Dict, List from ..language import ( DocumentNode, @@ -24,14 +26,14 @@ DepGraph: TypeAlias = Dict[str, List[str]] -def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: +def separate_operations(document_ast: DocumentNode) -> dict[str, DocumentNode]: 
"""Separate operations in a given AST document. This function accepts a single AST document which may contain many operations and fragments and returns a collection of AST documents each of which contains a single operation as well the fragment definitions it refers to. """ - operations: List[OperationDefinitionNode] = [] + operations: list[OperationDefinitionNode] = [] dep_graph: DepGraph = {} # Populate metadata and build a dependency graph. @@ -47,9 +49,9 @@ def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: # For each operation, produce a new synthesized AST which includes only what is # necessary for completing that operation. - separated_document_asts: Dict[str, DocumentNode] = {} + separated_document_asts: dict[str, DocumentNode] = {} for operation in operations: - dependencies: Set[str] = set() + dependencies: set[str] = set() for fragment_name in collect_dependencies(operation.selection_set): collect_transitive_dependencies(dependencies, dep_graph, fragment_name) @@ -75,7 +77,7 @@ def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: def collect_transitive_dependencies( - collected: Set[str], dep_graph: DepGraph, from_name: str + collected: set[str], dep_graph: DepGraph, from_name: str ) -> None: """Collect transitive dependencies. 
@@ -92,7 +94,7 @@ def collect_transitive_dependencies( class DependencyCollector(Visitor): - dependencies: List[str] + dependencies: list[str] def __init__(self) -> None: super().__init__() @@ -103,7 +105,7 @@ def enter_fragment_spread(self, node: FragmentSpreadNode, *_args: Any) -> None: self.add_dependency(node.name.value) -def collect_dependencies(selection_set: SelectionSetNode) -> List[str]: +def collect_dependencies(selection_set: SelectionSetNode) -> list[str]: collector = DependencyCollector() visit(selection_set, collector) return collector.dependencies diff --git a/src/graphql/utilities/sort_value_node.py b/src/graphql/utilities/sort_value_node.py index 8a0c7935..bf20cf37 100644 --- a/src/graphql/utilities/sort_value_node.py +++ b/src/graphql/utilities/sort_value_node.py @@ -1,7 +1,8 @@ """Sorting value nodes""" +from __future__ import annotations + from copy import copy -from typing import Tuple from ..language import ListValueNode, ObjectFieldNode, ObjectValueNode, ValueNode from ..pyutils import natural_comparison_key @@ -31,7 +32,7 @@ def sort_field(field: ObjectFieldNode) -> ObjectFieldNode: return field -def sort_fields(fields: Tuple[ObjectFieldNode, ...]) -> Tuple[ObjectFieldNode, ...]: +def sort_fields(fields: tuple[ObjectFieldNode, ...]) -> tuple[ObjectFieldNode, ...]: return tuple( sorted( (sort_field(field) for field in fields), diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 1824c102..6521d10b 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -1,6 +1,8 @@ """Removal of insignificant characters""" -from typing import Union, cast +from __future__ import annotations + +from typing import cast from ..language import Lexer, TokenKind from ..language.block_string import print_block_string @@ -10,7 +12,7 @@ __all__ = ["strip_ignored_characters"] -def strip_ignored_characters(source: Union[str, Source]) -> 
str: +def strip_ignored_characters(source: str | Source) -> str: '''Strip characters that are ignored anyway. Strips characters that are not significant to the validity or execution diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index a978ffad..499ec1af 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -1,6 +1,8 @@ """Generating GraphQL types from AST nodes""" -from typing import Optional, cast, overload +from __future__ import annotations + +from typing import cast, overload from ..language import ListTypeNode, NamedTypeNode, NonNullTypeNode, TypeNode from ..pyutils import inspect @@ -19,33 +21,33 @@ @overload def type_from_ast( schema: GraphQLSchema, type_node: NamedTypeNode -) -> Optional[GraphQLNamedType]: +) -> GraphQLNamedType | None: ... @overload def type_from_ast( schema: GraphQLSchema, type_node: ListTypeNode -) -> Optional[GraphQLList]: +) -> GraphQLList | None: ... @overload def type_from_ast( schema: GraphQLSchema, type_node: NonNullTypeNode -) -> Optional[GraphQLNonNull]: +) -> GraphQLNonNull | None: ... @overload -def type_from_ast(schema: GraphQLSchema, type_node: TypeNode) -> Optional[GraphQLType]: +def type_from_ast(schema: GraphQLSchema, type_node: TypeNode) -> GraphQLType | None: ... def type_from_ast( schema: GraphQLSchema, type_node: TypeNode, -) -> Optional[GraphQLType]: +) -> GraphQLType | None: """Get the GraphQL type definition from an AST node. Given a Schema and an AST node describing a type, return a GraphQLType definition @@ -54,7 +56,7 @@ def type_from_ast( "User" found in the schema. If a type called "User" is not found in the schema, then None will be returned. 
""" - inner_type: Optional[GraphQLType] + inner_type: GraphQLType | None if isinstance(type_node, ListTypeNode): inner_type = type_from_ast(schema, type_node.type) return GraphQLList(inner_type) if inner_type else None diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 2926112a..5763f16e 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,6 +1,6 @@ """Managing type information""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from typing import Any, Callable, Optional diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index 51d64c73..67ed11dc 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -1,6 +1,8 @@ """Conversion from GraphQL value AST to Python values.""" -from typing import Any, Dict, List, Optional, cast +from __future__ import annotations + +from typing import Any, cast from ..language import ( ListValueNode, @@ -23,9 +25,9 @@ def value_from_ast( - value_node: Optional[ValueNode], + value_node: ValueNode | None, type_: GraphQLInputType, - variables: Optional[Dict[str, Any]] = None, + variables: dict[str, Any] | None = None, ) -> Any: """Produce a Python value given a GraphQL Value AST. 
@@ -76,7 +78,7 @@ def value_from_ast( if is_list_type(type_): item_type = type_.of_type if isinstance(value_node, ListValueNode): - coerced_values: List[Any] = [] + coerced_values: list[Any] = [] append_value = coerced_values.append for item_node in value_node.values: if is_missing_variable(item_node, variables): @@ -99,7 +101,7 @@ def value_from_ast( if is_input_object_type(type_): if not isinstance(value_node, ObjectValueNode): return Undefined - coerced_obj: Dict[str, Any] = {} + coerced_obj: dict[str, Any] = {} fields = type_.fields field_nodes = {field.name.value: field for field in value_node.fields} for field_name, field in fields.items(): @@ -138,7 +140,7 @@ def value_from_ast( def is_missing_variable( - value_node: ValueNode, variables: Optional[Dict[str, Any]] = None + value_node: ValueNode, variables: dict[str, Any] | None = None ) -> bool: """Check if ``value_node`` is a variable not defined in the ``variables`` dict.""" return isinstance(value_node, VariableNode) and ( diff --git a/src/graphql/utilities/value_from_ast_untyped.py b/src/graphql/utilities/value_from_ast_untyped.py index 26c1bfb7..4a85154f 100644 --- a/src/graphql/utilities/value_from_ast_untyped.py +++ b/src/graphql/utilities/value_from_ast_untyped.py @@ -1,27 +1,31 @@ """Conversion from GraphQL value AST to Python values without type.""" +from __future__ import annotations + from math import nan -from typing import Any, Callable, Dict, Optional, Union - -from ..language import ( - BooleanValueNode, - EnumValueNode, - FloatValueNode, - IntValueNode, - ListValueNode, - NullValueNode, - ObjectValueNode, - StringValueNode, - ValueNode, - VariableNode, -) +from typing import TYPE_CHECKING, Any, Callable + from ..pyutils import Undefined, inspect +if TYPE_CHECKING: + from ..language import ( + BooleanValueNode, + EnumValueNode, + FloatValueNode, + IntValueNode, + ListValueNode, + NullValueNode, + ObjectValueNode, + StringValueNode, + ValueNode, + VariableNode, + ) + __all__ = 
["value_from_ast_untyped"] def value_from_ast_untyped( - value_node: ValueNode, variables: Optional[Dict[str, Any]] = None + value_node: ValueNode, variables: dict[str, Any] | None = None ) -> Any: """Produce a Python value given a GraphQL Value AST. @@ -68,19 +72,19 @@ def value_from_float(value_node: FloatValueNode, _variables: Any) -> Any: def value_from_string( - value_node: Union[BooleanValueNode, EnumValueNode, StringValueNode], _variables: Any + value_node: BooleanValueNode | EnumValueNode | StringValueNode, _variables: Any ) -> Any: return value_node.value def value_from_list( - value_node: ListValueNode, variables: Optional[Dict[str, Any]] + value_node: ListValueNode, variables: dict[str, Any] | None ) -> Any: return [value_from_ast_untyped(node, variables) for node in value_node.values] def value_from_object( - value_node: ObjectValueNode, variables: Optional[Dict[str, Any]] + value_node: ObjectValueNode, variables: dict[str, Any] | None ) -> Any: return { field.name.value: value_from_ast_untyped(field.value, variables) @@ -89,7 +93,7 @@ def value_from_object( def value_from_variable( - value_node: VariableNode, variables: Optional[Dict[str, Any]] + value_node: VariableNode, variables: dict[str, Any] | None ) -> Any: variable_name = value_node.name.value if not variables: @@ -97,7 +101,7 @@ def value_from_variable( return variables.get(variable_name, Undefined) -_value_from_kind_functions: Dict[str, Callable] = { +_value_from_kind_functions: dict[str, Callable] = { "null_value": value_from_null, "int_value": value_from_int, "float_value": value_from_float, diff --git a/src/graphql/validation/rules/custom/no_deprecated.py b/src/graphql/validation/rules/custom/no_deprecated.py index 238e8fa0..c9742911 100644 --- a/src/graphql/validation/rules/custom/no_deprecated.py +++ b/src/graphql/validation/rules/custom/no_deprecated.py @@ -1,12 +1,16 @@ """No deprecated rule""" -from typing import Any +from __future__ import annotations + +from typing import 
TYPE_CHECKING, Any from ....error import GraphQLError -from ....language import ArgumentNode, EnumValueNode, FieldNode, ObjectFieldNode from ....type import get_named_type, is_input_object_type from .. import ValidationRule +if TYPE_CHECKING: + from ....language import ArgumentNode, EnumValueNode, FieldNode, ObjectFieldNode + __all__ = ["NoDeprecatedCustomRule"] diff --git a/src/graphql/validation/rules/custom/no_schema_introspection.py b/src/graphql/validation/rules/custom/no_schema_introspection.py index 1a16d169..99c12a9e 100644 --- a/src/graphql/validation/rules/custom/no_schema_introspection.py +++ b/src/graphql/validation/rules/custom/no_schema_introspection.py @@ -1,12 +1,16 @@ """No schema introspection rule""" -from typing import Any +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ....error import GraphQLError -from ....language import FieldNode from ....type import get_named_type, is_introspection_type from .. import ValidationRule +if TYPE_CHECKING: + from ....language import FieldNode + __all__ = ["NoSchemaIntrospectionCustomRule"] diff --git a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py index dbb274b3..7a73a990 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py @@ -1,12 +1,16 @@ """Defer stream directive on root field rule""" -from typing import Any, List, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast from ...error import GraphQLError -from ...language import DirectiveNode, Node from ...type import GraphQLDeferDirective, GraphQLStreamDirective from . 
import ASTValidationRule, ValidationContext +if TYPE_CHECKING: + from ...language import DirectiveNode, Node + __all__ = ["DeferStreamDirectiveOnRootField"] @@ -23,7 +27,7 @@ def enter_directive( _key: Any, _parent: Any, _path: Any, - _ancestors: List[Node], + _ancestors: list[Node], ) -> None: context = cast(ValidationContext, self.context) parent_type = context.get_parent_type() diff --git a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py index 391c8932..240092b7 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py @@ -1,6 +1,8 @@ """Defer stream directive on valid operations rule""" -from typing import Any, List, Set +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import ( @@ -39,7 +41,7 @@ class DeferStreamDirectiveOnValidOperationsRule(ASTValidationRule): def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.fragments_used_on_subscriptions: Set[str] = set() + self.fragments_used_on_subscriptions: set[str] = set() def enter_operation_definition( self, operation: OperationDefinitionNode, *_args: Any @@ -55,7 +57,7 @@ def enter_directive( _key: Any, _parent: Any, _path: Any, - ancestors: List[Node], + ancestors: list[Node], ) -> None: try: definition_node = ancestors[2] diff --git a/src/graphql/validation/rules/executable_definitions.py b/src/graphql/validation/rules/executable_definitions.py index 5c8f5f67..1f702210 100644 --- a/src/graphql/validation/rules/executable_definitions.py +++ b/src/graphql/validation/rules/executable_definitions.py @@ -1,5 +1,7 @@ """Executable definitions rule""" +from __future__ import annotations + from typing import Any, Union, cast from ...error import GraphQLError diff --git 
a/src/graphql/validation/rules/fields_on_correct_type.py b/src/graphql/validation/rules/fields_on_correct_type.py index 3eef26ea..83142fae 100644 --- a/src/graphql/validation/rules/fields_on_correct_type.py +++ b/src/graphql/validation/rules/fields_on_correct_type.py @@ -1,11 +1,12 @@ """Fields on correct type rule""" +from __future__ import annotations + from collections import defaultdict from functools import cmp_to_key -from typing import Any, Dict, List, Union +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FieldNode from ...pyutils import did_you_mean, natural_comparison_key, suggestion_list from ...type import ( GraphQLInterfaceType, @@ -18,6 +19,9 @@ ) from . import ValidationRule +if TYPE_CHECKING: + from ...language import FieldNode + __all__ = ["FieldsOnCorrectTypeRule"] @@ -62,7 +66,7 @@ def enter_field(self, node: FieldNode, *_args: Any) -> None: def get_suggested_type_names( schema: GraphQLSchema, type_: GraphQLOutputType, field_name: str -) -> List[str]: +) -> list[str]: """Get a list of suggested type names. 
Go through all of the implementations of type, as well as the interfaces @@ -74,8 +78,8 @@ def get_suggested_type_names( return [] # Use a dict instead of a set for stable sorting when usage counts are the same - suggested_types: Dict[Union[GraphQLObjectType, GraphQLInterfaceType], None] = {} - usage_count: Dict[str, int] = defaultdict(int) + suggested_types: dict[GraphQLObjectType | GraphQLInterfaceType, None] = {} + usage_count: dict[str, int] = defaultdict(int) for possible_type in schema.get_possible_types(type_): if field_name not in possible_type.fields: continue @@ -93,8 +97,8 @@ def get_suggested_type_names( usage_count[possible_interface.name] += 1 def cmp( - type_a: Union[GraphQLObjectType, GraphQLInterfaceType], - type_b: Union[GraphQLObjectType, GraphQLInterfaceType], + type_a: GraphQLObjectType | GraphQLInterfaceType, + type_b: GraphQLObjectType | GraphQLInterfaceType, ) -> int: # pragma: no cover # Suggest both interface and object types based on how common they are. usage_count_diff = usage_count[type_b.name] - usage_count[type_a.name] @@ -118,7 +122,7 @@ def cmp( return [type_.name for type_ in sorted(suggested_types, key=cmp_to_key(cmp))] -def get_suggested_field_names(type_: GraphQLOutputType, field_name: str) -> List[str]: +def get_suggested_field_names(type_: GraphQLOutputType, field_name: str) -> list[str]: """Get a list of suggested field names. 
For the field name provided, determine if there are any similar field names that may diff --git a/src/graphql/validation/rules/fragments_on_composite_types.py b/src/graphql/validation/rules/fragments_on_composite_types.py index c679b59d..782f6c70 100644 --- a/src/graphql/validation/rules/fragments_on_composite_types.py +++ b/src/graphql/validation/rules/fragments_on_composite_types.py @@ -1,5 +1,7 @@ """Fragments on composite type rule""" +from __future__ import annotations + from typing import Any from ...error import GraphQLError diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index da6b7481..dadfd34a 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -1,6 +1,8 @@ """Known argument names on directives rule""" -from typing import Any, Dict, List, Union, cast +from __future__ import annotations + +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( @@ -25,11 +27,11 @@ class KnownArgumentNamesOnDirectivesRule(ASTValidationRule): For internal use only. 
""" - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - directive_args: Dict[str, List[str]] = {} + directive_args: dict[str, list[str]] = {} schema = context.schema defined_directives = schema.directives if schema else specified_directives diff --git a/src/graphql/validation/rules/known_directives.py b/src/graphql/validation/rules/known_directives.py index b7504542..8a0c76c4 100644 --- a/src/graphql/validation/rules/known_directives.py +++ b/src/graphql/validation/rules/known_directives.py @@ -1,6 +1,8 @@ """Known directives rule""" -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from __future__ import annotations + +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( @@ -25,11 +27,11 @@ class KnownDirectivesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Directives-Are-Defined """ - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - locations_map: Dict[str, Tuple[DirectiveLocation, ...]] = {} + locations_map: dict[str, tuple[DirectiveLocation, ...]] = {} schema = context.schema defined_directives = ( @@ -51,7 +53,7 @@ def enter_directive( _key: Any, _parent: Any, _path: Any, - ancestors: List[Node], + ancestors: list[Node], ) -> None: name = node.name.value locations = self.locations_map.get(name) @@ -101,8 +103,8 @@ def enter_directive( def get_directive_location_for_ast_path( - ancestors: List[Node], -) -> Optional[DirectiveLocation]: + ancestors: list[Node], +) -> DirectiveLocation | 
None: applied_to = ancestors[-1] if not isinstance(applied_to, Node): # pragma: no cover msg = "Unexpected error in directive." diff --git a/src/graphql/validation/rules/known_fragment_names.py b/src/graphql/validation/rules/known_fragment_names.py index 990436ed..52e9b679 100644 --- a/src/graphql/validation/rules/known_fragment_names.py +++ b/src/graphql/validation/rules/known_fragment_names.py @@ -1,11 +1,15 @@ """Known fragment names rule""" -from typing import Any +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FragmentSpreadNode from . import ValidationRule +if TYPE_CHECKING: + from ...language import FragmentSpreadNode + __all__ = ["KnownFragmentNamesRule"] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index f914e409..118d7c0e 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -1,6 +1,8 @@ """Known type names rule""" -from typing import Any, Collection, List, Union, cast +from __future__ import annotations + +from typing import Any, Collection, cast from ...error import GraphQLError from ...language import ( @@ -34,7 +36,7 @@ class KnownTypeNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-Spread-Type-Existence """ - def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_types_map = schema.type_map if schema else {} @@ -53,7 +55,7 @@ def enter_named_type( _key: Any, parent: Node, _path: Any, - ancestors: List[Node], + ancestors: list[Node], ) -> None: type_name = node.name.value if ( @@ -86,8 +88,8 @@ def enter_named_type( def is_sdl_node( - value: Union[Node, Collection[Node], None], -) -> TypeGuard[Union[TypeSystemDefinitionNode, 
TypeSystemExtensionNode]]: + value: Node | Collection[Node] | None, +) -> TypeGuard[TypeSystemDefinitionNode | TypeSystemExtensionNode]: return ( value is not None and not isinstance(value, list) diff --git a/src/graphql/validation/rules/lone_anonymous_operation.py b/src/graphql/validation/rules/lone_anonymous_operation.py index dedde5ca..f7587bda 100644 --- a/src/graphql/validation/rules/lone_anonymous_operation.py +++ b/src/graphql/validation/rules/lone_anonymous_operation.py @@ -1,5 +1,7 @@ """Lone anonymous operation rule""" +from __future__ import annotations + from typing import Any from ...error import GraphQLError diff --git a/src/graphql/validation/rules/lone_schema_definition.py b/src/graphql/validation/rules/lone_schema_definition.py index 0e732c47..ceac80d1 100644 --- a/src/graphql/validation/rules/lone_schema_definition.py +++ b/src/graphql/validation/rules/lone_schema_definition.py @@ -1,11 +1,15 @@ """Lone Schema definition rule""" -from typing import Any +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import SchemaDefinitionNode from . 
import SDLValidationContext, SDLValidationRule +if TYPE_CHECKING: + from ...language import SchemaDefinitionNode + __all__ = ["LoneSchemaDefinitionRule"] diff --git a/src/graphql/validation/rules/no_fragment_cycles.py b/src/graphql/validation/rules/no_fragment_cycles.py index 5f1a0955..c7584655 100644 --- a/src/graphql/validation/rules/no_fragment_cycles.py +++ b/src/graphql/validation/rules/no_fragment_cycles.py @@ -1,6 +1,8 @@ """No fragment cycles rule""" -from typing import Any, Dict, List, Set +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import SKIP, FragmentDefinitionNode, FragmentSpreadNode, VisitorAction @@ -23,11 +25,11 @@ def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) # Tracks already visited fragments to maintain O(N) and to ensure that # cycles are not redundantly reported. - self.visited_frags: Set[str] = set() + self.visited_frags: set[str] = set() # List of AST nodes used to produce meaningful errors - self.spread_path: List[FragmentSpreadNode] = [] + self.spread_path: list[FragmentSpreadNode] = [] # Position in the spread path - self.spread_path_index_by_name: Dict[str, int] = {} + self.spread_path_index_by_name: dict[str, int] = {} @staticmethod def enter_operation_definition(*_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/no_undefined_variables.py b/src/graphql/validation/rules/no_undefined_variables.py index 33ff1be8..5c20d647 100644 --- a/src/graphql/validation/rules/no_undefined_variables.py +++ b/src/graphql/validation/rules/no_undefined_variables.py @@ -1,11 +1,15 @@ """No undefined variables rule""" -from typing import Any, Set +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import OperationDefinitionNode, VariableDefinitionNode from . 
import ValidationContext, ValidationRule +if TYPE_CHECKING: + from ...language import OperationDefinitionNode, VariableDefinitionNode + __all__ = ["NoUndefinedVariablesRule"] @@ -20,7 +24,7 @@ class NoUndefinedVariablesRule(ValidationRule): def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.defined_variable_names: Set[str] = set() + self.defined_variable_names: set[str] = set() def enter_operation_definition(self, *_args: Any) -> None: self.defined_variable_names.clear() diff --git a/src/graphql/validation/rules/no_unused_fragments.py b/src/graphql/validation/rules/no_unused_fragments.py index d13da572..b79b5b07 100644 --- a/src/graphql/validation/rules/no_unused_fragments.py +++ b/src/graphql/validation/rules/no_unused_fragments.py @@ -1,6 +1,8 @@ """No unused fragments rule""" -from typing import Any, List +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import ( @@ -25,8 +27,8 @@ class NoUnusedFragmentsRule(ASTValidationRule): def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.operation_defs: List[OperationDefinitionNode] = [] - self.fragment_defs: List[FragmentDefinitionNode] = [] + self.operation_defs: list[OperationDefinitionNode] = [] + self.fragment_defs: list[FragmentDefinitionNode] = [] def enter_operation_definition( self, node: OperationDefinitionNode, *_args: Any diff --git a/src/graphql/validation/rules/no_unused_variables.py b/src/graphql/validation/rules/no_unused_variables.py index 8e714e83..ec5d0b70 100644 --- a/src/graphql/validation/rules/no_unused_variables.py +++ b/src/graphql/validation/rules/no_unused_variables.py @@ -1,11 +1,15 @@ """No unused variables rule""" -from typing import Any, List, Set +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import OperationDefinitionNode, VariableDefinitionNode from . 
import ValidationContext, ValidationRule +if TYPE_CHECKING: + from ...language import OperationDefinitionNode, VariableDefinitionNode + __all__ = ["NoUnusedVariablesRule"] @@ -20,7 +24,7 @@ class NoUnusedVariablesRule(ValidationRule): def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.variable_defs: List[VariableDefinitionNode] = [] + self.variable_defs: list[VariableDefinitionNode] = [] def enter_operation_definition(self, *_args: Any) -> None: self.variable_defs.clear() @@ -28,7 +32,7 @@ def enter_operation_definition(self, *_args: Any) -> None: def leave_operation_definition( self, operation: OperationDefinitionNode, *_args: Any ) -> None: - variable_name_used: Set[str] = set() + variable_name_used: set[str] = set() usages = self.context.get_recursive_variable_usages(operation) for usage in usages: diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 67714c40..b79bf2a6 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -1,5 +1,7 @@ """Overlapping fields can be merged rule""" +from __future__ import annotations + from itertools import chain from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast @@ -41,7 +43,7 @@ __all__ = ["OverlappingFieldsCanBeMergedRule"] -def reason_message(reason: "ConflictReasonMessage") -> str: +def reason_message(reason: ConflictReasonMessage) -> str: if isinstance(reason, list): return " and ".join( f"subfields '{response_name}' conflict" @@ -70,7 +72,7 @@ def __init__(self, context: ValidationContext) -> None: # A cache for the "field map" and list of fragment names found in any given # selection set. Selection sets may be asked for this information multiple # times, so this improves the performance of this validator. 
- self.cached_fields_and_fragment_names: Dict = {} + self.cached_fields_and_fragment_names: dict = {} def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> None: conflicts = find_conflicts_within_selection_set( @@ -161,11 +163,11 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N def find_conflicts_within_selection_set( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", - parent_type: Optional[GraphQLNamedType], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, + parent_type: GraphQLNamedType | None, selection_set: SelectionSetNode, -) -> List[Conflict]: +) -> list[Conflict]: """Find conflicts within selection set. Find all conflicts found "within" a selection set, including those found via @@ -173,7 +175,7 @@ def find_conflicts_within_selection_set( Called when visiting each SelectionSet in the GraphQL Document. """ - conflicts: List[Conflict] = [] + conflicts: list[Conflict] = [] field_map, fragment_names = get_fields_and_fragment_names( context, cached_fields_and_fragment_names, parent_type, selection_set @@ -222,9 +224,9 @@ def find_conflicts_within_selection_set( def collect_conflicts_between_fields_and_fragment( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, are_mutually_exclusive: bool, field_map: NodeAndDefCollection, fragment_name: str, @@ -283,9 +285,9 @@ def collect_conflicts_between_fields_and_fragment( def collect_conflicts_between_fragments( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, are_mutually_exclusive: bool, 
fragment_name1: str, fragment_name2: str, @@ -360,21 +362,21 @@ def collect_conflicts_between_fragments( def find_conflicts_between_sub_selection_sets( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, are_mutually_exclusive: bool, - parent_type1: Optional[GraphQLNamedType], + parent_type1: GraphQLNamedType | None, selection_set1: SelectionSetNode, - parent_type2: Optional[GraphQLNamedType], + parent_type2: GraphQLNamedType | None, selection_set2: SelectionSetNode, -) -> List[Conflict]: +) -> list[Conflict]: """Find conflicts between sub selection sets. Find all conflicts found between two selection sets, including those found via spreading in fragments. Called when determining if conflicts exist between the sub-fields of two overlapping fields. """ - conflicts: List[Conflict] = [] + conflicts: list[Conflict] = [] field_map1, fragment_names1 = get_fields_and_fragment_names( context, cached_fields_and_fragment_names, parent_type1, selection_set1 @@ -442,9 +444,9 @@ def find_conflicts_between_sub_selection_sets( def collect_conflicts_within( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, field_map: NodeAndDefCollection, ) -> None: """Collect all Conflicts "within" one collection of fields.""" @@ -475,9 +477,9 @@ def collect_conflicts_within( def collect_conflicts_between( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, parent_fields_are_mutually_exclusive: bool, field_map1: NodeAndDefCollection, field_map2: NodeAndDefCollection, @@ -514,13 +516,13 
@@ def collect_conflicts_between( def find_conflict( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, parent_fields_are_mutually_exclusive: bool, response_name: str, field1: NodeAndDef, field2: NodeAndDef, -) -> Optional[Conflict]: +) -> Conflict | None: """Find conflict. Determines if there is a conflict between two particular fields, including comparing @@ -598,7 +600,7 @@ def find_conflict( def same_arguments( - node1: Union[FieldNode, DirectiveNode], node2: Union[FieldNode, DirectiveNode] + node1: FieldNode | DirectiveNode, node2: FieldNode | DirectiveNode ) -> bool: args1 = node1.arguments args2 = node2.arguments @@ -629,7 +631,7 @@ def stringify_value(value: ValueNode) -> str: def get_stream_directive( directives: Sequence[DirectiveNode], -) -> Optional[DirectiveNode]: +) -> DirectiveNode | None: for directive in directives: if directive.name.value == "stream": return directive @@ -681,10 +683,10 @@ def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> boo def get_fields_and_fragment_names( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - parent_type: Optional[GraphQLNamedType], + cached_fields_and_fragment_names: dict, + parent_type: GraphQLNamedType | None, selection_set: SelectionSetNode, -) -> Tuple[NodeAndDefCollection, List[str]]: +) -> tuple[NodeAndDefCollection, list[str]]: """Get fields and referenced fragment names Given a selection set, return the collection of fields (a mapping of response name @@ -694,7 +696,7 @@ def get_fields_and_fragment_names( cached = cached_fields_and_fragment_names.get(selection_set) if not cached: node_and_defs: NodeAndDefCollection = {} - fragment_names: Dict[str, bool] = {} + fragment_names: dict[str, bool] = {} collect_fields_and_fragment_names( context, parent_type, selection_set, node_and_defs, fragment_names ) @@ -705,9 +707,9 @@ def 
get_fields_and_fragment_names( def get_referenced_fields_and_fragment_names( context: ValidationContext, - cached_fields_and_fragment_names: Dict, + cached_fields_and_fragment_names: dict, fragment: FragmentDefinitionNode, -) -> Tuple[NodeAndDefCollection, List[str]]: +) -> tuple[NodeAndDefCollection, list[str]]: """Get referenced fields and nested fragment names Given a reference to a fragment, return the represented collection of fields as well @@ -726,10 +728,10 @@ def get_referenced_fields_and_fragment_names( def collect_fields_and_fragment_names( context: ValidationContext, - parent_type: Optional[GraphQLNamedType], + parent_type: GraphQLNamedType | None, selection_set: SelectionSetNode, node_and_defs: NodeAndDefCollection, - fragment_names: Dict[str, bool], + fragment_names: dict[str, bool], ) -> None: for selection in selection_set.selections: if isinstance(selection, FieldNode): @@ -764,8 +766,8 @@ def collect_fields_and_fragment_names( def subfield_conflicts( - conflicts: List[Conflict], response_name: str, node1: FieldNode, node2: FieldNode -) -> Optional[Conflict]: + conflicts: list[Conflict], response_name: str, node1: FieldNode, node2: FieldNode +) -> Conflict | None: """Check whether there are conflicts between sub-fields. 
Given a series of Conflicts which occurred between two sub-fields, generate a single @@ -788,7 +790,7 @@ class PairSet: __slots__ = ("_data",) - _data: Dict[str, Dict[str, bool]] + _data: dict[str, dict[str, bool]] def __init__(self) -> None: self._data = {} diff --git a/src/graphql/validation/rules/possible_fragment_spreads.py b/src/graphql/validation/rules/possible_fragment_spreads.py index d2a39c2e..11748a47 100644 --- a/src/graphql/validation/rules/possible_fragment_spreads.py +++ b/src/graphql/validation/rules/possible_fragment_spreads.py @@ -1,13 +1,17 @@ """Possible fragment spread rule""" -from typing import Any, Optional +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FragmentSpreadNode, InlineFragmentNode from ...type import GraphQLCompositeType, is_composite_type from ...utilities import do_types_overlap, type_from_ast from . import ValidationRule +if TYPE_CHECKING: + from ...language import FragmentSpreadNode, InlineFragmentNode + __all__ = ["PossibleFragmentSpreadsRule"] @@ -54,7 +58,7 @@ def enter_fragment_spread(self, node: FragmentSpreadNode, *_args: Any) -> None: ) ) - def get_fragment_type(self, name: str) -> Optional[GraphQLCompositeType]: + def get_fragment_type(self, name: str) -> GraphQLCompositeType | None: context = self.context frag = context.get_fragment(name) if frag: diff --git a/src/graphql/validation/rules/possible_type_extensions.py b/src/graphql/validation/rules/possible_type_extensions.py index 8eab7111..e8eb349d 100644 --- a/src/graphql/validation/rules/possible_type_extensions.py +++ b/src/graphql/validation/rules/possible_type_extensions.py @@ -1,8 +1,10 @@ """Possible type extension rule""" +from __future__ import annotations + import re from functools import partial -from typing import Any, Optional +from typing import Any from ...error import GraphQLError from ...language import TypeDefinitionNode, TypeExtensionNode @@ -41,7 +43,7 @@ def 
check_extension(self, node: TypeExtensionNode, *_args: Any) -> None: def_node = self.defined_types.get(type_name) existing_type = schema.get_type(type_name) if schema else None - expected_kind: Optional[str] + expected_kind: str | None if def_node: expected_kind = def_kind_to_ext_kind(def_node.kind) elif existing_type: diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index 9da2395f..a9313273 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -1,6 +1,8 @@ """Provided required arguments on directives rule""" -from typing import Any, Dict, List, Union, cast +from __future__ import annotations + +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( @@ -29,12 +31,12 @@ class ProvidedRequiredArgumentsOnDirectivesRule(ASTValidationRule): For internal use only. """ - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - required_args_map: Dict[ - str, Dict[str, Union[GraphQLArgument, InputValueDefinitionNode]] + required_args_map: dict[ + str, dict[str, GraphQLArgument | InputValueDefinitionNode] ] = {} schema = context.schema diff --git a/src/graphql/validation/rules/scalar_leafs.py b/src/graphql/validation/rules/scalar_leafs.py index 31ba0550..73a51c78 100644 --- a/src/graphql/validation/rules/scalar_leafs.py +++ b/src/graphql/validation/rules/scalar_leafs.py @@ -1,12 +1,16 @@ """Scalar leafs rule""" -from typing import Any +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FieldNode from ...type import get_named_type, 
is_leaf_type from . import ValidationRule +if TYPE_CHECKING: + from ...language import FieldNode + __all__ = ["ScalarLeafsRule"] diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index e8ce9ec5..fc7fd2bc 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -1,6 +1,8 @@ """Single field subscriptions rule""" -from typing import Any, Dict, cast +from __future__ import annotations + +from typing import Any, cast from ...error import GraphQLError from ...execution.collect_fields import collect_fields @@ -33,9 +35,9 @@ def enter_operation_definition( subscription_type = schema.subscription_type if subscription_type: operation_name = node.name.value if node.name else None - variable_values: Dict[str, Any] = {} + variable_values: dict[str, Any] = {} document = self.context.document - fragments: Dict[str, FragmentDefinitionNode] = { + fragments: dict[str, FragmentDefinitionNode] = { definition.name.value: definition for definition in document.definitions if isinstance(definition, FragmentDefinitionNode) diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py index f0ab3ef4..141984c2 100644 --- a/src/graphql/validation/rules/stream_directive_on_list_field.py +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py @@ -1,12 +1,16 @@ """Stream directive on list field rule""" -from typing import Any, List, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast from ...error import GraphQLError -from ...language import DirectiveNode, Node from ...type import GraphQLStreamDirective, is_list_type, is_wrapping_type from . 
import ASTValidationRule, ValidationContext +if TYPE_CHECKING: + from ...language import DirectiveNode, Node + __all__ = ["StreamDirectiveOnListField"] @@ -22,7 +26,7 @@ def enter_directive( _key: Any, _parent: Any, _path: Any, - _ancestors: List[Node], + _ancestors: list[Node], ) -> None: context = cast(ValidationContext, self.context) field_def = context.get_field_def() diff --git a/src/graphql/validation/rules/unique_argument_definition_names.py b/src/graphql/validation/rules/unique_argument_definition_names.py index 24afa4db..b992577f 100644 --- a/src/graphql/validation/rules/unique_argument_definition_names.py +++ b/src/graphql/validation/rules/unique_argument_definition_names.py @@ -1,5 +1,7 @@ """Unique argument definition names rule""" +from __future__ import annotations + from operator import attrgetter from typing import Any, Collection diff --git a/src/graphql/validation/rules/unique_argument_names.py b/src/graphql/validation/rules/unique_argument_names.py index bf226592..124aa6e6 100644 --- a/src/graphql/validation/rules/unique_argument_names.py +++ b/src/graphql/validation/rules/unique_argument_names.py @@ -1,13 +1,17 @@ """Unique argument names rule""" +from __future__ import annotations + from operator import attrgetter -from typing import Any, Collection +from typing import TYPE_CHECKING, Any, Collection from ...error import GraphQLError -from ...language import ArgumentNode, DirectiveNode, FieldNode from ...pyutils import group_by from . 
import ASTValidationRule +if TYPE_CHECKING: + from ...language import ArgumentNode, DirectiveNode, FieldNode + __all__ = ["UniqueArgumentNamesRule"] diff --git a/src/graphql/validation/rules/unique_directive_names.py b/src/graphql/validation/rules/unique_directive_names.py index 039b1b48..24d8066f 100644 --- a/src/graphql/validation/rules/unique_directive_names.py +++ b/src/graphql/validation/rules/unique_directive_names.py @@ -1,6 +1,8 @@ """Unique directive names rule""" -from typing import Any, Dict +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import SKIP, DirectiveDefinitionNode, NameNode, VisitorAction @@ -17,7 +19,7 @@ class UniqueDirectiveNamesRule(SDLValidationRule): def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) - self.known_directive_names: Dict[str, NameNode] = {} + self.known_directive_names: dict[str, NameNode] = {} self.schema = context.schema def enter_directive_definition( diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index 040c148f..de9a05d0 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -1,7 +1,9 @@ """Unique directive names per location rule""" +from __future__ import annotations + from collections import defaultdict -from typing import Any, Dict, List, Union, cast +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( @@ -28,11 +30,11 @@ class UniqueDirectivesPerLocationRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Directives-Are-Unique-Per-Location """ - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> None: + def __init__(self, context: 
ValidationContext | SDLValidationContext) -> None: super().__init__(context) - unique_directive_map: Dict[str, bool] = {} + unique_directive_map: dict[str, bool] = {} schema = context.schema defined_directives = ( @@ -47,8 +49,8 @@ def __init__(self, context: Union[ValidationContext, SDLValidationContext]) -> N unique_directive_map[def_.name.value] = not def_.repeatable self.unique_directive_map = unique_directive_map - self.schema_directives: Dict[str, DirectiveNode] = {} - self.type_directives_map: Dict[str, Dict[str, DirectiveNode]] = defaultdict( + self.schema_directives: dict[str, DirectiveNode] = {} + self.type_directives_map: dict[str, dict[str, DirectiveNode]] = defaultdict( dict ) diff --git a/src/graphql/validation/rules/unique_enum_value_names.py b/src/graphql/validation/rules/unique_enum_value_names.py index ef50ca2c..1df28d83 100644 --- a/src/graphql/validation/rules/unique_enum_value_names.py +++ b/src/graphql/validation/rules/unique_enum_value_names.py @@ -1,7 +1,9 @@ """Unique enum value names rule""" +from __future__ import annotations + from collections import defaultdict -from typing import Any, Dict +from typing import Any from ...error import GraphQLError from ...language import SKIP, EnumTypeDefinitionNode, NameNode, VisitorAction @@ -21,7 +23,7 @@ def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_type_map = schema.type_map if schema else {} - self.known_value_names: Dict[str, Dict[str, NameNode]] = defaultdict(dict) + self.known_value_names: dict[str, dict[str, NameNode]] = defaultdict(dict) def check_value_uniqueness( self, node: EnumTypeDefinitionNode, *_args: Any diff --git a/src/graphql/validation/rules/unique_field_definition_names.py b/src/graphql/validation/rules/unique_field_definition_names.py index 8c7ca9af..8451bc27 100644 --- a/src/graphql/validation/rules/unique_field_definition_names.py +++ b/src/graphql/validation/rules/unique_field_definition_names.py @@ 
-1,7 +1,9 @@ """Unique field definition names rule""" +from __future__ import annotations + from collections import defaultdict -from typing import Any, Dict +from typing import Any from ...error import GraphQLError from ...language import SKIP, NameNode, ObjectTypeDefinitionNode, VisitorAction @@ -21,7 +23,7 @@ def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_type_map = schema.type_map if schema else {} - self.known_field_names: Dict[str, Dict[str, NameNode]] = defaultdict(dict) + self.known_field_names: dict[str, dict[str, NameNode]] = defaultdict(dict) def check_field_uniqueness( self, node: ObjectTypeDefinitionNode, *_args: Any diff --git a/src/graphql/validation/rules/unique_fragment_names.py b/src/graphql/validation/rules/unique_fragment_names.py index 40433944..a4c16d86 100644 --- a/src/graphql/validation/rules/unique_fragment_names.py +++ b/src/graphql/validation/rules/unique_fragment_names.py @@ -1,6 +1,8 @@ """Unique fragment names rule""" -from typing import Any, Dict +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import SKIP, FragmentDefinitionNode, NameNode, VisitorAction @@ -19,7 +21,7 @@ class UniqueFragmentNamesRule(ASTValidationRule): def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.known_fragment_names: Dict[str, NameNode] = {} + self.known_fragment_names: dict[str, NameNode] = {} @staticmethod def enter_operation_definition(*_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/unique_input_field_names.py b/src/graphql/validation/rules/unique_input_field_names.py index a76efcd1..b9de90f7 100644 --- a/src/graphql/validation/rules/unique_input_field_names.py +++ b/src/graphql/validation/rules/unique_input_field_names.py @@ -1,11 +1,15 @@ """Unique input field names rule""" -from typing import Any, Dict, List +from __future__ import annotations + +from 
typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import NameNode, ObjectFieldNode from . import ASTValidationContext, ASTValidationRule +if TYPE_CHECKING: + from ...language import NameNode, ObjectFieldNode + __all__ = ["UniqueInputFieldNamesRule"] @@ -20,8 +24,8 @@ class UniqueInputFieldNamesRule(ASTValidationRule): def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.known_names_stack: List[Dict[str, NameNode]] = [] - self.known_names: Dict[str, NameNode] = {} + self.known_names_stack: list[dict[str, NameNode]] = [] + self.known_names: dict[str, NameNode] = {} def enter_object_value(self, *_args: Any) -> None: self.known_names_stack.append(self.known_names) diff --git a/src/graphql/validation/rules/unique_operation_names.py b/src/graphql/validation/rules/unique_operation_names.py index 4752d23f..03af6335 100644 --- a/src/graphql/validation/rules/unique_operation_names.py +++ b/src/graphql/validation/rules/unique_operation_names.py @@ -1,6 +1,8 @@ """Unique operation names rule""" -from typing import Any, Dict +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import SKIP, NameNode, OperationDefinitionNode, VisitorAction @@ -19,7 +21,7 @@ class UniqueOperationNamesRule(ASTValidationRule): def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.known_operation_names: Dict[str, NameNode] = {} + self.known_operation_names: dict[str, NameNode] = {} def enter_operation_definition( self, node: OperationDefinitionNode, *_args: Any diff --git a/src/graphql/validation/rules/unique_operation_types.py b/src/graphql/validation/rules/unique_operation_types.py index ca00f6fa..059c8143 100644 --- a/src/graphql/validation/rules/unique_operation_types.py +++ b/src/graphql/validation/rules/unique_operation_types.py @@ -1,6 +1,8 @@ """Unique operation types rule""" -from typing import TYPE_CHECKING, Any, Dict, 
Optional, Union +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError from ...language import ( @@ -28,11 +30,11 @@ class UniqueOperationTypesRule(SDLValidationRule): def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema - self.defined_operation_types: Dict[ + self.defined_operation_types: dict[ OperationType, OperationTypeDefinitionNode ] = {} - self.existing_operation_types: Dict[ - OperationType, Optional[GraphQLObjectType] + self.existing_operation_types: dict[ + OperationType, GraphQLObjectType | None ] = ( { OperationType.QUERY: schema.query_type, @@ -45,7 +47,7 @@ def __init__(self, context: SDLValidationContext) -> None: self.schema = schema def check_operation_types( - self, node: Union[SchemaDefinitionNode, SchemaExtensionNode], *_args: Any + self, node: SchemaDefinitionNode | SchemaExtensionNode, *_args: Any ) -> VisitorAction: for operation_type in node.operation_types or []: operation = operation_type.operation diff --git a/src/graphql/validation/rules/unique_type_names.py b/src/graphql/validation/rules/unique_type_names.py index 41e0767d..7f7dee8f 100644 --- a/src/graphql/validation/rules/unique_type_names.py +++ b/src/graphql/validation/rules/unique_type_names.py @@ -1,6 +1,8 @@ """Unique type names rule""" -from typing import Any, Dict +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import SKIP, NameNode, TypeDefinitionNode, VisitorAction @@ -17,7 +19,7 @@ class UniqueTypeNamesRule(SDLValidationRule): def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) - self.known_type_names: Dict[str, NameNode] = {} + self.known_type_names: dict[str, NameNode] = {} self.schema = context.schema def check_type_name(self, node: TypeDefinitionNode, *_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/unique_variable_names.py 
b/src/graphql/validation/rules/unique_variable_names.py index 2e8a40ac..28e78653 100644 --- a/src/graphql/validation/rules/unique_variable_names.py +++ b/src/graphql/validation/rules/unique_variable_names.py @@ -1,13 +1,17 @@ """Unique variable names rule""" +from __future__ import annotations + from operator import attrgetter -from typing import Any +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import OperationDefinitionNode from ...pyutils import group_by from . import ASTValidationRule +if TYPE_CHECKING: + from ...language import OperationDefinitionNode + __all__ = ["UniqueVariableNamesRule"] diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 0d5cc8da..8951a2d9 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -1,5 +1,7 @@ """Value literals of correct type rule""" +from __future__ import annotations + from typing import Any, cast from ...error import GraphQLError diff --git a/src/graphql/validation/rules/variables_are_input_types.py b/src/graphql/validation/rules/variables_are_input_types.py index e135b667..552fe91b 100644 --- a/src/graphql/validation/rules/variables_are_input_types.py +++ b/src/graphql/validation/rules/variables_are_input_types.py @@ -1,5 +1,7 @@ """Variables are input types rule""" +from __future__ import annotations + from typing import Any from ...error import GraphQLError diff --git a/src/graphql/validation/rules/variables_in_allowed_position.py b/src/graphql/validation/rules/variables_in_allowed_position.py index ef9beccf..1a8fd2e2 100644 --- a/src/graphql/validation/rules/variables_in_allowed_position.py +++ b/src/graphql/validation/rules/variables_in_allowed_position.py @@ -1,6 +1,8 @@ """Variables in allowed position rule""" -from typing import Any, Dict, Optional +from __future__ import annotations + +from typing import Any from 
...error import GraphQLError from ...language import ( @@ -27,7 +29,7 @@ class VariablesInAllowedPositionRule(ValidationRule): def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.var_def_map: Dict[str, Any] = {} + self.var_def_map: dict[str, Any] = {} def enter_operation_definition(self, *_args: Any) -> None: self.var_def_map.clear() @@ -71,7 +73,7 @@ def enter_variable_definition( def allowed_variable_usage( schema: GraphQLSchema, var_type: GraphQLType, - var_default_value: Optional[ValueNode], + var_default_value: ValueNode | None, location_type: GraphQLType, location_default_value: Any, ) -> bool: diff --git a/src/graphql/validation/specified_rules.py b/src/graphql/validation/specified_rules.py index e024d0d1..e7f7c54e 100644 --- a/src/graphql/validation/specified_rules.py +++ b/src/graphql/validation/specified_rules.py @@ -1,8 +1,8 @@ """Specified rules""" -from typing import Tuple, Type +from __future__ import annotations -from .rules import ASTValidationRule +from typing import TYPE_CHECKING # Spec Section: "Defer And Stream Directive Labels Are Unique" from .rules.defer_stream_directive_label import DeferStreamDirectiveLabel @@ -112,6 +112,9 @@ # Spec Section: "All Variable Usages Are Allowed" from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule +if TYPE_CHECKING: + from .rules import ASTValidationRule + __all__ = ["specified_rules", "specified_sdl_rules"] @@ -120,7 +123,7 @@ # The order of the rules in this list has been adjusted to lead to the # most clear output when encountering multiple validation errors. -specified_rules: Tuple[Type[ASTValidationRule], ...] = ( +specified_rules: tuple[type[ASTValidationRule], ...] = ( ExecutableDefinitionsRule, UniqueOperationNamesRule, LoneAnonymousOperationRule, @@ -158,7 +161,7 @@ most clear output when encountering multiple validation errors. """ -specified_sdl_rules: Tuple[Type[ASTValidationRule], ...] 
= ( +specified_sdl_rules: tuple[type[ASTValidationRule], ...] = ( LoneSchemaDefinitionRule, UniqueOperationTypesRule, UniqueTypeNamesRule, diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 0035d877..1439f7e4 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -1,15 +1,19 @@ """Validation""" -from typing import Collection, List, Optional, Type +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection from ..error import GraphQLError from ..language import DocumentNode, ParallelVisitor, visit from ..type import GraphQLSchema, assert_valid_schema from ..utilities import TypeInfo, TypeInfoVisitor -from .rules import ASTValidationRule from .specified_rules import specified_rules, specified_sdl_rules from .validation_context import SDLValidationContext, ValidationContext +if TYPE_CHECKING: + from .rules import ASTValidationRule + __all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] @@ -25,10 +29,10 @@ class ValidationAbortedError(GraphQLError): def validate( schema: GraphQLSchema, document_ast: DocumentNode, - rules: Optional[Collection[Type[ASTValidationRule]]] = None, - max_errors: Optional[int] = None, - type_info: Optional[TypeInfo] = None, -) -> List[GraphQLError]: + rules: Collection[type[ASTValidationRule]] | None = None, + max_errors: int | None = None, + type_info: TypeInfo | None = None, +) -> list[GraphQLError]: """Implements the "Validation" section of the spec. 
Validation runs synchronously, returning a list of encountered errors, or an empty @@ -56,7 +60,7 @@ def validate( if rules is None: rules = specified_rules - errors: List[GraphQLError] = [] + errors: list[GraphQLError] = [] def on_error(error: GraphQLError) -> None: if len(errors) >= max_errors: @@ -79,14 +83,14 @@ def on_error(error: GraphQLError) -> None: def validate_sdl( document_ast: DocumentNode, - schema_to_extend: Optional[GraphQLSchema] = None, - rules: Optional[Collection[Type[ASTValidationRule]]] = None, -) -> List[GraphQLError]: + schema_to_extend: GraphQLSchema | None = None, + rules: Collection[type[ASTValidationRule]] | None = None, +) -> list[GraphQLError]: """Validate an SDL document. For internal use only. """ - errors: List[GraphQLError] = [] + errors: list[GraphQLError] = [] context = SDLValidationContext(document_ast, schema_to_extend, errors.append) if rules is None: rules = specified_sdl_rules diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index b7be4bca..dec21042 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -1,8 +1,16 @@ """Validation context""" -from typing import Any, Callable, Dict, List, NamedTuple, Optional, Set, Union, cast +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Any, + Callable, + NamedTuple, + Union, + cast, +) -from ..error import GraphQLError from ..language import ( DocumentNode, FragmentDefinitionNode, @@ -14,18 +22,21 @@ VisitorAction, visit, ) -from ..type import ( - GraphQLArgument, - GraphQLCompositeType, - GraphQLDirective, - GraphQLEnumValue, - GraphQLField, - GraphQLInputType, - GraphQLOutputType, - GraphQLSchema, -) from ..utilities import TypeInfo, TypeInfoVisitor +if TYPE_CHECKING: + from ..error import GraphQLError + from ..type import ( + GraphQLArgument, + GraphQLCompositeType, + GraphQLDirective, + GraphQLEnumValue, + GraphQLField, + GraphQLInputType, + 
GraphQLOutputType, + GraphQLSchema, + ) + try: from typing import TypeAlias except ImportError: # Python < 3.10 @@ -47,14 +58,14 @@ class VariableUsage(NamedTuple): """Variable usage""" node: VariableNode - type: Optional[GraphQLInputType] + type: GraphQLInputType | None default_value: Any class VariableUsageVisitor(Visitor): """Visitor adding all variable usages to a given list.""" - usages: List[VariableUsage] + usages: list[VariableUsage] def __init__(self, type_info: TypeInfo) -> None: super().__init__() @@ -84,10 +95,10 @@ class ASTValidationContext: document: DocumentNode - _fragments: Optional[Dict[str, FragmentDefinitionNode]] - _fragment_spreads: Dict[SelectionSetNode, List[FragmentSpreadNode]] - _recursively_referenced_fragments: Dict[ - OperationDefinitionNode, List[FragmentDefinitionNode] + _fragments: dict[str, FragmentDefinitionNode] | None + _fragment_spreads: dict[SelectionSetNode, list[FragmentSpreadNode]] + _recursively_referenced_fragments: dict[ + OperationDefinitionNode, list[FragmentDefinitionNode] ] def __init__( @@ -105,7 +116,7 @@ def on_error(self, error: GraphQLError) -> None: def report_error(self, error: GraphQLError) -> None: self.on_error(error) - def get_fragment(self, name: str) -> Optional[FragmentDefinitionNode]: + def get_fragment(self, name: str) -> FragmentDefinitionNode | None: fragments = self._fragments if fragments is None: fragments = { @@ -117,7 +128,7 @@ def get_fragment(self, name: str) -> Optional[FragmentDefinitionNode]: self._fragments = fragments return fragments.get(name) - def get_fragment_spreads(self, node: SelectionSetNode) -> List[FragmentSpreadNode]: + def get_fragment_spreads(self, node: SelectionSetNode) -> list[FragmentSpreadNode]: spreads = self._fragment_spreads.get(node) if spreads is None: spreads = [] @@ -141,12 +152,12 @@ def get_fragment_spreads(self, node: SelectionSetNode) -> List[FragmentSpreadNod def get_recursively_referenced_fragments( self, operation: OperationDefinitionNode - ) -> 
List[FragmentDefinitionNode]: + ) -> list[FragmentDefinitionNode]: fragments = self._recursively_referenced_fragments.get(operation) if fragments is None: fragments = [] append_fragment = fragments.append - collected_names: Set[str] = set() + collected_names: set[str] = set() add_name = collected_names.add nodes_to_visit = [operation.selection_set] append_node = nodes_to_visit.append @@ -175,12 +186,12 @@ class SDLValidationContext(ASTValidationContext): rule. """ - schema: Optional[GraphQLSchema] + schema: GraphQLSchema | None def __init__( self, ast: DocumentNode, - schema: Optional[GraphQLSchema], + schema: GraphQLSchema | None, on_error: Callable[[GraphQLError], None], ) -> None: super().__init__(ast, on_error) @@ -198,8 +209,8 @@ class ValidationContext(ASTValidationContext): schema: GraphQLSchema _type_info: TypeInfo - _variable_usages: Dict[NodeWithSelectionSet, List[VariableUsage]] - _recursive_variable_usages: Dict[OperationDefinitionNode, List[VariableUsage]] + _variable_usages: dict[NodeWithSelectionSet, list[VariableUsage]] + _recursive_variable_usages: dict[OperationDefinitionNode, list[VariableUsage]] def __init__( self, @@ -214,7 +225,7 @@ def __init__( self._variable_usages = {} self._recursive_variable_usages = {} - def get_variable_usages(self, node: NodeWithSelectionSet) -> List[VariableUsage]: + def get_variable_usages(self, node: NodeWithSelectionSet) -> list[VariableUsage]: usages = self._variable_usages.get(node) if usages is None: usage_visitor = VariableUsageVisitor(self._type_info) @@ -225,7 +236,7 @@ def get_variable_usages(self, node: NodeWithSelectionSet) -> List[VariableUsage] def get_recursive_variable_usages( self, operation: OperationDefinitionNode - ) -> List[VariableUsage]: + ) -> list[VariableUsage]: usages = self._recursive_variable_usages.get(operation) if usages is None: get_variable_usages = self.get_variable_usages @@ -235,26 +246,26 @@ def get_recursive_variable_usages( self._recursive_variable_usages[operation] = usages 
return usages - def get_type(self) -> Optional[GraphQLOutputType]: + def get_type(self) -> GraphQLOutputType | None: return self._type_info.get_type() - def get_parent_type(self) -> Optional[GraphQLCompositeType]: + def get_parent_type(self) -> GraphQLCompositeType | None: return self._type_info.get_parent_type() - def get_input_type(self) -> Optional[GraphQLInputType]: + def get_input_type(self) -> GraphQLInputType | None: return self._type_info.get_input_type() - def get_parent_input_type(self) -> Optional[GraphQLInputType]: + def get_parent_input_type(self) -> GraphQLInputType | None: return self._type_info.get_parent_input_type() - def get_field_def(self) -> Optional[GraphQLField]: + def get_field_def(self) -> GraphQLField | None: return self._type_info.get_field_def() - def get_directive(self) -> Optional[GraphQLDirective]: + def get_directive(self) -> GraphQLDirective | None: return self._type_info.get_directive() - def get_argument(self) -> Optional[GraphQLArgument]: + def get_argument(self) -> GraphQLArgument | None: return self._type_info.get_argument() - def get_enum_value(self) -> Optional[GraphQLEnumValue]: + def get_enum_value(self) -> GraphQLEnumValue | None: return self._type_info.get_enum_value() diff --git a/src/graphql/version.py b/src/graphql/version.py index 544d59f5..10577318 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -1,6 +1,6 @@ """GraphQL-core version number""" -from __future__ import annotations # Python < 3.10 +from __future__ import annotations import re from typing import NamedTuple diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index de93e1de..a3448d89 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -1,4 +1,4 @@ -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from graphql.execution import execute_sync from graphql.language import parse diff --git a/tests/execution/test_union_interface.py 
b/tests/execution/test_union_interface.py index 1adcd8af..e772db5d 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -1,4 +1,4 @@ -from __future__ import annotations # Python < 3.10 +from __future__ import annotations from graphql.execution import execute_sync from graphql.language import parse From 8f4d24532fa08c3487d078b499d2b5992e078202 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 6 Apr 2024 23:22:32 +0200 Subject: [PATCH 145/230] More modernization of typing annotations in tests --- tests/error/test_graphql_error.py | 14 +++--- tests/execution/test_abstract.py | 10 ++-- tests/execution/test_defer.py | 14 +++--- tests/execution/test_executor.py | 6 ++- tests/execution/test_mutations.py | 8 ++-- tests/execution/test_stream.py | 10 ++-- tests/execution/test_variables.py | 8 ++-- tests/language/test_ast.py | 5 +- tests/language/test_block_string.py | 6 ++- tests/language/test_lexer.py | 8 ++-- tests/language/test_parser.py | 48 ++++++++++--------- tests/language/test_schema_parser.py | 16 ++++--- tests/language/test_source.py | 6 ++- tests/language/test_visitor.py | 12 +++-- tests/pyutils/test_inspect.py | 22 +++++---- tests/pyutils/test_suggestion_list.py | 4 +- tests/star_wars_data.py | 16 ++++--- tests/test_docs.py | 8 ++-- tests/test_star_wars_validation.py | 10 ++-- tests/test_user_registry.py | 16 ++++--- tests/type/test_custom_scalars.py | 10 ++-- tests/type/test_definition.py | 12 +++-- tests/type/test_enum.py | 6 ++- tests/type/test_validation.py | 5 +- tests/utilities/test_build_ast_schema.py | 2 + tests/utilities/test_coerce_input_value.py | 12 +++-- tests/utilities/test_extend_schema.py | 2 + .../utilities/test_get_introspection_query.py | 2 + tests/utilities/test_print_schema.py | 2 + .../test_strip_ignored_characters.py | 4 +- .../test_strip_ignored_characters_fuzz.py | 5 +- tests/utilities/test_type_info.py | 4 +- tests/utilities/test_value_from_ast.py | 6 ++- 
.../utilities/test_value_from_ast_untyped.py | 6 ++- .../assert_equal_awaitables_or_values.py | 2 + tests/validation/harness.py | 26 +++++----- tests/validation/test_no_deprecated.py | 6 ++- 37 files changed, 215 insertions(+), 144 deletions(-) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index 121c5c3e..d01e1e8a 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -1,4 +1,6 @@ -from typing import List, Union, cast +from __future__ import annotations + +from typing import cast from graphql.error import GraphQLError from graphql.language import ( @@ -204,7 +206,7 @@ def serializes_to_include_message_and_locations(): } def serializes_to_include_path(): - path: List[Union[int, str]] = ["path", 3, "to", "field"] + path: list[int | str] = ["path", 3, "to", "field"] e = GraphQLError("msg", path=path) assert e.path is path assert repr(e) == "GraphQLError('msg', path=['path', 3, 'to', 'field'])" @@ -218,7 +220,7 @@ def serializes_to_include_all_standard_fields(): assert str(e_short) == "msg" assert repr(e_short) == "GraphQLError('msg')" - path: List[Union[str, int]] = ["path", 2, "field"] + path: list[str | int] = ["path", 2, "field"] extensions = {"foo": "bar "} e_full = GraphQLError("msg", field_node, None, None, path, None, extensions) assert str(e_full) == ( @@ -240,7 +242,7 @@ def repr_includes_extensions(): assert repr(e) == "GraphQLError('msg', extensions={'foo': 'bar'})" def always_stores_path_as_list(): - path: List[Union[int, str]] = ["path", 3, "to", "field"] + path: list[int | str] = ["path", 3, "to", "field"] e = GraphQLError("msg,", path=tuple(path)) assert isinstance(e.path, list) assert e.path == path @@ -346,7 +348,7 @@ def prints_an_error_with_nodes_from_different_sources(): def describe_formatted(): def formats_graphql_error(): - path: List[Union[int, str]] = ["one", 2] + path: list[int | str] = ["one", 2] extensions = {"ext": None} error = GraphQLError( "test message", @@ -379,7 
+381,7 @@ def uses_default_message(): } def includes_path(): - path: List[Union[int, str]] = ["path", 3, "to", "field"] + path: list[int | str] = ["path", 3, "to", "field"] error = GraphQLError("msg", path=path) assert error.formatted == {"message": "msg", "path": path} diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index 30bdae28..b5ebc45b 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -1,4 +1,6 @@ -from typing import Any, NamedTuple, Optional +from __future__ import annotations + +from typing import Any, NamedTuple import pytest from graphql.execution import ExecutionResult, execute, execute_sync @@ -448,11 +450,11 @@ class RootValueAsObject: class Pet: __typename = "Pet" - name: Optional[str] = None + name: str | None = None class DogPet(Pet): __typename = "Dog" - woofs: Optional[bool] = None + woofs: bool | None = None class Odie(DogPet): name = "Odie" @@ -460,7 +462,7 @@ class Odie(DogPet): class CatPet(Pet): __typename = "Cat" - meows: Optional[bool] = None + meows: bool | None = None class Tabby(CatPet): pass diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index ff17c9f0..487cedcf 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from asyncio import sleep -from typing import Any, AsyncGenerator, Dict, List, NamedTuple +from typing import Any, AsyncGenerator, NamedTuple import pytest from graphql.error import GraphQLError @@ -111,7 +113,7 @@ async def complete(document: DocumentNode, root_value: Any = None) -> Any: result = await result if isinstance(result, ExperimentalIncrementalExecutionResults): - results: List[Any] = [result.initial_result.formatted] + results: list[Any] = [result.initial_result.formatted] async for patch in result.subsequent_results: results.append(patch.formatted) return results @@ -120,7 +122,7 @@ async def complete(document: DocumentNode, root_value: 
Any = None) -> Any: return result.formatted -def modified_args(args: Dict[str, Any], **modifications: Any) -> Dict[str, Any]: +def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: return {**args, **modifications} @@ -152,7 +154,7 @@ def can_format_and_print_incremental_defer_result(): # noinspection PyTypeChecker def can_compare_incremental_defer_result(): - args: Dict[str, Any] = { + args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], "path": ["foo", 1], @@ -219,7 +221,7 @@ def can_format_and_print_initial_incremental_execution_result(): def can_compare_initial_incremental_execution_result(): incremental = [IncrementalDeferResult(label="foo")] - args: Dict[str, Any] = { + args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], "incremental": incremental, @@ -298,7 +300,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): def can_compare_subsequent_incremental_execution_result(): incremental = [IncrementalDeferResult(label="foo")] - args: Dict[str, Any] = { + args: dict[str, Any] = { "incremental": incremental, "has_next": True, "extensions": {"baz": 2}, diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index fd80051b..b75aaad5 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import asyncio -from typing import Any, Awaitable, Optional, cast +from typing import Any, Awaitable, cast import pytest from graphql.error import GraphQLError @@ -263,7 +265,7 @@ def resolve(_obj, info): ) def it_populates_path_correctly_with_complex_types(): - path: Optional[ResponsePath] = None + path: ResponsePath | None = None def resolve(_val, info): nonlocal path diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 9f8d6b06..20ee1c97 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ 
-1,5 +1,7 @@ +from __future__ import annotations + from asyncio import sleep -from typing import Any, Awaitable, List +from typing import Any, Awaitable import pytest from graphql.execution import ( @@ -232,7 +234,7 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): schema, document, root_value ) - patches: List[Any] = [] + patches: list[Any] = [] assert isinstance(mutation_result, ExperimentalIncrementalExecutionResults) patches.append(mutation_result.initial_result.formatted) async for patch in mutation_result.subsequent_results: @@ -303,7 +305,7 @@ async def mutation_with_defer_is_not_executed_serially(): schema, document, root_value ) - patches: List[Any] = [] + patches: list[Any] = [] assert isinstance(mutation_result, ExperimentalIncrementalExecutionResults) patches.append(mutation_result.initial_result.formatted) async for patch in mutation_result.subsequent_results: diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index a3c2e49a..fb84c6d9 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from asyncio import Event, Lock, gather, sleep -from typing import Any, Awaitable, Dict, List, NamedTuple +from typing import Any, Awaitable, NamedTuple import pytest from graphql.error import GraphQLError @@ -91,7 +93,7 @@ async def complete(document: DocumentNode, root_value: Any = None) -> Any: result = await result if isinstance(result, ExperimentalIncrementalExecutionResults): - results: List[Any] = [result.initial_result.formatted] + results: list[Any] = [result.initial_result.formatted] async for patch in result.subsequent_results: results.append(patch.formatted) return results @@ -140,7 +142,7 @@ async def locked_next(): return [IteratorResult(result).formatted for result in results] -def modified_args(args: Dict[str, Any], **modifications: Any) -> Dict[str, Any]: +def modified_args(args: dict[str, Any], **modifications: Any) -> 
dict[str, Any]: return {**args, **modifications} @@ -187,7 +189,7 @@ def can_print_stream_record(): # noinspection PyTypeChecker def can_compare_incremental_stream_result(): - args: Dict[str, Any] = { + args: dict[str, Any] = { "items": ["hello", "world"], "errors": [GraphQLError("msg")], "path": ["foo", 1], diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py index 277efc0b..3dfdb3ed 100644 --- a/tests/execution/test_variables.py +++ b/tests/execution/test_variables.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from math import nan -from typing import Any, Dict, Optional +from typing import Any from graphql.error import GraphQLError from graphql.execution import ExecutionResult, execute_sync @@ -153,7 +155,7 @@ def field_with_input_arg(input_arg: GraphQLArgument): def execute_query( - query: str, variable_values: Optional[Dict[str, Any]] = None + query: str, variable_values: dict[str, Any] | None = None ) -> ExecutionResult: document = parse(query) return execute_sync(schema, document, variable_values=variable_values) @@ -1039,7 +1041,7 @@ def describe_get_variable_values_limit_maximum_number_of_coercion_errors(): input_value = {"input": [0, 1, 2]} - def _invalid_value_error(value: int, index: int) -> Dict[str, Any]: + def _invalid_value_error(value: int, index: int) -> dict[str, Any]: return { "message": "Variable '$input' got invalid value" f" {value} at 'input[{index}]';" diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py index 35f39171..e9cb80c8 100644 --- a/tests/language/test_ast.py +++ b/tests/language/test_ast.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import weakref from copy import copy, deepcopy -from typing import Optional from graphql.language import Location, NameNode, Node, Source, Token, TokenKind from graphql.pyutils import inspect @@ -17,7 +18,7 @@ class SampleNamedNode(Node): __slots__ = "foo", "name" foo: str - name: Optional[str] + name: str | None def 
describe_token_class(): diff --git a/tests/language/test_block_string.py b/tests/language/test_block_string.py index 73e31d1b..74f99734 100644 --- a/tests/language/test_block_string.py +++ b/tests/language/test_block_string.py @@ -1,4 +1,6 @@ -from typing import Collection, Optional, cast +from __future__ import annotations + +from typing import Collection, cast from graphql.language.block_string import ( dedent_block_string_lines, @@ -152,7 +154,7 @@ def __str__(self) -> str: def describe_print_block_string(): def _assert_block_string( - s: str, readable: str, minimize: Optional[str] = None + s: str, readable: str, minimize: str | None = None ) -> None: assert print_block_string(s) == readable assert print_block_string(s, minimize=True) == minimize or readable diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 439446d8..85b30bb7 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -1,4 +1,6 @@ -from typing import List, Optional, Tuple +from __future__ import annotations + +from typing import Optional, Tuple import pytest from graphql.error import GraphQLSyntaxError @@ -576,8 +578,8 @@ def produces_double_linked_list_of_tokens_including_comments(): assert end_token.kind != TokenKind.COMMENT assert start_token.prev is None assert end_token.next is None - tokens: List[Token] = [] - tok: Optional[Token] = start_token + tokens: list[Token] = [] + tok: Token | None = start_token while tok: assert not tokens or tok.prev == tokens[-1] tokens.append(tok) diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 74f3cf8f..7246c6c5 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Optional, Tuple, cast import pytest @@ -261,7 +263,7 @@ def parses_required_field(): assert isinstance(definitions, tuple) assert len(definitions) == 1 definition = cast(OperationDefinitionNode, definitions[0]) - 
selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections assert isinstance(selections, tuple) @@ -326,16 +328,16 @@ def parses_field_with_required_list_elements(): assert isinstance(definitions, tuple) assert len(definitions) == 1 definition = cast(OperationDefinitionNode, definitions[0]) - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 field = selections[0] assert isinstance(field, FieldNode) - nullability_assertion: Optional[ - NullabilityAssertionNode - ] = field.nullability_assertion + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) assert isinstance(nullability_assertion, ListNullabilityOperatorNode) assert nullability_assertion.loc == (7, 10) nullability_assertion = nullability_assertion.nullability_assertion @@ -350,16 +352,16 @@ def parses_field_with_optional_list_elements(): assert isinstance(definitions, tuple) assert len(definitions) == 1 definition = cast(OperationDefinitionNode, definitions[0]) - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 field = selections[0] assert isinstance(field, FieldNode) - nullability_assertion: Optional[ - NullabilityAssertionNode - ] = field.nullability_assertion + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) assert isinstance(nullability_assertion, ListNullabilityOperatorNode) assert 
nullability_assertion.loc == (7, 10) nullability_assertion = nullability_assertion.nullability_assertion @@ -374,16 +376,16 @@ def parses_field_with_required_list(): assert isinstance(definitions, tuple) assert len(definitions) == 1 definition = cast(OperationDefinitionNode, definitions[0]) - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 field = selections[0] assert isinstance(field, FieldNode) - nullability_assertion: Optional[ - NullabilityAssertionNode - ] = field.nullability_assertion + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) assert isinstance(nullability_assertion, NonNullAssertionNode) assert nullability_assertion.loc == (7, 10) nullability_assertion = nullability_assertion.nullability_assertion @@ -398,16 +400,16 @@ def parses_field_with_optional_list(): assert isinstance(definitions, tuple) assert len(definitions) == 1 definition = cast(OperationDefinitionNode, definitions[0]) - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 field = selections[0] assert isinstance(field, FieldNode) - nullability_assertion: Optional[ - NullabilityAssertionNode - ] = field.nullability_assertion + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) assert isinstance(nullability_assertion, ErrorBoundaryNode) assert nullability_assertion.loc == (7, 10) nullability_assertion = nullability_assertion.nullability_assertion @@ -422,16 +424,16 @@ def parses_field_with_mixed_list_elements(): assert 
isinstance(definitions, tuple) assert len(definitions) == 1 definition = cast(OperationDefinitionNode, definitions[0]) - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 field = selections[0] assert isinstance(field, FieldNode) - nullability_assertion: Optional[ - NullabilityAssertionNode - ] = field.nullability_assertion + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) assert isinstance(nullability_assertion, NonNullAssertionNode) assert nullability_assertion.loc == (7, 16) nullability_assertion = nullability_assertion.nullability_assertion @@ -487,7 +489,7 @@ def creates_ast(): assert definition.name is None assert definition.variable_definitions == () assert definition.directives == () - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) assert selection_set.loc == (0, 40) selections = selection_set.selections @@ -572,7 +574,7 @@ def creates_ast_from_nameless_query_without_variables(): assert definition.name is None assert definition.variable_definitions == () assert definition.directives == () - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) assert selection_set.loc == (6, 29) selections = selection_set.selections diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index f9100a03..a5005a06 100644 --- a/tests/language/test_schema_parser.py +++ b/tests/language/test_schema_parser.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import pickle from copy import 
deepcopy from textwrap import dedent -from typing import List, Optional, Tuple +from typing import Optional, Tuple import pytest from graphql.error import GraphQLSyntaxError @@ -78,7 +80,7 @@ def field_node(name: NameNode, type_: TypeNode, loc: Location): return field_node_with_args(name, type_, [], loc) -def field_node_with_args(name: NameNode, type_: TypeNode, args: List, loc: Location): +def field_node_with_args(name: NameNode, type_: TypeNode, args: list, loc: Location): return FieldDefinitionNode( name=name, arguments=args, type=type_, directives=[], loc=loc, description=None ) @@ -95,7 +97,7 @@ def enum_value_node(name: str, loc: Location): def input_value_node( - name: NameNode, type_: TypeNode, default_value: Optional[ValueNode], loc: Location + name: NameNode, type_: TypeNode, default_value: ValueNode | None, loc: Location ): return InputValueDefinitionNode( name=name, @@ -111,7 +113,7 @@ def boolean_value_node(value: bool, loc: Location): return BooleanValueNode(value=value, loc=loc) -def string_value_node(value: str, block: Optional[bool], loc: Location): +def string_value_node(value: str, block: bool | None, loc: Location): return StringValueNode(value=value, block=block, loc=loc) @@ -120,8 +122,8 @@ def list_type_node(type_: TypeNode, loc: Location): def schema_extension_node( - directives: List[DirectiveNode], - operation_types: List[OperationTypeDefinitionNode], + directives: list[DirectiveNode], + operation_types: list[OperationTypeDefinitionNode], loc: Location, ): return SchemaExtensionNode( @@ -133,7 +135,7 @@ def operation_type_definition(operation: OperationType, type_: TypeNode, loc: Lo return OperationTypeDefinitionNode(operation=operation, type=type_, loc=loc) -def directive_node(name: NameNode, arguments: List[ArgumentNode], loc: Location): +def directive_node(name: NameNode, arguments: list[ArgumentNode], loc: Location): return DirectiveNode(name=name, arguments=arguments, loc=loc) diff --git a/tests/language/test_source.py 
b/tests/language/test_source.py index 9da76d2f..02014445 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import weakref -from typing import Tuple, cast +from typing import cast import pytest from graphql.language import Source, SourceLocation @@ -77,7 +79,7 @@ def can_create_custom_attribute(): assert node.custom == "bar" # type: ignore def rejects_invalid_location_offset(): - def create_source(location_offset: Tuple[int, int]) -> Source: + def create_source(location_offset: tuple[int, int]) -> Source: return Source("", "", cast(SourceLocation, location_offset)) with pytest.raises(TypeError): diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index dd2fc791..1e74c6ff 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from copy import copy from functools import partial -from typing import Any, List, Optional, cast +from typing import Any, cast import pytest from graphql.language import ( @@ -185,7 +187,7 @@ def leave_field(node, *args): TestVisitorWithStaticMethods, ): ast = parse("{ a }") - visited: List[str] = [] + visited: list[str] = [] visit(ast, visitor_class()) assert visited == [ "enter:document", @@ -576,7 +578,7 @@ class CustomFieldNode(SelectionNode): __slots__ = "name", "selection_set" name: NameNode - selection_set: Optional[SelectionSetNode] + selection_set: SelectionSetNode | None custom_selection_set = cast(FieldNode, custom_ast.definitions[0]).selection_set assert custom_selection_set is not None @@ -732,9 +734,9 @@ def leave(*args): # noinspection PyShadowingNames def visits_kitchen_sink(kitchen_sink_query): # noqa: F811 ast = parse(kitchen_sink_query, experimental_client_controlled_nullability=True) - visited: List[Any] = [] + visited: list[Any] = [] record = visited.append - arg_stack: List[Any] = [] + arg_stack: list[Any] = [] push = arg_stack.append pop 
= arg_stack.pop diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index be8e1e0a..3721d018 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from contextlib import contextmanager from importlib import import_module from math import inf, nan -from typing import Any, Dict, FrozenSet, List, Set, Tuple +from typing import Any import pytest from graphql.pyutils import Undefined, inspect @@ -165,13 +167,13 @@ def inspect_lists(): assert inspect([["a", "b"], "c"]) == "[['a', 'b'], 'c']" def inspect_overly_large_list(): - s: List[int] = list(range(20)) + s: list[int] = list(range(20)) assert inspect(s) == "[0, 1, 2, 3, 4, ..., 16, 17, 18, 19]" with increased_list_size(): assert inspect(s) == repr(s) def inspect_overly_nested_list(): - s: List[List[List]] = [[[]]] + s: list[list[list]] = [[[]]] assert inspect(s) == "[[[]]]" s = [[[1, 2, 3]]] assert inspect(s) == "[[[...]]]" @@ -179,7 +181,7 @@ def inspect_overly_nested_list(): assert inspect(s) == repr(s) def inspect_recursive_list(): - s: List[Any] = [1, 2, 3] + s: list[Any] = [1, 2, 3] s[1] = s assert inspect(s) == "[1, [...], 3]" @@ -197,7 +199,7 @@ def inspect_overly_large_tuple(): assert inspect(s) == repr(s) def inspect_overly_nested_tuple(): - s: Tuple[Tuple[Tuple]] = (((),),) + s: tuple[tuple[tuple]] = (((),),) assert inspect(s) == "(((),),)" s = (((1, 2, 3),),) assert inspect(s) == "(((...),),)" @@ -205,7 +207,7 @@ def inspect_overly_nested_tuple(): assert inspect(s) == repr(s) def inspect_recursive_tuple(): - s: List[Any] = [1, 2, 3] + s: list[Any] = [1, 2, 3] s[1] = s t = tuple(s) assert inspect(t) == "(1, [1, [...], 3], 3)" @@ -238,7 +240,7 @@ def inspect_overly_large_dict(): assert inspect(s) == repr(s) def inspect_overly_nested_dict(): - s: Dict[str, Dict[str, Dict]] = {"a": {"b": {}}} + s: dict[str, dict[str, dict]] = {"a": {"b": {}}} assert inspect(s) == "{'a': {'b': {}}}" s = {"a": {"b": {"c": 3}}} 
assert inspect(s) == "{'a': {'b': {...}}}" @@ -246,7 +248,7 @@ def inspect_overly_nested_dict(): assert inspect(s) == repr(s) def inspect_recursive_dict(): - s: Dict[int, Any] = {} + s: dict[int, Any] = {} s[1] = s assert inspect(s) == "{1: {...}}" @@ -267,7 +269,7 @@ def inspect_overly_large_set(): assert inspect(s) == repr(s) def inspect_overly_nested_set(): - s: List[List[Set]] = [[set()]] + s: list[list[set]] = [[set()]] assert inspect(s) == "[[set()]]" s = [[{1, 2, 3}]] assert inspect(s) == "[[set(...)]]" @@ -294,7 +296,7 @@ def inspect_overly_large_frozenset(): assert inspect(s) == repr(s) def inspect_overly_nested_frozenset(): - s: FrozenSet[FrozenSet[FrozenSet]] = frozenset([frozenset([frozenset()])]) + s: frozenset[frozenset[frozenset]] = frozenset([frozenset([frozenset()])]) assert inspect(s) == "frozenset({frozenset({frozenset()})})" s = frozenset([frozenset([frozenset([1, 2, 3])])]) assert inspect(s) == "frozenset({frozenset({frozenset(...)})})" diff --git a/tests/pyutils/test_suggestion_list.py b/tests/pyutils/test_suggestion_list.py index 57161386..216ba3c5 100644 --- a/tests/pyutils/test_suggestion_list.py +++ b/tests/pyutils/test_suggestion_list.py @@ -1,9 +1,9 @@ -from typing import List +from __future__ import annotations from graphql.pyutils import suggestion_list -def expect_suggestions(input_: str, options: List[str], expected: List[str]) -> None: +def expect_suggestions(input_: str, options: list[str], expected: list[str]) -> None: assert suggestion_list(input_, options) == expected diff --git a/tests/star_wars_data.py b/tests/star_wars_data.py index 68768534..158bf937 100644 --- a/tests/star_wars_data.py +++ b/tests/star_wars_data.py @@ -5,7 +5,9 @@ demo. 
""" -from typing import Awaitable, Collection, Dict, Iterator, Optional +from __future__ import annotations + +from typing import Awaitable, Collection, Iterator __all__ = ["get_droid", "get_friends", "get_hero", "get_human", "get_secret_backstory"] @@ -80,7 +82,7 @@ def __init__(self, id, name, friends, appearsIn, primaryFunction): # noqa: A002 id="1004", name="Wilhuff Tarkin", friends=["1001"], appearsIn=[4], homePlanet=None ) -human_data: Dict[str, Human] = { +human_data: dict[str, Human] = { "1000": luke, "1001": vader, "1002": han, @@ -104,17 +106,17 @@ def __init__(self, id, name, friends, appearsIn, primaryFunction): # noqa: A002 primaryFunction="Astromech", ) -droid_data: Dict[str, Droid] = {"2000": threepio, "2001": artoo} +droid_data: dict[str, Droid] = {"2000": threepio, "2001": artoo} # noinspection PyShadowingBuiltins -async def get_character(id: str) -> Optional[Character]: # noqa: A002 +async def get_character(id: str) -> Character | None: # noqa: A002 """Helper function to get a character by ID.""" # We use an async function just to illustrate that GraphQL-core supports it. return human_data.get(id) or droid_data.get(id) -def get_friends(character: Character) -> Iterator[Awaitable[Optional[Character]]]: +def get_friends(character: Character) -> Iterator[Awaitable[Character | None]]: """Allows us to query for a character's friends.""" # Notice that GraphQL-core accepts iterators of awaitables. 
return map(get_character, character.friends) @@ -130,13 +132,13 @@ def get_hero(episode: int) -> Character: # noinspection PyShadowingBuiltins -def get_human(id: str) -> Optional[Human]: # noqa: A002 +def get_human(id: str) -> Human | None: # noqa: A002 """Allows us to query for the human with the given id.""" return human_data.get(id) # noinspection PyShadowingBuiltins -def get_droid(id: str) -> Optional[Droid]: # noqa: A002 +def get_droid(id: str) -> Droid | None: # noqa: A002 """Allows us to query for the droid with the given id.""" return droid_data.get(id) diff --git a/tests/test_docs.py b/tests/test_docs.py index 618dcb47..23c157e2 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -1,7 +1,9 @@ """Test all code snippets in the documentation""" +from __future__ import annotations + from pathlib import Path -from typing import Any, Dict, List +from typing import Any, Dict from .utils import dedent @@ -21,8 +23,8 @@ def get_snippets(source, indent=4): source_path = Path(__file__).parents[1] / "docs" / source with source_path.open() as source_file: lines = source_file.readlines() - snippets: List[str] = [] - snippet: List[str] = [] + snippets: list[str] = [] + snippet: list[str] = [] snippet_start = " " * indent for line in lines: if not line.rstrip() and snippet: diff --git a/tests/test_star_wars_validation.py b/tests/test_star_wars_validation.py index 2c469b5f..a40a5224 100644 --- a/tests/test_star_wars_validation.py +++ b/tests/test_star_wars_validation.py @@ -1,13 +1,17 @@ -from typing import List +from __future__ import annotations + +from typing import TYPE_CHECKING -from graphql.error import GraphQLError from graphql.language import Source, parse from graphql.validation import validate from .star_wars_schema import star_wars_schema +if TYPE_CHECKING: + from graphql.error import GraphQLError + -def validation_errors(query: str) -> List[GraphQLError]: +def validation_errors(query: str) -> list[GraphQLError]: """Helper function to test a query and 
the expected response.""" source = Source(query, "StarWars.graphql") ast = parse(source) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 42cb579a..7d134a52 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -4,10 +4,12 @@ operations on a simulated user registry database backend. """ +from __future__ import annotations + from asyncio import create_task, sleep, wait from collections import defaultdict from enum import Enum -from typing import Any, AsyncIterable, Dict, List, NamedTuple, Optional +from typing import Any, AsyncIterable, NamedTuple import pytest from graphql import ( @@ -35,8 +37,8 @@ class User(NamedTuple): firstName: str lastName: str - tweets: Optional[int] - id: Optional[str] = None + tweets: int | None + id: str | None = None verified: bool = False @@ -52,10 +54,10 @@ class UserRegistry: """Simulation of a user registry with asynchronous database backend access.""" def __init__(self, **users): - self._registry: Dict[str, User] = users + self._registry: dict[str, User] = users self._pubsub = defaultdict(SimplePubSub) - async def get(self, id_: str) -> Optional[User]: + async def get(self, id_: str) -> User | None: """Get a user object from the registry""" await sleep(0) return self._registry.get(id_) @@ -91,7 +93,7 @@ def emit_event(self, mutation: MutationEnum, user: User) -> None: self._pubsub[None].emit(payload) # notify all user subscriptions self._pubsub[user.id].emit(payload) # notify single user subscriptions - def event_iterator(self, id_: Optional[str]) -> SimplePubSubIterator: + def event_iterator(self, id_: str | None) -> SimplePubSubIterator: return self._pubsub[id_].get_subscriber() @@ -509,7 +511,7 @@ async def receive_all(): done, pending = await wait(tasks, timeout=1) assert not pending - expected_data: List[Dict[str, Any]] = [ + expected_data: list[dict[str, Any]] = [ { "mutation": "CREATED", "user": { diff --git a/tests/type/test_custom_scalars.py 
b/tests/type/test_custom_scalars.py index 2fa91d9d..82c611f6 100644 --- a/tests/type/test_custom_scalars.py +++ b/tests/type/test_custom_scalars.py @@ -1,9 +1,10 @@ +from __future__ import annotations + from math import isfinite -from typing import Any, Dict, NamedTuple +from typing import TYPE_CHECKING, Any, NamedTuple from graphql import graphql_sync from graphql.error import GraphQLError -from graphql.language import ValueNode from graphql.pyutils import inspect from graphql.type import ( GraphQLArgument, @@ -15,6 +16,9 @@ ) from graphql.utilities import value_from_ast_untyped +if TYPE_CHECKING: + from graphql.language import ValueNode + # this test is not (yet) part of GraphQL.js, see # https://github.com/graphql/graphql-js/issues/2657 @@ -31,7 +35,7 @@ def is_finite(value: Any) -> bool: ) -def serialize_money(output_value: Any) -> Dict[str, float]: +def serialize_money(output_value: Any) -> dict[str, float]: if not isinstance(output_value, Money): raise GraphQLError("Cannot serialize money value: " + inspect(output_value)) return output_value._asdict() diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 8ecb2bc2..51b82ec6 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import pickle import sys from enum import Enum from math import isnan, nan -from typing import Any, Callable, Dict, List +from typing import Any, Callable try: from typing import TypedDict @@ -920,7 +922,7 @@ def rejects_an_enum_type_with_incorrectly_typed_name(): assert str(exc_info.value) == "Expected name to be a string." def rejects_an_enum_type_with_invalid_name(): - values: Dict[str, GraphQLEnumValue] = {} + values: dict[str, GraphQLEnumValue] = {} with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("", values) assert str(exc_info.value) == "Expected name to be a non-empty string." 
@@ -1320,15 +1322,15 @@ class InfoArgs(TypedDict): """Arguments for GraphQLResolveInfo""" field_name: str - field_nodes: List[FieldNode] + field_nodes: list[FieldNode] return_type: GraphQLOutputType parent_type: GraphQLObjectType path: Path schema: GraphQLSchema - fragments: Dict[str, FragmentDefinitionNode] + fragments: dict[str, FragmentDefinitionNode] root_value: Any operation: OperationDefinitionNode - variable_values: Dict[str, Any] + variable_values: dict[str, Any] is_awaitable: Callable[[Any], bool] info_args: InfoArgs = { diff --git a/tests/type/test_enum.py b/tests/type/test_enum.py index 3219224d..20f8b5f4 100644 --- a/tests/type/test_enum.py +++ b/tests/type/test_enum.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from datetime import datetime from enum import Enum -from typing import Any, Dict, Optional +from typing import Any from graphql import graphql_sync from graphql.type import ( @@ -113,7 +115,7 @@ class Complex2: ) -def execute_query(source: str, variable_values: Optional[Dict[str, Any]] = None): +def execute_query(source: str, variable_values: dict[str, Any] | None = None): return graphql_sync(schema, source, variable_values=variable_values) diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 4ed1c09e..eb4e2ab7 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -1,5 +1,6 @@ +from __future__ import annotations + from operator import attrgetter -from typing import List, Union import pytest from graphql.language import DirectiveLocation, parse @@ -65,7 +66,7 @@ def with_modifiers( type_: GraphQLNamedType, -) -> List[Union[GraphQLNamedType, GraphQLNonNull, GraphQLList]]: +) -> list[GraphQLNamedType | GraphQLNonNull | GraphQLList]: return [ type_, GraphQLList(type_), diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index 816a3898..a0aefb1a 100644 --- a/tests/utilities/test_build_ast_schema.py +++ 
b/tests/utilities/test_build_ast_schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pickle import sys from collections import namedtuple diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index 2808b6ac..61b1feab 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from math import nan -from typing import Any, List, NamedTuple, Union +from typing import Any, NamedTuple import pytest from graphql.error import GraphQLError @@ -20,12 +22,12 @@ class CoercedValueError(NamedTuple): error: str - path: List[Union[str, int]] + path: list[str | int] value: Any class CoercedValue(NamedTuple): - errors: List[CoercedValueError] + errors: list[CoercedValueError] value: Any @@ -34,13 +36,13 @@ def expect_value(result: CoercedValue) -> Any: return result.value -def expect_errors(result: CoercedValue) -> List[CoercedValueError]: +def expect_errors(result: CoercedValue) -> list[CoercedValueError]: return result.errors def describe_coerce_input_value(): def _coerce_value(input_value: Any, type_: GraphQLInputType): - errors: List[CoercedValueError] = [] + errors: list[CoercedValueError] = [] append = errors.append def on_error(path, invalid_value, error): diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 9afd707e..75c70efd 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Union import pytest diff --git a/tests/utilities/test_get_introspection_query.py b/tests/utilities/test_get_introspection_query.py index 05a5cad5..348d2cbf 100644 --- a/tests/utilities/test_get_introspection_query.py +++ b/tests/utilities/test_get_introspection_query.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import re from typing import Pattern diff --git 
a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 34258d49..d59b4fde 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Dict, cast from graphql.language import DirectiveLocation diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 9c07d1f1..d708bfdb 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -1,4 +1,4 @@ -from typing import Optional +from __future__ import annotations import pytest from graphql.error import GraphQLSyntaxError @@ -9,7 +9,7 @@ from ..utils import dedent -def lex_value(s: str) -> Optional[str]: +def lex_value(s: str) -> str | None: lexer = Lexer(Source(s)) value = lexer.advance().value assert lexer.advance().kind == TokenKind.EOF, "Expected EOF" diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index aed5cc2a..b61094e2 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -1,5 +1,6 @@ +from __future__ import annotations + from json import dumps -from typing import Optional import pytest from graphql.error import GraphQLSyntaxError @@ -65,7 +66,7 @@ def to_stay_the_same(self): self.to_equal(self.doc_string) -def lex_value(s: str) -> Optional[str]: +def lex_value(s: str) -> str | None: lexer = Lexer(Source(s)) value = lexer.advance().value assert lexer.advance().kind == TokenKind.EOF, "Expected EOF" diff --git a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index 8b0cae05..d23b878b 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple +from __future__ import annotations from graphql.language import ( 
FieldNode, @@ -180,7 +180,7 @@ def supports_introspection_fields(): """ ) - visited_fields: List[Tuple[Optional[str], Optional[str]]] = [] + visited_fields: list[tuple[str | None, str | None]] = [] class TestVisitor(Visitor): @staticmethod diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py index 1760367f..f21abcc2 100644 --- a/tests/utilities/test_value_from_ast.py +++ b/tests/utilities/test_value_from_ast.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from math import isnan, nan -from typing import Any, Dict, Optional +from typing import Any from graphql.language import ValueNode, parse_value from graphql.pyutils import Undefined @@ -24,7 +26,7 @@ def describe_value_from_ast(): def _value_from( value_text: str, type_: GraphQLInputType, - variables: Optional[Dict[str, Any]] = None, + variables: dict[str, Any] | None = None, ): ast = parse_value(value_text) return value_from_ast(ast, type_, variables) diff --git a/tests/utilities/test_value_from_ast_untyped.py b/tests/utilities/test_value_from_ast_untyped.py index 78c4edeb..0461cc20 100644 --- a/tests/utilities/test_value_from_ast_untyped.py +++ b/tests/utilities/test_value_from_ast_untyped.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from math import nan -from typing import Any, Dict, Optional +from typing import Any from graphql.language import FloatValueNode, IntValueNode, parse_value from graphql.pyutils import Undefined @@ -23,7 +25,7 @@ def _expect_value_from(value_text: str, expected: Any): _compare_value(value, expected) def _expect_value_from_vars( - value_text: str, variables: Optional[Dict[str, Any]], expected: Any + value_text: str, variables: dict[str, Any] | None, expected: Any ): ast = parse_value(value_text) value = value_from_ast_untyped(ast, variables) diff --git a/tests/utils/assert_equal_awaitables_or_values.py b/tests/utils/assert_equal_awaitables_or_values.py index 9c4d562c..8ed8d175 100644 --- 
a/tests/utils/assert_equal_awaitables_or_values.py +++ b/tests/utils/assert_equal_awaitables_or_values.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio from typing import Awaitable, Tuple, TypeVar, cast diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 42e6c768..3689c8fe 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -1,12 +1,16 @@ -from typing import List, Optional, Type +from __future__ import annotations + +from typing import TYPE_CHECKING -from graphql.error import GraphQLError from graphql.language import parse -from graphql.type import GraphQLSchema from graphql.utilities import build_schema -from graphql.validation import SDLValidationRule, ValidationRule from graphql.validation.validate import validate, validate_sdl +if TYPE_CHECKING: + from graphql.error import GraphQLError + from graphql.type import GraphQLSchema + from graphql.validation import SDLValidationRule, ValidationRule + __all__ = [ "test_schema", "assert_validation_errors", @@ -121,11 +125,11 @@ def assert_validation_errors( - rule: Type[ValidationRule], + rule: type[ValidationRule], query_str: str, - errors: List[GraphQLError], + errors: list[GraphQLError], schema: GraphQLSchema = test_schema, -) -> List[GraphQLError]: +) -> list[GraphQLError]: doc = parse(query_str) returned_errors = validate(schema, doc, [rule]) assert returned_errors == errors @@ -133,11 +137,11 @@ def assert_validation_errors( def assert_sdl_validation_errors( - rule: Type[SDLValidationRule], + rule: type[SDLValidationRule], sdl_str: str, - errors: List[GraphQLError], - schema: Optional[GraphQLSchema] = None, -) -> List[GraphQLError]: + errors: list[GraphQLError], + schema: GraphQLSchema | None = None, +) -> list[GraphQLError]: doc = parse(sdl_str) returned_errors = validate_sdl(doc, schema, [rule]) assert returned_errors == errors diff --git a/tests/validation/test_no_deprecated.py b/tests/validation/test_no_deprecated.py index 
c4ac992a..1f9bd163 100644 --- a/tests/validation/test_no_deprecated.py +++ b/tests/validation/test_no_deprecated.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from functools import partial -from typing import Callable, List, Tuple +from typing import Callable from graphql.utilities import build_schema from graphql.validation import NoDeprecatedCustomRule @@ -9,7 +11,7 @@ def build_assertions( sdl_str: str, -) -> Tuple[Callable[[str], None], Callable[[str, List], None]]: +) -> tuple[Callable[[str], None], Callable[[str, list], None]]: schema = build_schema(sdl_str) assert_errors = partial( assert_validation_errors, NoDeprecatedCustomRule, schema=schema From 8efb8b39d744b9bd91453556f65e2f859463e032 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 01:35:19 +0200 Subject: [PATCH 146/230] Update dependencies and reformat --- docs/conf.py | 2 + poetry.lock | 273 ++++++++++++------ pyproject.toml | 21 +- src/graphql/execution/execute.py | 2 +- src/graphql/language/lexer.py | 2 +- src/graphql/pyutils/simple_pub_sub.py | 4 +- src/graphql/type/definition.py | 15 +- src/graphql/type/schema.py | 12 +- src/graphql/type/validate.py | 4 +- src/graphql/utilities/ast_to_dict.py | 9 +- src/graphql/utilities/get_operation_ast.py | 2 +- src/graphql/utilities/type_from_ast.py | 12 +- .../utilities/value_from_ast_untyped.py | 4 +- .../rules/unique_operation_types.py | 4 +- tests/language/test_lexer.py | 56 ++-- tests/language/test_parser.py | 6 +- tests/language/test_print_string.py | 22 +- .../test_strip_ignored_characters_fuzz.py | 4 +- tox.ini | 8 +- 19 files changed, 267 insertions(+), 195 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ce27fe29..b5f3a241 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -152,8 +152,10 @@ ExperimentalIncrementalExecutionResults FormattedSourceLocation GraphQLAbstractType +GraphQLCompositeType GraphQLErrorExtensions GraphQLFieldResolver +GraphQLInputType GraphQLTypeResolver GraphQLOutputType Middleware 
diff --git a/poetry.lock b/poetry.lock index bc3735f0..ad771a31 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "alabaster" @@ -41,13 +41,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.2" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, - {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] @@ -257,6 +257,73 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "coverage" +version = "7.4.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = 
"coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = 
"coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "distlib" version = "0.3.8" @@ -321,18 +388,18 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", 
"p [[package]] name = "filelock" -version = "3.13.1" +version = "3.13.3" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, - {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, + {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, + {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -379,22 +446,22 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "7.1.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, + {file = "importlib_metadata-7.1.0.tar.gz", 
hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -542,38 +609,38 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.8.0" +version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = 
"mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = 
"mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, ] [package.dependencies] @@ -600,13 +667,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -756,13 +823,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.0.1" +version = "8.1.1" description = "pytest: simple powerful testing with Python" 
optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, - {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, ] [package.dependencies] @@ -770,11 +837,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -797,13 +864,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-asyncio" -version = "0.23.5" +version = "0.23.6" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.5.tar.gz", hash = "sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675"}, - {file = "pytest_asyncio-0.23.5-py3-none-any.whl", hash = "sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac"}, + {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, + {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash 
= "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, ] [package.dependencies] @@ -851,6 +918,24 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + [[package]] name = "pytest-describe" version = "2.2.0" @@ -867,17 +952,17 @@ pytest = ">=4.6,<9" [[package]] name = "pytest-timeout" -version = "2.2.0" +version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, - {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, ] [package.dependencies] -pytest = ">=5.0.0" +pytest = ">=7.0.0" [[package]] name = "pytz" @@ -913,28 +998,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.2.1" +version = "0.3.5" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"}, - {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c92db7101ef5bfc18e96777ed7bc7c822d545fa5977e90a585accac43d22f18a"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13471684694d41ae0f1e8e3a7497e14cd57ccb7dd72ae08d56a159d6c9c3e30e"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a11567e20ea39d1f51aebd778685582d4c56ccb082c1161ffc10f79bebe6df35"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:00a818e2db63659570403e44383ab03c529c2b9678ba4ba6c105af7854008105"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be60592f9d218b52f03384d1325efa9d3b41e4c4d55ea022cd548547cc42cd2b"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd2288890b88e8aab4499e55148805b58ec711053588cc2f0196a44f6e3d855"}, - {file = "ruff-0.2.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ef052283da7dec1987bba8d8733051c2325654641dfe5877a4022108098683"}, - {file = "ruff-0.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7022d66366d6fded4ba3889f73cd791c2d5621b2ccf34befc752cb0df70f5fad"}, - {file = "ruff-0.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0a725823cb2a3f08ee743a534cb6935727d9e47409e4ad72c10a3faf042ad5ba"}, - {file = "ruff-0.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0034d5b6323e6e8fe91b2a1e55b02d92d0b582d2953a2b37a67a2d7dedbb7acc"}, - {file = "ruff-0.2.1-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:e5cb5526d69bb9143c2e4d2a115d08ffca3d8e0fddc84925a7b54931c96f5c02"}, - {file = "ruff-0.2.1-py3-none-win32.whl", hash = "sha256:6b95ac9ce49b4fb390634d46d6ece32ace3acdd52814671ccaf20b7f60adb232"}, - {file = "ruff-0.2.1-py3-none-win_amd64.whl", hash = "sha256:e3affdcbc2afb6f5bd0eb3130139ceedc5e3f28d206fe49f63073cb9e65988e0"}, - {file = "ruff-0.2.1-py3-none-win_arm64.whl", hash = "sha256:efababa8e12330aa94a53e90a81eb6e2d55f348bc2e71adbf17d9cad23c03ee6"}, - {file = "ruff-0.2.1.tar.gz", hash = "sha256:3b42b5d8677cd0c72b99fcaf068ffc62abb5a19e71b4a3b9cfa50658a0af02f1"}, + {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, + {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, + {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, + {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, + {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, + {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, + {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, + {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, ] [[package]] @@ -1220,13 +1305,13 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.13.0" +version = "4.14.2" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.13.0-py3-none-any.whl", hash = "sha256:1143c7e2489c68026a55d3d4ae84c02c449f073b28e62f80e3e440a3b72a4afa"}, - {file = "tox-4.13.0.tar.gz", hash = "sha256:dd789a554c16c4b532924ba393c92fc8991323c4b3d466712bfecc8c9b9f24f7"}, + {file = "tox-4.14.2-py3-none-any.whl", hash = "sha256:2900c4eb7b716af4a928a7fdc2ed248ad6575294ed7cfae2ea41203937422847"}, + {file = "tox-4.14.2.tar.gz", hash = "sha256:0defb44f6dafd911b61788325741cc6b2e12ea71f987ac025ad4d649f1f1a104"}, ] [package.dependencies] @@ -1308,13 +1393,13 @@ files = [ 
[[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] @@ -1336,13 +1421,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -1353,13 +1438,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.25.0" +version = "20.25.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, - {file = "virtualenv-20.25.0.tar.gz", hash = 
"sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, ] [package.dependencies] @@ -1389,20 +1474,20 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [[package]] name = "zipp" -version = "3.17.0" +version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = 
"b78e75f3de0aa66a09e5f2d319fc43cc3201402707385827a1ddee81c22941ad" +content-hash = "4790d59c5e4684ad6eb1c04d97c0816cf12a9ef870f6b151da291f4bae56ecee" diff --git a/pyproject.toml b/pyproject.toml index 12d48c10..02e8a7c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ packages = [ { include = "CODEOWNERS", format = "sdist" }, { include = "SECURITY.md", format = "sdist" } ] +exclude = ["docs/_build/**"] [tool.poetry.urls] Changelog = "https://github.com/graphql-python/graphql-core/releases" @@ -51,19 +52,22 @@ optional = true [tool.poetry.group.test.dependencies] pytest = [ - { version = "^8.0", python = ">=3.8" }, + { version = "^8.1", python = ">=3.8" }, { version = "^7.4", python = "<3.8"} ] pytest-asyncio = [ - { version = "^0.23.5", python = ">=3.8" }, + { version = "^0.23.6", python = ">=3.8" }, { version = "~0.21.1", python = "<3.8"} ] pytest-benchmark = "^4.0" -pytest-cov = "^4.1" +pytest-cov = [ + { version = "^5.0", python = ">=3.8" }, + { version = "^4.1", python = "<3.8" }, +] pytest-describe = "^2.2" -pytest-timeout = "^2.2" +pytest-timeout = "^2.3" tox = [ - { version = "^4.13", python = ">=3.8" }, + { version = "^4.14", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] @@ -71,9 +75,9 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.2.1,<0.3" +ruff = ">=0.3.5,<0.4" mypy = [ - { version = "^1.8", python = ">=3.8" }, + { version = "^1.9", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1.0,<2" @@ -253,7 +257,8 @@ exclude_lines = [ "if MYPY:", "if TYPE_CHECKING:", '^\s+\.\.\.$', - '^\s+pass$' + '^\s+pass$', + ': \.\.\.$' ] ignore_errors = true diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index c28338e6..07800520 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -36,7 +36,7 @@ # noinspection PyCompatibility from asyncio.exceptions import TimeoutError except ImportError: # Python < 3.7 - from 
concurrent.futures import TimeoutError # type: ignore + from concurrent.futures import TimeoutError from ..error import GraphQLError, GraphQLFormattedError, located_error from ..language import ( diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index 2d42f346..f93bd3b7 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -75,7 +75,7 @@ def print_code_point_at(self, location: int) -> str: return TokenKind.EOF.value char = body[location] # Printable ASCII - if "\x20" <= char <= "\x7E": + if "\x20" <= char <= "\x7e": return "'\"'" if char == '"' else f"'{char}'" # Unicode code point point = ord( diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 6b040ef3..3e88d3b8 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -31,9 +31,7 @@ def emit(self, event: Any) -> bool: create_task(result) # type: ignore # noqa: RUF006 return bool(self.subscribers) - def get_subscriber( - self, transform: Callable | None = None - ) -> SimplePubSubIterator: + def get_subscriber(self, transform: Callable | None = None) -> SimplePubSubIterator: """Return subscriber iterator""" return SimplePubSubIterator(self, transform) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 6307eee6..4686d3d1 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1638,18 +1638,15 @@ def assert_nullable_type(type_: Any) -> GraphQLNullableType: @overload -def get_nullable_type(type_: None) -> None: - ... +def get_nullable_type(type_: None) -> None: ... @overload -def get_nullable_type(type_: GraphQLNullableType) -> GraphQLNullableType: - ... +def get_nullable_type(type_: GraphQLNullableType) -> GraphQLNullableType: ... @overload -def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: - ... +def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: ... 
def get_nullable_type( @@ -1690,13 +1687,11 @@ def assert_named_type(type_: Any) -> GraphQLNamedType: @overload -def get_named_type(type_: None) -> None: - ... +def get_named_type(type_: None) -> None: ... @overload -def get_named_type(type_: GraphQLType) -> GraphQLNamedType: - ... +def get_named_type(type_: GraphQLType) -> GraphQLNamedType: ... def get_named_type(type_: GraphQLType | None) -> GraphQLNamedType | None: diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 4da894c1..5e546298 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -238,9 +238,9 @@ def __init__( if iface.name in implementations_map: implementations = implementations_map[iface.name] else: - implementations = implementations_map[ - iface.name - ] = InterfaceImplementations(objects=[], interfaces=[]) + implementations = implementations_map[iface.name] = ( + InterfaceImplementations(objects=[], interfaces=[]) + ) implementations.interfaces.append(named_type) elif is_object_type(named_type): @@ -250,9 +250,9 @@ def __init__( if iface.name in implementations_map: implementations = implementations_map[iface.name] else: - implementations = implementations_map[ - iface.name - ] = InterfaceImplementations(objects=[], interfaces=[]) + implementations = implementations_map[iface.name] = ( + InterfaceImplementations(objects=[], interfaces=[]) + ) implementations.objects.append(named_type) diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index daf9935a..8a6b7257 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -235,9 +235,7 @@ def validate_types(self) -> None: # Ensure Input Objects do not contain non-nullable circular references validate_input_object_circular_refs(type_) - def validate_fields( - self, type_: GraphQLObjectType | GraphQLInterfaceType - ) -> None: + def validate_fields(self, type_: GraphQLObjectType | GraphQLInterfaceType) -> None: fields = type_.fields # Objects and Interfaces both must 
define one or more fields. diff --git a/src/graphql/utilities/ast_to_dict.py b/src/graphql/utilities/ast_to_dict.py index 959a90a8..fea70b32 100644 --- a/src/graphql/utilities/ast_to_dict.py +++ b/src/graphql/utilities/ast_to_dict.py @@ -13,8 +13,7 @@ @overload def ast_to_dict( node: Node, locations: bool = False, cache: dict[Node, Any] | None = None -) -> dict: - ... +) -> dict: ... @overload @@ -22,8 +21,7 @@ def ast_to_dict( node: Collection[Node], locations: bool = False, cache: dict[Node, Any] | None = None, -) -> list[Node]: - ... +) -> list[Node]: ... @overload @@ -31,8 +29,7 @@ def ast_to_dict( node: OperationType, locations: bool = False, cache: dict[Node, Any] | None = None, -) -> str: - ... +) -> str: ... def ast_to_dict( diff --git a/src/graphql/utilities/get_operation_ast.py b/src/graphql/utilities/get_operation_ast.py index 4c88ffa8..2323e57f 100644 --- a/src/graphql/utilities/get_operation_ast.py +++ b/src/graphql/utilities/get_operation_ast.py @@ -1,4 +1,4 @@ -""""Get operation AST node""" +"""Get operation AST node""" from __future__ import annotations diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index 499ec1af..c082ebc1 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -21,27 +21,23 @@ @overload def type_from_ast( schema: GraphQLSchema, type_node: NamedTypeNode -) -> GraphQLNamedType | None: - ... +) -> GraphQLNamedType | None: ... @overload def type_from_ast( schema: GraphQLSchema, type_node: ListTypeNode -) -> GraphQLList | None: - ... +) -> GraphQLList | None: ... @overload def type_from_ast( schema: GraphQLSchema, type_node: NonNullTypeNode -) -> GraphQLNonNull | None: - ... +) -> GraphQLNonNull | None: ... @overload -def type_from_ast(schema: GraphQLSchema, type_node: TypeNode) -> GraphQLType | None: - ... +def type_from_ast(schema: GraphQLSchema, type_node: TypeNode) -> GraphQLType | None: ... 
def type_from_ast( diff --git a/src/graphql/utilities/value_from_ast_untyped.py b/src/graphql/utilities/value_from_ast_untyped.py index 4a85154f..a9ad0632 100644 --- a/src/graphql/utilities/value_from_ast_untyped.py +++ b/src/graphql/utilities/value_from_ast_untyped.py @@ -77,9 +77,7 @@ def value_from_string( return value_node.value -def value_from_list( - value_node: ListValueNode, variables: dict[str, Any] | None -) -> Any: +def value_from_list(value_node: ListValueNode, variables: dict[str, Any] | None) -> Any: return [value_from_ast_untyped(node, variables) for node in value_node.values] diff --git a/src/graphql/validation/rules/unique_operation_types.py b/src/graphql/validation/rules/unique_operation_types.py index 059c8143..da737751 100644 --- a/src/graphql/validation/rules/unique_operation_types.py +++ b/src/graphql/validation/rules/unique_operation_types.py @@ -33,9 +33,7 @@ def __init__(self, context: SDLValidationContext) -> None: self.defined_operation_types: dict[ OperationType, OperationTypeDefinitionNode ] = {} - self.existing_operation_types: dict[ - OperationType, GraphQLObjectType | None - ] = ( + self.existing_operation_types: dict[OperationType, GraphQLObjectType | None] = ( { OperationType.QUERY: schema.query_type, OperationType.MUTATION: schema.mutation_type, diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 85b30bb7..0bc9a398 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -41,7 +41,7 @@ def assert_syntax_error(text: str, message: str, location: Location) -> None: def describe_lexer(): def ignores_bom_header(): - token = lex_one("\uFEFF foo") + token = lex_one("\ufeff foo") assert token == Token(TokenKind.NAME, 2, 5, 1, 3, "foo") def tracks_line_breaks(): @@ -145,8 +145,8 @@ def lexes_strings(): assert lex_one('"slashes \\\\ \\/"') == Token( TokenKind.STRING, 0, 15, 1, 1, "slashes \\ /" ) - assert lex_one('"unescaped surrogate pair \uD83D\uDE00"') == Token( - TokenKind.STRING, 0, 29, 
1, 1, "unescaped surrogate pair \uD83D\uDE00" + assert lex_one('"unescaped surrogate pair \ud83d\ude00"') == Token( + TokenKind.STRING, 0, 29, 1, 1, "unescaped surrogate pair \ud83d\ude00" ) assert lex_one('"unescaped unicode outside BMP \U0001f600"') == Token( TokenKind.STRING, 0, 33, 1, 1, "unescaped unicode outside BMP \U0001f600" @@ -160,10 +160,10 @@ def lexes_strings(): "unescaped maximal unicode outside BMP \U0010ffff", ) assert lex_one('"unicode \\u1234\\u5678\\u90AB\\uCDEF"') == Token( - TokenKind.STRING, 0, 34, 1, 1, "unicode \u1234\u5678\u90AB\uCDEF" + TokenKind.STRING, 0, 34, 1, 1, "unicode \u1234\u5678\u90ab\ucdef" ) assert lex_one('"unicode \\u{1234}\\u{5678}\\u{90AB}\\u{CDEF}"') == Token( - TokenKind.STRING, 0, 42, 1, 1, "unicode \u1234\u5678\u90AB\uCDEF" + TokenKind.STRING, 0, 42, 1, 1, "unicode \u1234\u5678\u90ab\ucdef" ) assert lex_one('"string with unicode escape outside BMP \\u{1F600}"') == Token( TokenKind.STRING, @@ -171,7 +171,7 @@ def lexes_strings(): 50, 1, 1, - "string with unicode escape outside BMP \U0001F600", + "string with unicode escape outside BMP \U0001f600", ) assert lex_one('"string with minimal unicode escape \\u{0}"') == Token( TokenKind.STRING, 0, 42, 1, 1, "string with minimal unicode escape \u0000" @@ -182,7 +182,7 @@ def lexes_strings(): 47, 1, 1, - "string with maximal unicode escape \U0010FFFF", + "string with maximal unicode escape \U0010ffff", ) assert lex_one( '"string with maximal minimal unicode escape \\u{00000000}"' @@ -222,7 +222,7 @@ def lexes_strings(): 56, 1, 1, - "string with unicode surrogate pair escape \U0010FFFF", + "string with unicode surrogate pair escape \U0010ffff", ) def lex_reports_useful_string_errors(): @@ -237,17 +237,17 @@ def lex_reports_useful_string_errors(): (1, 1), ) assert_syntax_error( - '"bad surrogate \uDEAD"', + '"bad surrogate \udead"', "Invalid character within String: U+DEAD.", (1, 16), ) assert_syntax_error( - '"bad high surrogate pair \uDEAD\uDEAD"', + '"bad high surrogate pair 
\udead\udead"', "Invalid character within String: U+DEAD.", (1, 26), ) assert_syntax_error( - '"bad low surrogate pair \uD800\uD800"', + '"bad low surrogate pair \ud800\ud800"', "Invalid character within String: U+D800.", (1, 25), ) @@ -329,12 +329,12 @@ def lex_reports_useful_string_errors(): (1, 25), ) assert_syntax_error( - '"cannot escape half a pair \uD83D\\uDE00 esc"', + '"cannot escape half a pair \ud83d\\uDE00 esc"', "Invalid character within String: U+D83D.", (1, 28), ) assert_syntax_error( - '"cannot escape half a pair \\uD83D\uDE00 esc"', + '"cannot escape half a pair \\uD83D\ude00 esc"', "Invalid Unicode escape sequence: '\\uD83D'.", (1, 28), ) @@ -373,13 +373,13 @@ def lexes_block_strings(): 1, "unescaped \\n\\r\\b\\t\\f\\u1234", ) - assert lex_one('"""unescaped surrogate pair \uD83D\uDE00"""') == Token( + assert lex_one('"""unescaped surrogate pair \ud83d\ude00"""') == Token( TokenKind.BLOCK_STRING, 0, 33, 1, 1, - "unescaped surrogate pair \uD83D\uDE00", + "unescaped surrogate pair \ud83d\ude00", ) assert lex_one('"""unescaped unicode outside BMP \U0001f600"""') == Token( TokenKind.BLOCK_STRING, @@ -412,7 +412,7 @@ def lex_reports_useful_block_string_errors(): assert_syntax_error('"""', "Unterminated string.", (1, 4)) assert_syntax_error('"""no end quote', "Unterminated string.", (1, 16)) assert_syntax_error( - '"""contains invalid surrogate \uDEAD"""', + '"""contains invalid surrogate \udead"""', "Invalid character within String: U+DEAD.", (1, 31), ) @@ -535,16 +535,16 @@ def lex_reports_useful_unknown_character_error(): assert_syntax_error("~", "Unexpected character: '~'.", (1, 1)) assert_syntax_error("\x00", "Unexpected character: U+0000.", (1, 1)) assert_syntax_error("\b", "Unexpected character: U+0008.", (1, 1)) - assert_syntax_error("\xAA", "Unexpected character: U+00AA.", (1, 1)) - assert_syntax_error("\u0AAA", "Unexpected character: U+0AAA.", (1, 1)) - assert_syntax_error("\u203B", "Unexpected character: U+203B.", (1, 1)) + 
assert_syntax_error("\xaa", "Unexpected character: U+00AA.", (1, 1)) + assert_syntax_error("\u0aaa", "Unexpected character: U+0AAA.", (1, 1)) + assert_syntax_error("\u203b", "Unexpected character: U+203B.", (1, 1)) assert_syntax_error("\U0001f600", "Unexpected character: U+1F600.", (1, 1)) - assert_syntax_error("\uD83D\uDE00", "Unexpected character: U+1F600.", (1, 1)) - assert_syntax_error("\uD800\uDC00", "Unexpected character: U+10000.", (1, 1)) - assert_syntax_error("\uDBFF\uDFFF", "Unexpected character: U+10FFFF.", (1, 1)) - assert_syntax_error("\uD800", "Invalid character: U+D800.", (1, 1)) - assert_syntax_error("\uDBFF", "Invalid character: U+DBFF.", (1, 1)) - assert_syntax_error("\uDEAD", "Invalid character: U+DEAD.", (1, 1)) + assert_syntax_error("\ud83d\ude00", "Unexpected character: U+1F600.", (1, 1)) + assert_syntax_error("\ud800\udc00", "Unexpected character: U+10000.", (1, 1)) + assert_syntax_error("\udbff\udfff", "Unexpected character: U+10FFFF.", (1, 1)) + assert_syntax_error("\ud800", "Invalid character: U+D800.", (1, 1)) + assert_syntax_error("\udbff", "Invalid character: U+DBFF.", (1, 1)) + assert_syntax_error("\udead", "Invalid character: U+DEAD.", (1, 1)) # noinspection PyArgumentEqualDefault def lex_reports_useful_information_for_dashes_in_names(): @@ -606,11 +606,11 @@ def lexes_comments(): assert lex_one("# Comment \U0001f600").prev == Token( TokenKind.COMMENT, 0, 11, 1, 1, " Comment \U0001f600" ) - assert lex_one("# Comment \uD83D\uDE00").prev == Token( - TokenKind.COMMENT, 0, 12, 1, 1, " Comment \uD83D\uDE00" + assert lex_one("# Comment \ud83d\ude00").prev == Token( + TokenKind.COMMENT, 0, 12, 1, 1, " Comment \ud83d\ude00" ) assert_syntax_error( - "# Invalid surrogate \uDEAD", "Invalid character: U+DEAD.", (1, 21) + "# Invalid surrogate \udead", "Invalid character: U+DEAD.", (1, 21) ) diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 7246c6c5..b671e444 100644 --- a/tests/language/test_parser.py +++ 
b/tests/language/test_parser.py @@ -173,8 +173,8 @@ def parses_multi_byte_characters(): # Note: \u0A0A could be naively interpreted as two line-feed chars. doc = parse( """ - # This comment has a \u0A0A multi-byte character. - { field(arg: "Has a \u0A0A multi-byte character.") } + # This comment has a \u0a0a multi-byte character. + { field(arg: "Has a \u0a0a multi-byte character.") } """ ) definitions = doc.definitions @@ -189,7 +189,7 @@ def parses_multi_byte_characters(): assert len(arguments) == 1 value = arguments[0].value assert isinstance(value, StringValueNode) - assert value.value == "Has a \u0A0A multi-byte character." + assert value.value == "Has a \u0a0a multi-byte character." # noinspection PyShadowingNames def parses_kitchen_sink(kitchen_sink_query): # noqa: F811 diff --git a/tests/language/test_print_string.py b/tests/language/test_print_string.py index 644c6669..8daa2e27 100644 --- a/tests/language/test_print_string.py +++ b/tests/language/test_print_string.py @@ -21,23 +21,23 @@ def does_not_escape_space(): assert print_string(" ") == '" "' def does_not_escape_non_ascii_character(): - assert print_string("\u21BB") == '"\u21BB"' + assert print_string("\u21bb") == '"\u21bb"' def does_not_escape_supplementary_character(): assert print_string("\U0001f600") == '"\U0001f600"' def escapes_all_control_chars(): assert print_string( - "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F" - "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F" - "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2A\x2B\x2C\x2D\x2E\x2F" - "\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3A\x3B\x3C\x3D\x3E\x3F" - "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F" - "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x5B\x5C\x5D\x5E\x5F" - "\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6A\x6B\x6C\x6D\x6E\x6F" - "\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7A\x7B\x7C\x7D\x7E\x7F" - "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x8B\x8C\x8D\x8E\x8F" - 
"\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9A\x9B\x9C\x9D\x9E\x9F" + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" ) == ( '"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007' "\\b\\t\\n\\u000B\\f\\r\\u000E\\u000F" diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index b61094e2..85c43aec 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -11,7 +11,7 @@ ignored_tokens = [ # UnicodeBOM - "\uFEFF", # Byte Order Mark (U+FEFF) + "\ufeff", # Byte Order Mark (U+FEFF) # WhiteSpace "\t", # Horizontal Tab (U+0009) " ", # Space (U+0020) @@ -55,7 +55,7 @@ def to_equal(self, expected: str): stripped_twice = strip_ignored_characters(stripped) assert stripped == stripped_twice, dedent( - f"""" + f""" Expected strip_ignored_characters({stripped!r})" to equal {stripped!r} but got {stripped_twice!r} diff --git a/tox.ini b/tox.ini index d0bf90d3..1d965e63 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.2.1,<0.3 +deps = ruff>=0.3.5,<0.4 commands = ruff check src tests ruff format --check src tests @@ -25,7 +25,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.8.0,<1.9 + mypy>=1.9,<2 pytest>=8.0,<9 commands = 
mypy src tests @@ -43,9 +43,9 @@ deps = pytest>=7.4,<9 pytest-asyncio>=0.21.1,<1 pytest-benchmark>=4,<5 - pytest-cov>=4.1,<5 + pytest-cov>=4.1,<6 pytest-describe>=2.2,<3 - pytest-timeout>=2.2,<3 + pytest-timeout>=2.3,<3 py37,py38,py39,pypy39: typing-extensions>=4.7.1,<5 commands = # to also run the time-consuming tests: tox -e py311 -- --run-slow From 5899a612226b2108a171eae066b24cd955d7010c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 12:00:15 +0200 Subject: [PATCH 147/230] Narrow the return type of ast_from_value Replicates graphql/graphql-js@aa43fec435c52d86ff0ff66b2df6bb20ec358e51 --- src/graphql/utilities/ast_from_value.py | 16 +++++++------- tests/utilities/test_ast_from_value.py | 28 ++++++++++++++----------- 2 files changed, 24 insertions(+), 20 deletions(-) diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index 99bf0769..dea67665 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -8,16 +8,16 @@ from ..language import ( BooleanValueNode, + ConstListValueNode, + ConstObjectFieldNode, + ConstObjectValueNode, + ConstValueNode, EnumValueNode, FloatValueNode, IntValueNode, - ListValueNode, NameNode, NullValueNode, - ObjectFieldNode, - ObjectValueNode, StringValueNode, - ValueNode, ) from ..pyutils import Undefined, inspect, is_iterable from ..type import ( @@ -35,7 +35,7 @@ _re_integer_string = re.compile("^-?(?:0|[1-9][0-9]*)$") -def ast_from_value(value: Any, type_: GraphQLInputType) -> ValueNode | None: +def ast_from_value(value: Any, type_: GraphQLInputType) -> ConstValueNode | None: """Produce a GraphQL Value AST given a Python object. 
This function will match Python/JSON values to GraphQL AST schema format by using @@ -80,7 +80,7 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> ValueNode | None: if is_iterable(value): maybe_value_nodes = (ast_from_value(item, item_type) for item in value) value_nodes = tuple(node for node in maybe_value_nodes if node) - return ListValueNode(values=value_nodes) + return ConstListValueNode(values=value_nodes) return ast_from_value(value, item_type) # Populate the fields of the input object by creating ASTs from each value in the @@ -94,11 +94,11 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> ValueNode | None: if field_name in value ) field_nodes = tuple( - ObjectFieldNode(name=NameNode(value=field_name), value=field_value) + ConstObjectFieldNode(name=NameNode(value=field_name), value=field_value) for field_name, field_value in field_items if field_value ) - return ObjectValueNode(fields=field_nodes) + return ConstObjectValueNode(fields=field_nodes) if is_leaf_type(type_): # Since value is an internally represented value, it must be serialized to an diff --git a/tests/utilities/test_ast_from_value.py b/tests/utilities/test_ast_from_value.py index cc01df45..1432d7a4 100644 --- a/tests/utilities/test_ast_from_value.py +++ b/tests/utilities/test_ast_from_value.py @@ -4,14 +4,14 @@ from graphql.error import GraphQLError from graphql.language import ( BooleanValueNode, + ConstListValueNode, + ConstObjectFieldNode, + ConstObjectValueNode, EnumValueNode, FloatValueNode, IntValueNode, - ListValueNode, NameNode, NullValueNode, - ObjectFieldNode, - ObjectValueNode, StringValueNode, ) from graphql.pyutils import Undefined @@ -202,13 +202,13 @@ def converts_string_values_to_enum_asts_if_possible(): def converts_list_values_to_list_asts(): assert ast_from_value( ["FOO", "BAR"], GraphQLList(GraphQLString) - ) == ListValueNode( + ) == ConstListValueNode( values=[StringValueNode(value="FOO"), StringValueNode(value="BAR")] ) assert ast_from_value( 
["HELLO", "GOODBYE"], GraphQLList(my_enum) - ) == ListValueNode( + ) == ConstListValueNode( values=[EnumValueNode(value="HELLO"), EnumValueNode(value="GOODBYE")] ) @@ -218,7 +218,7 @@ def list_generator(): yield 3 assert ast_from_value(list_generator(), GraphQLList(GraphQLInt)) == ( - ListValueNode( + ConstListValueNode( values=[ IntValueNode(value="1"), IntValueNode(value="2"), @@ -237,7 +237,7 @@ def skips_invalid_list_items(): ["FOO", None, "BAR"], GraphQLList(GraphQLNonNull(GraphQLString)) ) - assert ast == ListValueNode( + assert ast == ConstListValueNode( values=[StringValueNode(value="FOO"), StringValueNode(value="BAR")] ) @@ -247,20 +247,24 @@ def skips_invalid_list_items(): ) def converts_input_objects(): - assert ast_from_value({"foo": 3, "bar": "HELLO"}, input_obj) == ObjectValueNode( + assert ast_from_value( + {"foo": 3, "bar": "HELLO"}, input_obj + ) == ConstObjectValueNode( fields=[ - ObjectFieldNode( + ConstObjectFieldNode( name=NameNode(value="foo"), value=FloatValueNode(value="3") ), - ObjectFieldNode( + ConstObjectFieldNode( name=NameNode(value="bar"), value=EnumValueNode(value="HELLO") ), ] ) def converts_input_objects_with_explicit_nulls(): - assert ast_from_value({"foo": None}, input_obj) == ObjectValueNode( - fields=[ObjectFieldNode(name=NameNode(value="foo"), value=NullValueNode())] + assert ast_from_value({"foo": None}, input_obj) == ConstObjectValueNode( + fields=[ + ConstObjectFieldNode(name=NameNode(value="foo"), value=NullValueNode()) + ] ) def does_not_convert_non_object_values_as_input_objects(): From 4659d0a72a6068b506593d221152877279355c19 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 15:08:34 +0200 Subject: [PATCH 148/230] print_schema: correctly print empty description Replicates graphql/graphql-js@3cf08e6279cc4ca5461b58fea4049f918c47acce --- src/graphql/utilities/extend_schema.py | 14 ++-- src/graphql/utilities/print_schema.py | 4 +- tests/utilities/test_print_schema.py | 100 ++++++++++++++++++++++++- 3 
files changed, 108 insertions(+), 10 deletions(-) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 6c3eebc7..1b55b752 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -245,6 +245,13 @@ def extend_schema_args( # Then produce and return the kwargs for a Schema with these types. get_operation = operation_types.get + description = ( + schema_def.description.value + if schema_def and schema_def.description + else None + ) + if description is None: + description = schema_kwargs["description"] return GraphQLSchemaKwargs( query=get_operation(OperationType.QUERY), # type: ignore mutation=get_operation(OperationType.MUTATION), # type: ignore @@ -255,12 +262,7 @@ def extend_schema_args( for directive in schema_kwargs["directives"] ) + tuple(self.build_directive(directive) for directive in directive_defs), - description=( - schema_def.description.value - if schema_def and schema_def.description - else None - ) - or schema_kwargs["description"], + description=description, extensions=schema_kwargs["extensions"], ast_node=schema_def or schema_kwargs["ast_node"], extension_ast_nodes=schema_kwargs["extension_ast_nodes"] diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index b4097b7c..294f7391 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -83,7 +83,7 @@ def print_schema_definition(schema: GraphQLSchema) -> str | None: # Only print a schema definition if there is a description or if it should # not be omitted because of having default type names. 
- if schema.description or not has_default_root_operation_types(schema): + if not (schema.description is None and has_default_root_operation_types(schema)): return ( print_description(schema) + "schema {\n" @@ -235,7 +235,7 @@ def print_args(args: dict[str, GraphQLArgument], indentation: str = "") -> str: return "" # If every arg does not have a description, print them on one line. - if not any(arg.description for arg in args.values()): + if all(arg.description is None for arg in args.values()): return ( "(" + ", ".join(print_input_value(name, arg) for name, arg in args.items()) diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index d59b4fde..1939ed59 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -8,6 +8,7 @@ GraphQLBoolean, GraphQLDirective, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLFloat, GraphQLInputField, @@ -602,13 +603,108 @@ def prints_custom_directives(): ) def prints_an_empty_description(): - schema = build_single_field_schema(GraphQLField(GraphQLString, description="")) + args = { + "someArg": GraphQLArgument(GraphQLString, description=""), + "anotherArg": GraphQLArgument(GraphQLString, description=""), + } + fields = { + "someField": GraphQLField(GraphQLString, args, description=""), + "anotherField": GraphQLField(GraphQLString, args, description=""), + } + query_type = GraphQLObjectType("Query", fields, description="") + scalar_type = GraphQLScalarType("SomeScalar", description="") + interface_type = GraphQLInterfaceType("SomeInterface", fields, description="") + union_type = GraphQLUnionType("SomeUnion", [query_type], description="") + enum_type = GraphQLEnumType( + "SomeEnum", + { + "SOME_VALUE": GraphQLEnumValue("Some Value", description=""), + "ANOTHER_VALUE": GraphQLEnumValue("Another Value", description=""), + }, + description="", + ) + some_directive = GraphQLDirective( + "someDirective", [DirectiveLocation.QUERY], args, description="" + ) + 
+ schema = GraphQLSchema( + query_type, + types=[scalar_type, interface_type, union_type, enum_type], + directives=[some_directive], + description="", + ) assert expect_printed_schema(schema) == dedent( ''' + """""" + schema { + query: Query + } + + """""" + directive @someDirective( + """""" + someArg: String + + """""" + anotherArg: String + ) on QUERY + + """""" + scalar SomeScalar + + """""" + interface SomeInterface { + """""" + someField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + + """""" + anotherField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + } + + """""" + union SomeUnion = Query + + """""" type Query { """""" - singleField: String + someField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + + """""" + anotherField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + } + + """""" + enum SomeEnum { + """""" + SOME_VALUE + + """""" + ANOTHER_VALUE } ''' ) From fe6fd146bbe89c69e1a4302078600ba1c0e4e297 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 15:21:57 +0200 Subject: [PATCH 149/230] Fix stream directive validation error message Replicates graphql/graphql-js@8e9813f8c283d94da66fad6fd9562432846c17d4 --- .../defer_stream_directive_on_valid_operations_rule.py | 2 +- .../test_defer_stream_directive_on_valid_operations.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py index 240092b7..c412b89e 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py @@ -80,6 +80,6 @@ def enter_directive( if not if_argument_can_be_false(node): msg = ( "Stream directive not supported on subscription operations." 
- " Disable `@defer` by setting the `if` argument to `false`." + " Disable `@stream` by setting the `if` argument to `false`." ) self.report_error(GraphQLError(msg, node)) diff --git a/tests/validation/test_defer_stream_directive_on_valid_operations.py b/tests/validation/test_defer_stream_directive_on_valid_operations.py index 7d33fd2b..70207650 100644 --- a/tests/validation/test_defer_stream_directive_on_valid_operations.py +++ b/tests/validation/test_defer_stream_directive_on_valid_operations.py @@ -274,7 +274,7 @@ def stream_on_subscription_field(): { "message": "Stream directive not supported" " on subscription operations." - " Disable `@defer` by setting the `if` argument to `false`.", + " Disable `@stream` by setting the `if` argument to `false`.", "locations": [(4, 26)], }, ], @@ -296,7 +296,7 @@ def stream_on_fragment_on_subscription_field(): { "message": "Stream directive not supported" " on subscription operations." - " Disable `@defer` by setting the `if` argument to `false`.", + " Disable `@stream` by setting the `if` argument to `false`.", "locations": [(8, 24)], }, ], @@ -344,7 +344,7 @@ def stream_on_subscription_in_multi_operation_document(): { "message": "Stream directive not supported" " on subscription operations." 
- " Disable `@defer` by setting the `if` argument to `false`.", + " Disable `@stream` by setting the `if` argument to `false`.", "locations": [(15, 24)], }, ], From 860064ff1d544cf0c873cfd5d4ef75183b94b897 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 15:32:58 +0200 Subject: [PATCH 150/230] GraphQLInputObjectType: remove check that duplicate type checks Replicates graphql/graphql-js@74e51d7cefa92c366aab0fe4ef89f5d5471514c4 --- tests/type/test_definition.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 51b82ec6..cb666b1c 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1116,35 +1116,6 @@ def fields(): "SomeInputObject fields cannot be resolved. Oops!" ) - def describe_input_objects_fields_must_not_have_resolvers(): - def rejects_an_input_object_type_with_resolvers(): - def resolve(): - pass - - with pytest.raises( - TypeError, match="got an unexpected keyword argument 'resolve'" - ): - # noinspection PyArgumentList - GraphQLInputObjectType( - "SomeInputObject", - { - "f": GraphQLInputField( # type: ignore - ScalarType, - resolve=resolve, - ) - }, - ) - - def rejects_an_input_object_type_with_resolver_constant(): - with pytest.raises( - TypeError, match="got an unexpected keyword argument 'resolve'" - ): - # noinspection PyArgumentList - GraphQLInputObjectType( - "SomeInputObject", - {"f": GraphQLInputField(ScalarType, resolve={})}, # type: ignore - ) - def describe_type_system_arguments(): def accepts_an_argument_with_a_description(): From e8559b0294ffd6c2756bbd618f6c707fda14adb1 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 15:51:24 +0200 Subject: [PATCH 151/230] Make print() break long List and Object Values over multiple line Replicates graphql/graphql-js@ddd6a01c389b7c4c8c33a4e26b9a6582b4106247 --- src/graphql/language/printer.py | 12 +++++- tests/language/test_printer.py | 
69 +++++++++++++++++++++++++++++++++ 2 files changed, 79 insertions(+), 2 deletions(-) diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 7062b5c8..d4898b06 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -200,11 +200,19 @@ def leave_enum_value(node: PrintedNode, *_args: Any) -> str: @staticmethod def leave_list_value(node: PrintedNode, *_args: Any) -> str: - return f"[{join(node.values, ', ')}]" + values = node.values + values_line = f"[{join(values, ', ')}]" + return ( + "\n".join(("[", indent(join(values, "\n")), "]")) + if len(values_line) > 80 + else values_line + ) @staticmethod def leave_object_value(node: PrintedNode, *_args: Any) -> str: - return f"{{ {join(node.fields, ', ')} }}" + fields = node.fields + fields_line = f"{{ {join(fields, ', ')} }}" + return block(fields) if len(fields_line) > MAX_LINE_LENGTH else fields_line @staticmethod def leave_object_field(node: PrintedNode, *_args: Any) -> str: diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index 7669e963..6117c69d 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -106,6 +106,75 @@ def puts_arguments_on_multiple_lines_if_line_has_more_than_80_chars(): """ ) + def puts_large_object_values_on_multiple_lines_if_line_has_more_than_80_chars(): + printed = print_ast( + parse( + "{trip(obj:{wheelchair:false,smallObj:{a: 1},largeObj:" + "{wheelchair:false,smallObj:{a: 1},arriveBy:false," + "includePlannedCancellations:true,transitDistanceReluctance:2000," + 'anotherLongFieldName:"Lots and lots and lots and lots of text"},' + "arriveBy:false,includePlannedCancellations:true," + "transitDistanceReluctance:2000,anotherLongFieldName:" + '"Lots and lots and lots and lots of text"}){dateTime}}' + ) + ) + + assert printed == dedent( + """ + { + trip( + obj: { + wheelchair: false + smallObj: { a: 1 } + largeObj: { + wheelchair: false + smallObj: { a: 1 } + arriveBy: false + 
includePlannedCancellations: true + transitDistanceReluctance: 2000 + anotherLongFieldName: "Lots and lots and lots and lots of text" + } + arriveBy: false + includePlannedCancellations: true + transitDistanceReluctance: 2000 + anotherLongFieldName: "Lots and lots and lots and lots of text" + } + ) { + dateTime + } + } + """ + ) + + def puts_large_list_values_on_multiple_lines_if_line_has_more_than_80_chars(): + printed = print_ast( + parse( + '{trip(list:[["small array", "small", "small"],' + ' ["Lots and lots and lots and lots of text",' + ' "Lots and lots and lots and lots of text",' + ' "Lots and lots and lots and lots of text"]]){dateTime}}' + ) + ) + + assert printed == dedent( + """ + { + trip( + list: [ + ["small array", "small", "small"] + [ + "Lots and lots and lots and lots of text" + "Lots and lots and lots and lots of text" + "Lots and lots and lots and lots of text" + ] + ] + ) { + dateTime + } + } + """ + ) + def legacy_prints_fragment_with_variable_directives(): query_ast_with_variable_directive = parse( "fragment Foo($foo: TestType @test) on TestType @testDirective { id }", From f4d5501c102902c6bac95a28cbd4507e14db8b89 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 17:10:09 +0200 Subject: [PATCH 152/230] introduce FieldGroup type Replicates graphql/graphql-js@b1dceba4fb84e74bd65f0f657b963ecc71d36c92 --- docs/conf.py | 1 + pyproject.toml | 1 + src/graphql/execution/collect_fields.py | 22 ++++++++--- src/graphql/execution/execute.py | 50 +++++++++++++------------ 4 files changed, 46 insertions(+), 28 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index b5f3a241..95f2fbc0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -150,6 +150,7 @@ AwaitableOrValue EnterLeaveVisitor ExperimentalIncrementalExecutionResults +FieldGroup FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType diff --git a/pyproject.toml b/pyproject.toml index 02e8a7c3..918bc418 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -251,6 +251,7 
@@ exclude_lines = [ "pragma: no cover", "except ImportError:", "# Python <", + 'sys\.version_info <', "raise NotImplementedError", "assert False,", '\s+next\($', diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index de19aaec..f8f1ba61 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -2,8 +2,9 @@ from __future__ import annotations +import sys from collections import defaultdict -from typing import Any, NamedTuple +from typing import Any, List, NamedTuple from ..language import ( FieldNode, @@ -25,20 +26,31 @@ from ..utilities.type_from_ast import type_from_ast from .values import get_directive_values -__all__ = ["collect_fields", "collect_subfields", "FieldsAndPatches"] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +__all__ = ["collect_fields", "collect_subfields", "FieldGroup", "FieldsAndPatches"] + +if sys.version_info < (3, 9): + FieldGroup: TypeAlias = List[FieldNode] +else: # Python >= 3.9 + FieldGroup: TypeAlias = list[FieldNode] class PatchFields(NamedTuple): """Optionally labelled set of fields to be used as a patch.""" label: str | None - fields: dict[str, list[FieldNode]] + fields: dict[str, FieldGroup] class FieldsAndPatches(NamedTuple): """Tuple of collected fields and patches to be applied.""" - fields: dict[str, list[FieldNode]] + fields: dict[str, FieldGroup] patches: list[PatchFields] @@ -81,7 +93,7 @@ def collect_subfields( variable_values: dict[str, Any], operation: OperationDefinitionNode, return_type: GraphQLObjectType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, ) -> FieldsAndPatches: """Collect subfields. 
diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 07800520..beafa186 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -41,7 +41,6 @@ from ..error import GraphQLError, GraphQLFormattedError, located_error from ..language import ( DocumentNode, - FieldNode, FragmentDefinitionNode, OperationDefinitionNode, OperationType, @@ -75,7 +74,12 @@ is_object_type, ) from .async_iterables import map_async_iterable -from .collect_fields import FieldsAndPatches, collect_fields, collect_subfields +from .collect_fields import ( + FieldGroup, + FieldsAndPatches, + collect_fields, + collect_subfields, +) from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -837,7 +841,7 @@ def execute_fields_serially( parent_type: GraphQLObjectType, source_value: Any, path: Path | None, - fields: dict[str, list[FieldNode]], + fields: dict[str, FieldGroup], ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. @@ -847,7 +851,7 @@ def execute_fields_serially( is_awaitable = self.is_awaitable def reducer( - results: dict[str, Any], field_item: tuple[str, list[FieldNode]] + results: dict[str, Any], field_item: tuple[str, FieldGroup] ) -> AwaitableOrValue[dict[str, Any]]: response_name, field_nodes = field_item field_path = Path(path, response_name, parent_type.name) @@ -877,7 +881,7 @@ def execute_fields( parent_type: GraphQLObjectType, source_value: Any, path: Path | None, - fields: dict[str, list[FieldNode]], + fields: dict[str, FieldGroup], async_payload_record: AsyncPayloadRecord | None = None, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. 
@@ -927,7 +931,7 @@ def execute_field( self, parent_type: GraphQLObjectType, source: Any, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, path: Path, async_payload_record: AsyncPayloadRecord | None = None, ) -> AwaitableOrValue[Any]: @@ -996,7 +1000,7 @@ async def await_completed() -> Any: def build_resolve_info( self, field_def: GraphQLField, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, parent_type: GraphQLObjectType, path: Path, ) -> GraphQLResolveInfo: @@ -1024,7 +1028,7 @@ def build_resolve_info( def complete_value( self, return_type: GraphQLOutputType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1113,7 +1117,7 @@ def complete_value( async def complete_awaitable_value( self, return_type: GraphQLOutputType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1143,7 +1147,7 @@ async def complete_awaitable_value( return completed def get_stream_values( - self, field_nodes: list[FieldNode], path: Path + self, field_nodes: FieldGroup, path: Path ) -> StreamArguments | None: """Get stream values. 
@@ -1182,7 +1186,7 @@ def get_stream_values( async def complete_async_iterator_value( self, item_type: GraphQLOutputType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, path: Path, iterator: AsyncIterator[Any], @@ -1269,7 +1273,7 @@ async def complete_async_iterator_value( def complete_list_value( self, return_type: GraphQLList[GraphQLOutputType], - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, path: Path, result: AsyncIterable[Any] | Iterable[Any], @@ -1367,7 +1371,7 @@ def complete_list_item_value( complete_results: list[Any], errors: list[GraphQLError], item_type: GraphQLOutputType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, item_path: Path, async_payload_record: AsyncPayloadRecord | None, @@ -1442,7 +1446,7 @@ def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: def complete_abstract_value( self, return_type: GraphQLAbstractType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1496,7 +1500,7 @@ def ensure_valid_runtime_type( self, runtime_type_name: Any, return_type: GraphQLAbstractType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, result: Any, ) -> GraphQLObjectType: @@ -1557,7 +1561,7 @@ def ensure_valid_runtime_type( def complete_object_value( self, return_type: GraphQLObjectType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1593,7 +1597,7 @@ async def execute_subfields_async() -> dict[str, Any]: def collect_and_execute_subfields( self, return_type: GraphQLObjectType, - field_nodes: list[FieldNode], + field_nodes: FieldGroup, path: Path, result: Any, async_payload_record: AsyncPayloadRecord | None, @@ -1619,7 +1623,7 @@ def collect_and_execute_subfields( return sub_fields def collect_subfields( - self, return_type: GraphQLObjectType, field_nodes: 
list[FieldNode] + self, return_type: GraphQLObjectType, field_nodes: FieldGroup ) -> FieldsAndPatches: """Collect subfields. @@ -1688,7 +1692,7 @@ def execute_deferred_fragment( self, parent_type: GraphQLObjectType, source_value: Any, - fields: dict[str, list[FieldNode]], + fields: dict[str, FieldGroup], label: str | None = None, path: Path | None = None, parent_context: AsyncPayloadRecord | None = None, @@ -1724,7 +1728,7 @@ def execute_stream_field( path: Path, item_path: Path, item: AwaitableOrValue[Any], - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, label: str | None = None, @@ -1817,7 +1821,7 @@ async def await_completed_items() -> list[Any] | None: async def execute_stream_iterator_item( self, iterator: AsyncIterator[Any], - field_nodes: list[FieldNode], + field_nodes: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, async_payload_record: StreamRecord, @@ -1851,7 +1855,7 @@ async def execute_stream_iterator( self, initial_index: int, iterator: AsyncIterator[Any], - field_modes: list[FieldNode], + field_modes: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, @@ -2238,7 +2242,7 @@ def handle_field_error( def invalid_return_type_error( - return_type: GraphQLObjectType, result: Any, field_nodes: list[FieldNode] + return_type: GraphQLObjectType, result: Any, field_nodes: FieldGroup ) -> GraphQLError: """Create a GraphQLError for an invalid return type.""" return GraphQLError( From 1a96cfdbc3c15a31072cc4cdad6508f6062b62cc Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 17:33:28 +0200 Subject: [PATCH 153/230] Rename field_nodes variable to field_group Replicates graphql/graphql-js@0fb9f1fa1a5020dcdb194e2392c30639a18d71e8 --- src/graphql/execution/collect_fields.py | 4 +- src/graphql/execution/execute.py | 174 +++++++++--------- .../rules/single_field_subscriptions.py | 6 +- tests/execution/test_customize.py | 4 +- 4 files 
changed, 94 insertions(+), 94 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index f8f1ba61..82456370 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -93,7 +93,7 @@ def collect_subfields( variable_values: dict[str, Any], operation: OperationDefinitionNode, return_type: GraphQLObjectType, - field_nodes: FieldGroup, + field_group: FieldGroup, ) -> FieldsAndPatches: """Collect subfields. @@ -112,7 +112,7 @@ def collect_subfields( sub_patches: list[PatchFields] = [] sub_fields_and_patches = FieldsAndPatches(sub_field_nodes, sub_patches) - for node in field_nodes: + for node in field_group: if node.selection_set: collect_fields_impl( schema, diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index beafa186..83c380f6 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -853,10 +853,10 @@ def execute_fields_serially( def reducer( results: dict[str, Any], field_item: tuple[str, FieldGroup] ) -> AwaitableOrValue[dict[str, Any]]: - response_name, field_nodes = field_item + response_name, field_group = field_item field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_nodes, field_path + parent_type, source_value, field_group, field_path ) if result is Undefined: return results @@ -893,10 +893,10 @@ def execute_fields( is_awaitable = self.is_awaitable awaitable_fields: list[str] = [] append_awaitable = awaitable_fields.append - for response_name, field_nodes in fields.items(): + for response_name, field_group in fields.items(): field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_nodes, field_path, async_payload_record + parent_type, source_value, field_group, field_path, async_payload_record ) if result is not Undefined: results[response_name] = result @@ -931,7 
+931,7 @@ def execute_field( self, parent_type: GraphQLObjectType, source: Any, - field_nodes: FieldGroup, + field_group: FieldGroup, path: Path, async_payload_record: AsyncPayloadRecord | None = None, ) -> AwaitableOrValue[Any]: @@ -944,7 +944,7 @@ def execute_field( objects, serialize scalars, or execute the sub-selection-set for objects. """ errors = async_payload_record.errors if async_payload_record else self.errors - field_name = field_nodes[0].name.value + field_name = field_group[0].name.value field_def = self.schema.get_field(parent_type, field_name) if not field_def: return Undefined @@ -955,14 +955,14 @@ def execute_field( if self.middleware_manager: resolve_fn = self.middleware_manager.get_field_resolver(resolve_fn) - info = self.build_resolve_info(field_def, field_nodes, parent_type, path) + info = self.build_resolve_info(field_def, field_group, parent_type, path) # Get the resolve function, regardless of if its result is normal or abrupt # (error). try: # Build a dictionary of arguments from the field.arguments AST, using the # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_nodes[0], self.variable_values) + args = get_argument_values(field_def, field_group[0], self.variable_values) # Note that contrary to the JavaScript implementation, we pass the context # value as part of the resolve info. 
@@ -970,11 +970,11 @@ def execute_field( if self.is_awaitable(result): return self.complete_awaitable_value( - return_type, field_nodes, info, path, result, async_payload_record + return_type, field_group, info, path, result, async_payload_record ) completed = self.complete_value( - return_type, field_nodes, info, path, result, async_payload_record + return_type, field_group, info, path, result, async_payload_record ) if self.is_awaitable(completed): # noinspection PyShadowingNames @@ -982,7 +982,7 @@ async def await_completed() -> Any: try: return await completed except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) + error = located_error(raw_error, field_group, path.as_list()) handle_field_error(error, return_type, errors) self.filter_subsequent_payloads(path, async_payload_record) return None @@ -990,7 +990,7 @@ async def await_completed() -> Any: return await_completed() except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) + error = located_error(raw_error, field_group, path.as_list()) handle_field_error(error, return_type, errors) self.filter_subsequent_payloads(path, async_payload_record) return None @@ -1000,7 +1000,7 @@ async def await_completed() -> Any: def build_resolve_info( self, field_def: GraphQLField, - field_nodes: FieldGroup, + field_group: FieldGroup, parent_type: GraphQLObjectType, path: Path, ) -> GraphQLResolveInfo: @@ -1011,8 +1011,8 @@ def build_resolve_info( # The resolve function's first argument is a collection of information about # the current execution state. 
return GraphQLResolveInfo( - field_nodes[0].name.value, - field_nodes, + field_group[0].name.value, + field_group, field_def.type, parent_type, path, @@ -1028,7 +1028,7 @@ def build_resolve_info( def complete_value( self, return_type: GraphQLOutputType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1065,7 +1065,7 @@ def complete_value( if is_non_null_type(return_type): completed = self.complete_value( return_type.of_type, - field_nodes, + field_group, info, path, result, @@ -1086,7 +1086,7 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - return_type, field_nodes, info, path, result, async_payload_record + return_type, field_group, info, path, result, async_payload_record ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, @@ -1098,13 +1098,13 @@ def complete_value( # Object type and complete for that type. if is_abstract_type(return_type): return self.complete_abstract_value( - return_type, field_nodes, info, path, result, async_payload_record + return_type, field_group, info, path, result, async_payload_record ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - return_type, field_nodes, info, path, result, async_payload_record + return_type, field_group, info, path, result, async_payload_record ) # Not reachable. All possible output types have been considered. 
@@ -1117,7 +1117,7 @@ def complete_value( async def complete_awaitable_value( self, return_type: GraphQLOutputType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1128,7 +1128,7 @@ async def complete_awaitable_value( resolved = await result completed = self.complete_value( return_type, - field_nodes, + field_group, info, path, resolved, @@ -1140,14 +1140,14 @@ async def complete_awaitable_value( errors = ( async_payload_record.errors if async_payload_record else self.errors ) - error = located_error(raw_error, field_nodes, path.as_list()) + error = located_error(raw_error, field_group, path.as_list()) handle_field_error(error, return_type, errors) self.filter_subsequent_payloads(path, async_payload_record) completed = None return completed def get_stream_values( - self, field_nodes: FieldGroup, path: Path + self, field_group: FieldGroup, path: Path ) -> StreamArguments | None: """Get stream values. @@ -1162,7 +1162,7 @@ def get_stream_values( # validation only allows equivalent streams on multiple fields, so it is # safe to only check the first field_node for the stream directive stream = get_directive_values( - GraphQLStreamDirective, field_nodes[0], self.variable_values + GraphQLStreamDirective, field_group[0], self.variable_values ) if not stream or stream.get("if") is False: @@ -1186,7 +1186,7 @@ def get_stream_values( async def complete_async_iterator_value( self, item_type: GraphQLOutputType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, iterator: AsyncIterator[Any], @@ -1198,7 +1198,7 @@ async def complete_async_iterator_value( recursively until all the results are completed. 
""" errors = async_payload_record.errors if async_payload_record else self.errors - stream = self.get_stream_values(field_nodes, path) + stream = self.get_stream_values(field_group, path) complete_list_item_value = self.complete_list_item_value awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append @@ -1216,7 +1216,7 @@ async def complete_async_iterator_value( self.execute_stream_iterator( index, iterator, - field_nodes, + field_group, info, item_type, path, @@ -1235,7 +1235,7 @@ async def complete_async_iterator_value( except StopAsyncIteration: break except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) + error = located_error(raw_error, field_group, item_path.as_list()) handle_field_error(error, item_type, errors) completed_results.append(None) break @@ -1244,7 +1244,7 @@ async def complete_async_iterator_value( completed_results, errors, item_type, - field_nodes, + field_group, info, item_path, async_payload_record, @@ -1273,7 +1273,7 @@ async def complete_async_iterator_value( def complete_list_value( self, return_type: GraphQLList[GraphQLOutputType], - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: AsyncIterable[Any] | Iterable[Any], @@ -1290,7 +1290,7 @@ def complete_list_value( iterator = result.__aiter__() return self.complete_async_iterator_value( - item_type, field_nodes, info, path, iterator, async_payload_record + item_type, field_group, info, path, iterator, async_payload_record ) if not is_iterable(result): @@ -1300,7 +1300,7 @@ def complete_list_value( ) raise GraphQLError(msg) - stream = self.get_stream_values(field_nodes, path) + stream = self.get_stream_values(field_group, path) # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine @@ -1324,7 +1324,7 @@ def complete_list_value( path, item_path, item, - field_nodes, + 
field_group, info, item_type, stream.label, @@ -1337,7 +1337,7 @@ def complete_list_value( completed_results, errors, item_type, - field_nodes, + field_group, info, item_path, async_payload_record, @@ -1371,7 +1371,7 @@ def complete_list_item_value( complete_results: list[Any], errors: list[GraphQLError], item_type: GraphQLOutputType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, item_path: Path, async_payload_record: AsyncPayloadRecord | None, @@ -1385,7 +1385,7 @@ def complete_list_item_value( if is_awaitable(item): complete_results.append( self.complete_awaitable_value( - item_type, field_nodes, info, item_path, item, async_payload_record + item_type, field_group, info, item_path, item, async_payload_record ) ) return True @@ -1393,7 +1393,7 @@ def complete_list_item_value( try: completed_item = self.complete_value( item_type, - field_nodes, + field_group, info, item_path, item, @@ -1407,7 +1407,7 @@ async def await_completed() -> Any: return await completed_item except Exception as raw_error: error = located_error( - raw_error, field_nodes, item_path.as_list() + raw_error, field_group, item_path.as_list() ) handle_field_error(error, item_type, errors) self.filter_subsequent_payloads(item_path, async_payload_record) @@ -1419,7 +1419,7 @@ async def await_completed() -> Any: complete_results.append(completed_item) except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) + error = located_error(raw_error, field_group, item_path.as_list()) handle_field_error(error, item_type, errors) self.filter_subsequent_payloads(item_path, async_payload_record) complete_results.append(None) @@ -1446,7 +1446,7 @@ def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: def complete_abstract_value( self, return_type: GraphQLAbstractType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1468,11 +1468,11 @@ async def 
await_complete_object_value() -> Any: self.ensure_valid_runtime_type( await runtime_type, # type: ignore return_type, - field_nodes, + field_group, info, result, ), - field_nodes, + field_group, info, path, result, @@ -1487,9 +1487,9 @@ async def await_complete_object_value() -> Any: return self.complete_object_value( self.ensure_valid_runtime_type( - runtime_type, return_type, field_nodes, info, result + runtime_type, return_type, field_group, info, result ), - field_nodes, + field_group, info, path, result, @@ -1500,7 +1500,7 @@ def ensure_valid_runtime_type( self, runtime_type_name: Any, return_type: GraphQLAbstractType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, result: Any, ) -> GraphQLObjectType: @@ -1514,7 +1514,7 @@ def ensure_valid_runtime_type( " a 'resolve_type' function or each possible type should provide" " an 'is_type_of' function." ) - raise GraphQLError(msg, field_nodes) + raise GraphQLError(msg, field_group) if is_object_type(runtime_type_name): # pragma: no cover msg = ( @@ -1530,7 +1530,7 @@ def ensure_valid_runtime_type( f" for field '{info.parent_type.name}.{info.field_name}' with value" f" {inspect(result)}, received '{inspect(runtime_type_name)}'." ) - raise GraphQLError(msg, field_nodes) + raise GraphQLError(msg, field_group) runtime_type = self.schema.get_type(runtime_type_name) @@ -1539,21 +1539,21 @@ def ensure_valid_runtime_type( f"Abstract type '{return_type.name}' was resolved to a type" f" '{runtime_type_name}' that does not exist inside the schema." ) - raise GraphQLError(msg, field_nodes) + raise GraphQLError(msg, field_group) if not is_object_type(runtime_type): msg = ( f"Abstract type '{return_type.name}' was resolved" f" to a non-object type '{runtime_type_name}'." 
) - raise GraphQLError(msg, field_nodes) + raise GraphQLError(msg, field_group) if not self.schema.is_sub_type(return_type, runtime_type): msg = ( f"Runtime Object type '{runtime_type.name}' is not a possible" f" type for '{return_type.name}'." ) - raise GraphQLError(msg, field_nodes) + raise GraphQLError(msg, field_group) # noinspection PyTypeChecker return runtime_type @@ -1561,7 +1561,7 @@ def ensure_valid_runtime_type( def complete_object_value( self, return_type: GraphQLObjectType, - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, @@ -1579,31 +1579,31 @@ def complete_object_value( async def execute_subfields_async() -> dict[str, Any]: if not await is_type_of: # type: ignore raise invalid_return_type_error( - return_type, result, field_nodes + return_type, result, field_group ) return self.collect_and_execute_subfields( - return_type, field_nodes, path, result, async_payload_record + return_type, field_group, path, result, async_payload_record ) # type: ignore return execute_subfields_async() if not is_type_of: - raise invalid_return_type_error(return_type, result, field_nodes) + raise invalid_return_type_error(return_type, result, field_group) return self.collect_and_execute_subfields( - return_type, field_nodes, path, result, async_payload_record + return_type, field_group, path, result, async_payload_record ) def collect_and_execute_subfields( self, return_type: GraphQLObjectType, - field_nodes: FieldGroup, + field_group: FieldGroup, path: Path, result: Any, async_payload_record: AsyncPayloadRecord | None, ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" - sub_field_nodes, sub_patches = self.collect_subfields(return_type, field_nodes) + sub_field_nodes, sub_patches = self.collect_subfields(return_type, field_group) sub_fields = self.execute_fields( return_type, result, path, sub_field_nodes, async_payload_record @@ -1623,7 +1623,7 @@ def 
collect_and_execute_subfields( return sub_fields def collect_subfields( - self, return_type: GraphQLObjectType, field_nodes: FieldGroup + self, return_type: GraphQLObjectType, field_group: FieldGroup ) -> FieldsAndPatches: """Collect subfields. @@ -1633,17 +1633,17 @@ def collect_subfields( lists of values. """ cache = self._subfields_cache - # We cannot use the field_nodes themselves as key for the cache, since they - # are not hashable as a list. We also do not want to use the field_nodes - # themselves (converted to a tuple) as keys, since hashing them is slow. - # Therefore, we use the ids of the field_nodes as keys. Note that we do not - # use the id of the list, since we want to hit the cache for all lists of + # We cannot use the field_group itself as key for the cache, since it + # is not hashable as a list. We also do not want to use the field_group + # itself (converted to a tuple) as keys, since hashing them is slow. + # Therefore, we use the ids of the field_group items as keys. Note that we do + # not use the id of the list, since we want to hit the cache for all lists of # the same nodes, not only for the same list of nodes. Also, the list id may # even be reused, in which case we would get wrong results from the cache. 
key = ( - (return_type, id(field_nodes[0])) - if len(field_nodes) == 1 # optimize most frequent case - else (return_type, *map(id, field_nodes)) + (return_type, id(field_group[0])) + if len(field_group) == 1 # optimize most frequent case + else (return_type, *map(id, field_group)) ) sub_fields_and_patches = cache.get(key) if sub_fields_and_patches is None: @@ -1653,7 +1653,7 @@ def collect_subfields( self.variable_values, self.operation, return_type, - field_nodes, + field_group, ) cache[key] = sub_fields_and_patches return sub_fields_and_patches @@ -1728,7 +1728,7 @@ def execute_stream_field( path: Path, item_path: Path, item: AwaitableOrValue[Any], - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, label: str | None = None, @@ -1748,7 +1748,7 @@ async def await_completed_items() -> list[Any] | None: return [ await self.complete_awaitable_value( item_type, - field_nodes, + field_group, info, item_path, item, @@ -1767,7 +1767,7 @@ async def await_completed_items() -> list[Any] | None: try: completed_item = self.complete_value( item_type, - field_nodes, + field_group, info, item_path, item, @@ -1786,7 +1786,7 @@ async def await_completed_items() -> list[Any] | None: except Exception as raw_error: # pragma: no cover # noinspection PyShadowingNames error = located_error( - raw_error, field_nodes, item_path.as_list() + raw_error, field_group, item_path.as_list() ) handle_field_error( error, item_type, async_payload_record.errors @@ -1805,7 +1805,7 @@ async def await_completed_items() -> list[Any] | None: completed_items = [completed_item] except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) + error = located_error(raw_error, field_group, item_path.as_list()) handle_field_error(error, item_type, async_payload_record.errors) self.filter_subsequent_payloads(item_path, async_payload_record) completed_items = [None] @@ -1821,7 +1821,7 @@ async def 
await_completed_items() -> list[Any] | None: async def execute_stream_iterator_item( self, iterator: AsyncIterator[Any], - field_nodes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, async_payload_record: StreamRecord, @@ -1833,7 +1833,7 @@ async def execute_stream_iterator_item( try: item = await anext(iterator) completed_item = self.complete_value( - item_type, field_nodes, info, item_path, item, async_payload_record + item_type, field_group, info, item_path, item, async_payload_record ) return ( @@ -1847,7 +1847,7 @@ async def execute_stream_iterator_item( raise StopAsyncIteration from raw_error except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) + error = located_error(raw_error, field_group, item_path.as_list()) handle_field_error(error, item_type, async_payload_record.errors) self.filter_subsequent_payloads(item_path, async_payload_record) @@ -1855,7 +1855,7 @@ async def execute_stream_iterator( self, initial_index: int, iterator: AsyncIterator[Any], - field_modes: FieldGroup, + field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, @@ -1875,7 +1875,7 @@ async def execute_stream_iterator( try: data = await self.execute_stream_iterator_item( iterator, - field_modes, + field_group, info, item_type, async_payload_record, @@ -2242,12 +2242,12 @@ def handle_field_error( def invalid_return_type_error( - return_type: GraphQLObjectType, result: Any, field_nodes: FieldGroup + return_type: GraphQLObjectType, result: Any, field_group: FieldGroup ) -> GraphQLError: """Create a GraphQLError for an invalid return type.""" return GraphQLError( f"Expected value of type '{return_type.name}' but got: {inspect(result)}.", - field_nodes, + field_group, ) @@ -2510,16 +2510,16 @@ def execute_subscription( context.operation, ).fields first_root_field = next(iter(root_fields.items())) - response_name, field_nodes = first_root_field - field_name = 
field_nodes[0].name.value + response_name, field_group = first_root_field + field_name = field_group[0].name.value field_def = schema.get_field(root_type, field_name) if not field_def: msg = f"The subscription field '{field_name}' is not defined." - raise GraphQLError(msg, field_nodes) + raise GraphQLError(msg, field_group) path = Path(None, response_name, root_type.name) - info = context.build_resolve_info(field_def, field_nodes, root_type, path) + info = context.build_resolve_info(field_def, field_group, root_type, path) # Implements the "ResolveFieldEventStream" algorithm from GraphQL specification. # It differs from "ResolveFieldValue" due to providing a different `resolveFn`. @@ -2527,7 +2527,7 @@ def execute_subscription( try: # Build a dictionary of arguments from the field.arguments AST, using the # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_nodes[0], context.variable_values) + args = get_argument_values(field_def, field_group[0], context.variable_values) # Call the `subscribe()` resolver or the default resolver to produce an # AsyncIterable yielding raw payloads. 
@@ -2540,14 +2540,14 @@ async def await_result() -> AsyncIterable[Any]: try: return assert_event_stream(await result) except Exception as error: - raise located_error(error, field_nodes, path.as_list()) from error + raise located_error(error, field_group, path.as_list()) from error return await_result() return assert_event_stream(result) except Exception as error: - raise located_error(error, field_nodes, path.as_list()) from error + raise located_error(error, field_group, path.as_list()) from error def assert_event_stream(result: Any) -> AsyncIterable: diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index fc7fd2bc..ece56542 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -72,8 +72,8 @@ def enter_operation_definition( extra_field_selection, ) ) - for field_nodes in fields.values(): - field_name = field_nodes[0].name.value + for field_group in fields.values(): + field_name = field_group[0].name.value if field_name.startswith("__"): self.report_error( GraphQLError( @@ -83,6 +83,6 @@ def enter_operation_definition( else f"Subscription '{operation_name}'" ) + " must not select an introspection top level field.", - field_nodes, + field_group, ) ) diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 1eca78eb..6d8cd369 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -43,10 +43,10 @@ def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): def execute_field( - self, parent_type, source, field_nodes, path, async_payload_record=None + self, parent_type, source, field_group, path, async_payload_record=None ): result = super().execute_field( - parent_type, source, field_nodes, path, async_payload_record + parent_type, source, field_group, path, async_payload_record ) return result * 2 # type: 
ignore From 600db31dcec0212c08662e4ce5d5869d975109dd Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 17:41:35 +0200 Subject: [PATCH 154/230] introduce GroupedFieldSet type Replicates graphql/graphql-js@45f2a59e12bd5ebe94c826a340e1d8b039ddbef2 --- src/graphql/execution/collect_fields.py | 16 ++++++++++++---- src/graphql/execution/execute.py | 7 ++++--- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 82456370..a14ddc65 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -4,7 +4,7 @@ import sys from collections import defaultdict -from typing import Any, List, NamedTuple +from typing import Any, Dict, List, NamedTuple from ..language import ( FieldNode, @@ -32,25 +32,33 @@ from typing_extensions import TypeAlias -__all__ = ["collect_fields", "collect_subfields", "FieldGroup", "FieldsAndPatches"] +__all__ = [ + "collect_fields", + "collect_subfields", + "FieldGroup", + "FieldsAndPatches", + "GroupedFieldSet", +] if sys.version_info < (3, 9): FieldGroup: TypeAlias = List[FieldNode] + GroupedFieldSet = Dict[str, FieldGroup] else: # Python >= 3.9 FieldGroup: TypeAlias = list[FieldNode] + GroupedFieldSet = dict[str, FieldGroup] class PatchFields(NamedTuple): """Optionally labelled set of fields to be used as a patch.""" label: str | None - fields: dict[str, FieldGroup] + fields: GroupedFieldSet class FieldsAndPatches(NamedTuple): """Tuple of collected fields and patches to be applied.""" - fields: dict[str, FieldGroup] + fields: GroupedFieldSet patches: list[PatchFields] diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 83c380f6..d3348917 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -77,6 +77,7 @@ from .collect_fields import ( FieldGroup, FieldsAndPatches, + GroupedFieldSet, collect_fields, collect_subfields, ) @@ 
-841,7 +842,7 @@ def execute_fields_serially( parent_type: GraphQLObjectType, source_value: Any, path: Path | None, - fields: dict[str, FieldGroup], + fields: GroupedFieldSet, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. @@ -881,7 +882,7 @@ def execute_fields( parent_type: GraphQLObjectType, source_value: Any, path: Path | None, - fields: dict[str, FieldGroup], + fields: GroupedFieldSet, async_payload_record: AsyncPayloadRecord | None = None, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -1692,7 +1693,7 @@ def execute_deferred_fragment( self, parent_type: GraphQLObjectType, source_value: Any, - fields: dict[str, FieldGroup], + fields: GroupedFieldSet, label: str | None = None, path: Path | None = None, parent_context: AsyncPayloadRecord | None = None, From afa6b93895bf6e3cd7e358a0c123d3d30d3899cb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 19:21:21 +0200 Subject: [PATCH 155/230] use groupedFieldSet as variable name Replicates graphql/graphql-js@a07440045d1a29a659cde9ce97234cecde0df1a3 --- src/graphql/execution/collect_fields.py | 24 +++++++++---------- src/graphql/execution/execute.py | 20 +++++++++------- .../rules/single_field_subscriptions.py | 10 ++++---- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index a14ddc65..0bfbdf2a 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -52,13 +52,13 @@ class PatchFields(NamedTuple): """Optionally labelled set of fields to be used as a patch.""" label: str | None - fields: GroupedFieldSet + grouped_field_set: GroupedFieldSet class FieldsAndPatches(NamedTuple): """Tuple of collected fields and patches to be applied.""" - fields: GroupedFieldSet + grouped_field_set: GroupedFieldSet patches: list[PatchFields] @@ -79,7 +79,7 @@ def collect_fields( For internal use only. 
""" - fields: dict[str, list[FieldNode]] = defaultdict(list) + grouped_field_set: dict[str, list[FieldNode]] = defaultdict(list) patches: list[PatchFields] = [] collect_fields_impl( schema, @@ -88,11 +88,11 @@ def collect_fields( operation, runtime_type, operation.selection_set, - fields, + grouped_field_set, patches, set(), ) - return FieldsAndPatches(fields, patches) + return FieldsAndPatches(grouped_field_set, patches) def collect_subfields( @@ -114,11 +114,11 @@ def collect_subfields( For internal use only. """ - sub_field_nodes: dict[str, list[FieldNode]] = defaultdict(list) + sub_grouped_field_set: dict[str, list[FieldNode]] = defaultdict(list) visited_fragment_names: set[str] = set() sub_patches: list[PatchFields] = [] - sub_fields_and_patches = FieldsAndPatches(sub_field_nodes, sub_patches) + sub_fields_and_patches = FieldsAndPatches(sub_grouped_field_set, sub_patches) for node in field_group: if node.selection_set: @@ -129,7 +129,7 @@ def collect_subfields( operation, return_type, node.selection_set, - sub_field_nodes, + sub_grouped_field_set, sub_patches, visited_fragment_names, ) @@ -143,7 +143,7 @@ def collect_fields_impl( operation: OperationDefinitionNode, runtime_type: GraphQLObjectType, selection_set: SelectionSetNode, - fields: dict[str, list[FieldNode]], + grouped_field_set: dict[str, list[FieldNode]], patches: list[PatchFields], visited_fragment_names: set[str], ) -> None: @@ -154,7 +154,7 @@ def collect_fields_impl( if isinstance(selection, FieldNode): if not should_include_node(variable_values, selection): continue - fields[get_field_entry_key(selection)].append(selection) + grouped_field_set[get_field_entry_key(selection)].append(selection) elif isinstance(selection, InlineFragmentNode): if not should_include_node( variable_values, selection @@ -184,7 +184,7 @@ def collect_fields_impl( operation, runtime_type, selection.selection_set, - fields, + grouped_field_set, patches, visited_fragment_names, ) @@ -229,7 +229,7 @@ def collect_fields_impl( 
operation, runtime_type, fragment.selection_set, - fields, + grouped_field_set, patches, visited_fragment_names, ) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index d3348917..027b67f9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -813,7 +813,7 @@ def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: ) raise GraphQLError(msg, operation) - root_fields, patches = collect_fields( + grouped_field_set, patches = collect_fields( schema, self.fragments, self.variable_values, @@ -827,12 +827,12 @@ def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, None, root_fields) # type: ignore + )(root_type, root_value, None, grouped_field_set) # type: ignore for patch in patches: - label, patch_fields = patch + label, patch_grouped_filed_set = patch self.execute_deferred_fragment( - root_type, root_value, patch_fields, label, None + root_type, root_value, patch_grouped_filed_set, label, None ) return result @@ -1604,10 +1604,12 @@ def collect_and_execute_subfields( async_payload_record: AsyncPayloadRecord | None, ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" - sub_field_nodes, sub_patches = self.collect_subfields(return_type, field_group) + sub_grouped_field_set, sub_patches = self.collect_subfields( + return_type, field_group + ) sub_fields = self.execute_fields( - return_type, result, path, sub_field_nodes, async_payload_record + return_type, result, path, sub_grouped_field_set, async_payload_record ) for sub_patch in sub_patches: @@ -2503,14 +2505,14 @@ def execute_subscription( msg = "Schema is not configured to execute subscription operation." 
raise GraphQLError(msg, context.operation) - root_fields = collect_fields( + grouped_field_set = collect_fields( schema, context.fragments, context.variable_values, root_type, context.operation, - ).fields - first_root_field = next(iter(root_fields.items())) + ).grouped_field_set + first_root_field = next(iter(grouped_field_set.items())) response_name, field_group = first_root_field field_name = field_group[0].name.value field_def = schema.get_field(root_type, field_name) diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index ece56542..9a689809 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -42,15 +42,15 @@ def enter_operation_definition( for definition in document.definitions if isinstance(definition, FragmentDefinitionNode) } - fields = collect_fields( + grouped_field_set = collect_fields( schema, fragments, variable_values, subscription_type, node, - ).fields - if len(fields) > 1: - field_selection_lists = list(fields.values()) + ).grouped_field_set + if len(grouped_field_set) > 1: + field_selection_lists = list(grouped_field_set.values()) extra_field_selection_lists = field_selection_lists[1:] extra_field_selection = [ field @@ -72,7 +72,7 @@ def enter_operation_definition( extra_field_selection, ) ) - for field_group in fields.values(): + for field_group in grouped_field_set.values(): field_name = field_group[0].name.value if field_name.startswith("__"): self.report_error( From bce7a3df3e27739d0c4b122ab4be8d2fd1abc247 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 19:48:18 +0200 Subject: [PATCH 156/230] refactor handleFieldError Replicates graphql/graphql-js@31e1f8ca32445ea644e2380d7d33767da81ff4a3 --- src/graphql/execution/execute.py | 114 +++++++++++++++++++------------ 1 file changed, 70 insertions(+), 44 deletions(-) diff --git 
a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 027b67f9..69f1c7ee 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -944,7 +944,6 @@ def execute_field( calling its resolve function, then calls complete_value to await coroutine objects, serialize scalars, or execute the sub-selection-set for objects. """ - errors = async_payload_record.errors if async_payload_record else self.errors field_name = field_group[0].name.value field_def = self.schema.get_field(parent_type, field_name) if not field_def: @@ -983,16 +982,26 @@ async def await_completed() -> Any: try: return await completed except Exception as raw_error: - error = located_error(raw_error, field_group, path.as_list()) - handle_field_error(error, return_type, errors) + self.handle_field_error( + raw_error, + return_type, + field_group, + path, + async_payload_record, + ) self.filter_subsequent_payloads(path, async_payload_record) return None return await_completed() except Exception as raw_error: - error = located_error(raw_error, field_group, path.as_list()) - handle_field_error(error, return_type, errors) + self.handle_field_error( + raw_error, + return_type, + field_group, + path, + async_payload_record, + ) self.filter_subsequent_payloads(path, async_payload_record) return None @@ -1026,6 +1035,28 @@ def build_resolve_info( self.is_awaitable, ) + def handle_field_error( + self, + raw_error: Exception, + return_type: GraphQLOutputType, + field_group: FieldGroup, + path: Path, + async_payload_record: AsyncPayloadRecord | None = None, + ) -> None: + """Handle error properly according to the field type.""" + error = located_error(raw_error, field_group, path.as_list()) + + # If the field type is non-nullable, then it is resolved without any protection + # from errors, however it still properly locates the error. 
+ if is_non_null_type(return_type): + raise error + + errors = async_payload_record.errors if async_payload_record else self.errors + + # Otherwise, error protection is applied, logging the error and resolving a + # null value for this field if one is encountered. + errors.append(error) + def complete_value( self, return_type: GraphQLOutputType, @@ -1138,11 +1169,9 @@ async def complete_awaitable_value( if self.is_awaitable(completed): completed = await completed except Exception as raw_error: - errors = ( - async_payload_record.errors if async_payload_record else self.errors + self.handle_field_error( + raw_error, return_type, field_group, path, async_payload_record ) - error = located_error(raw_error, field_group, path.as_list()) - handle_field_error(error, return_type, errors) self.filter_subsequent_payloads(path, async_payload_record) completed = None return completed @@ -1198,7 +1227,6 @@ async def complete_async_iterator_value( Complete an async iterator value by completing the result and calling recursively until all the results are completed. """ - errors = async_payload_record.errors if async_payload_record else self.errors stream = self.get_stream_values(field_group, path) complete_list_item_value = self.complete_list_item_value awaitable_indices: list[int] = [] @@ -1236,14 +1264,14 @@ async def complete_async_iterator_value( except StopAsyncIteration: break except Exception as raw_error: - error = located_error(raw_error, field_group, item_path.as_list()) - handle_field_error(error, item_type, errors) + self.handle_field_error( + raw_error, item_type, field_group, item_path, async_payload_record + ) completed_results.append(None) break if complete_list_item_value( value, completed_results, - errors, item_type, field_group, info, @@ -1285,7 +1313,6 @@ def complete_list_value( Complete a list value by completing each item in the list with the inner type. 
""" item_type = return_type.of_type - errors = async_payload_record.errors if async_payload_record else self.errors if isinstance(result, AsyncIterable): iterator = result.__aiter__() @@ -1336,7 +1363,6 @@ def complete_list_value( if complete_list_item_value( item, completed_results, - errors, item_type, field_group, info, @@ -1370,7 +1396,6 @@ def complete_list_item_value( self, item: Any, complete_results: list[Any], - errors: list[GraphQLError], item_type: GraphQLOutputType, field_group: FieldGroup, info: GraphQLResolveInfo, @@ -1407,10 +1432,13 @@ async def await_completed() -> Any: try: return await completed_item except Exception as raw_error: - error = located_error( - raw_error, field_group, item_path.as_list() + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + async_payload_record, ) - handle_field_error(error, item_type, errors) self.filter_subsequent_payloads(item_path, async_payload_record) return None @@ -1420,8 +1448,13 @@ async def await_completed() -> Any: complete_results.append(completed_item) except Exception as raw_error: - error = located_error(raw_error, field_group, item_path.as_list()) - handle_field_error(error, item_type, errors) + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + async_payload_record, + ) self.filter_subsequent_payloads(item_path, async_payload_record) complete_results.append(None) @@ -1787,12 +1820,12 @@ async def await_completed_items() -> list[Any] | None: try: return [await completed_item] except Exception as raw_error: # pragma: no cover - # noinspection PyShadowingNames - error = located_error( - raw_error, field_group, item_path.as_list() - ) - handle_field_error( - error, item_type, async_payload_record.errors + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + async_payload_record, ) self.filter_subsequent_payloads( item_path, async_payload_record @@ -1808,8 +1841,13 @@ async def await_completed_items() -> list[Any] | 
None: completed_items = [completed_item] except Exception as raw_error: - error = located_error(raw_error, field_group, item_path.as_list()) - handle_field_error(error, item_type, async_payload_record.errors) + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + async_payload_record, + ) self.filter_subsequent_payloads(item_path, async_payload_record) completed_items = [None] @@ -1850,8 +1888,9 @@ async def execute_stream_iterator_item( raise StopAsyncIteration from raw_error except Exception as raw_error: - error = located_error(raw_error, field_group, item_path.as_list()) - handle_field_error(error, item_type, async_payload_record.errors) + self.handle_field_error( + raw_error, item_type, field_group, item_path, async_payload_record + ) self.filter_subsequent_payloads(item_path, async_payload_record) async def execute_stream_iterator( @@ -2231,19 +2270,6 @@ def execute_sync( return cast(ExecutionResult, result) -def handle_field_error( - error: GraphQLError, return_type: GraphQLOutputType, errors: list[GraphQLError] -) -> None: - """Handle error properly according to the field type.""" - # If the field type is non-nullable, then it is resolved without any protection - # from errors, however it still properly locates the error. - if is_non_null_type(return_type): - raise error - # Otherwise, error protection is applied, logging the error and resolving a - # null value for this field if one is encountered. 
- errors.append(error) - - def invalid_return_type_error( return_type: GraphQLObjectType, result: Any, field_group: FieldGroup ) -> GraphQLError: From d73b874af8a46c06ddb68939807220e22608e60a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 19:59:34 +0200 Subject: [PATCH 157/230] rename executeStreamIterator Replicates graphql/graphql-js@fab6426917e75d1b5b62543c12c81841ca99967d --- docs/conf.py | 1 + src/graphql/execution/execute.py | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 95f2fbc0..f54580f2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -159,6 +159,7 @@ GraphQLInputType GraphQLTypeResolver GraphQLOutputType +GroupedFieldSet Middleware asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 69f1c7ee..989a9d21 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1242,7 +1242,7 @@ async def complete_async_iterator_value( with suppress(TimeoutError): await wait_for( shield( - self.execute_stream_iterator( + self.execute_stream_async_iterator( index, iterator, field_group, @@ -1859,7 +1859,7 @@ async def await_completed_items() -> list[Any] | None: async_payload_record.add_items(completed_items) return async_payload_record - async def execute_stream_iterator_item( + async def execute_stream_async_iterator_item( self, iterator: AsyncIterator[Any], field_group: FieldGroup, @@ -1893,7 +1893,7 @@ async def execute_stream_iterator_item( ) self.filter_subsequent_payloads(item_path, async_payload_record) - async def execute_stream_iterator( + async def execute_stream_async_iterator( self, initial_index: int, iterator: AsyncIterator[Any], @@ -1915,7 +1915,7 @@ async def execute_stream_iterator( ) try: - data = await self.execute_stream_iterator_item( + data = await self.execute_stream_async_iterator_item( iterator, field_group, info, From 
a8cffbf280590f24257380a3f182ed46007831b3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 20:54:52 +0200 Subject: [PATCH 158/230] simplify schema in defer tests Replicates graphql/graphql-js@75114af537f00d9017d5dd63e54183567efda9a5 --- tests/execution/test_defer.py | 169 +++++++++++++++++----------------- 1 file changed, 83 insertions(+), 86 deletions(-) diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 487cedcf..d3ae8568 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -28,28 +28,29 @@ GraphQLString, ) - -def resolve_null_sync(_obj, _info) -> None: - """A resolver returning a null value synchronously.""" - return - - -async def resolve_null_async(_obj, _info) -> None: - """A resolver returning a null value asynchronously.""" - return - - friend_type = GraphQLObjectType( "Friend", { "id": GraphQLField(GraphQLID), "name": GraphQLField(GraphQLString), - "asyncNonNullErrorField": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=resolve_null_async - ), + "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + +hero_type = GraphQLObjectType( + "Hero", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), + "friends": GraphQLField(GraphQLList(friend_type)), }, ) +query = GraphQLObjectType("Query", {"hero": GraphQLField(hero_type)}) + +schema = GraphQLSchema(query) + class Friend(NamedTuple): id: int @@ -58,57 +59,44 @@ class Friend(NamedTuple): friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] +hero = {"id": 1, "name": "Luke", "friends": friends} -async def resolve_slow(_obj, _info) -> str: - """Simulate a slow async resolver returning a value.""" - await sleep(0) - return "slow" +class Resolvers: + """Various resolver functions for testing.""" -async def resolve_bad(_obj, _info) -> str: - """Simulate a bad async resolver raising an error.""" - raise 
RuntimeError("bad") + @staticmethod + def null(_info) -> None: + """A resolver returning a null value synchronously.""" + return + @staticmethod + async def null_async(_info) -> None: + """A resolver returning a null value asynchronously.""" + return -async def resolve_friends_async(_obj, _info) -> AsyncGenerator[Friend, None]: - """A slow async generator yielding the first friend.""" - await sleep(0) - yield friends[0] + @staticmethod + async def slow(_info) -> str: + """Simulate a slow async resolver returning a value.""" + await sleep(0) + return "slow" + @staticmethod + def bad(_info) -> str: + """Simulate a bad resolver raising an error.""" + raise RuntimeError("bad") -hero_type = GraphQLObjectType( - "Hero", - { - "id": GraphQLField(GraphQLID), - "name": GraphQLField(GraphQLString), - "slowField": GraphQLField(GraphQLString, resolve=resolve_slow), - "errorField": GraphQLField(GraphQLString, resolve=resolve_bad), - "nonNullErrorField": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=resolve_null_sync - ), - "asyncNonNullErrorField": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=resolve_null_async - ), - "friends": GraphQLField( - GraphQLList(friend_type), resolve=lambda _obj, _info: friends - ), - "asyncFriends": GraphQLField( - GraphQLList(friend_type), resolve=resolve_friends_async - ), - }, -) - -hero = Friend(1, "Luke") - -query = GraphQLObjectType( - "Query", {"hero": GraphQLField(hero_type, resolve=lambda _obj, _info: hero)} -) - -schema = GraphQLSchema(query) + @staticmethod + async def friends(_info) -> AsyncGenerator[Friend, None]: + """A slow async generator yielding the first friend.""" + await sleep(0) + yield friends[0] async def complete(document: DocumentNode, root_value: Any = None) -> Any: - result = experimental_execute_incrementally(schema, document, root_value) + result = experimental_execute_incrementally( + schema, document, root_value or {"hero": hero} + ) if is_awaitable(result): result = await result @@ -485,24 +473,24 
@@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): } fragment QueryFragment on Query { hero { - errorField + name } } """ ) - result = await complete(document) + result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) assert result == [ {"data": {}, "hasNext": True}, { "incremental": [ { - "data": {"hero": {"errorField": None}}, + "data": {"hero": {"name": None}}, "errors": [ { "message": "bad", "locations": [{"column": 17, "line": 7}], - "path": ["hero", "errorField"], + "path": ["hero", "name"], } ], "path": [], @@ -666,24 +654,24 @@ async def handles_errors_thrown_in_deferred_fragments(): } } fragment NameFragment on Hero { - errorField + name } """ ) - result = await complete(document) + result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { "incremental": [ { - "data": {"errorField": None}, + "data": {"name": None}, "path": ["hero"], "errors": [ { "message": "bad", "locations": [{"line": 9, "column": 15}], - "path": ["hero", "errorField"], + "path": ["hero", "name"], } ], }, @@ -703,11 +691,13 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): } } fragment NameFragment on Hero { - nonNullErrorField + nonNullName } """ ) - result = await complete(document) + result = await complete( + document, {"hero": {**hero, "nonNullName": Resolvers.null}} + ) assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, @@ -719,9 +709,9 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): "errors": [ { "message": "Cannot return null for non-nullable field" - " Hero.nonNullErrorField.", + " Hero.nonNullName.", "locations": [{"line": 9, "column": 15}], - "path": ["hero", "nonNullErrorField"], + "path": ["hero", "nonNullName"], } ], }, @@ -736,7 +726,7 @@ async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): """ query HeroNameQuery { hero { - nonNullErrorField + 
nonNullName ...NameFragment @defer } } @@ -745,16 +735,18 @@ async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): } """ ) - result = await complete(document) + result = await complete( + document, {"hero": {**hero, "nonNullName": Resolvers.null}} + ) assert result == { "data": {"hero": None}, "errors": [ { "message": "Cannot return null for non-nullable field" - " Hero.nonNullErrorField.", + " Hero.nonNullName.", "locations": [{"line": 4, "column": 17}], - "path": ["hero", "nonNullErrorField"], + "path": ["hero", "nonNullName"], } ], } @@ -770,11 +762,13 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): } } fragment NameFragment on Hero { - asyncNonNullErrorField + nonNullName } """ ) - result = await complete(document) + result = await complete( + document, {"hero": {**hero, "nonNullName": Resolvers.null_async}} + ) assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, @@ -786,9 +780,9 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): "errors": [ { "message": "Cannot return null for non-nullable field" - " Hero.asyncNonNullErrorField.", + " Hero.nonNullName.", "locations": [{"line": 9, "column": 15}], - "path": ["hero", "asyncNonNullErrorField"], + "path": ["hero", "nonNullName"], } ], }, @@ -808,7 +802,7 @@ async def returns_payloads_in_correct_order(): } } fragment NameFragment on Hero { - slowField + name friends { ...NestedFragment @defer } @@ -818,14 +812,14 @@ async def returns_payloads_in_correct_order(): } """ ) - result = await complete(document) + result = await complete(document, {"hero": {**hero, "name": Resolvers.slow}}) assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { "incremental": [ { - "data": {"slowField": "slow", "friends": [{}, {}, {}]}, + "data": {"name": "slow", "friends": [{}, {}, {}]}, "path": ["hero"], } ], @@ -909,8 +903,8 @@ async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): """ query { hero { 
- asyncFriends { - asyncNonNullErrorField + friends { + nonNullName ...NameFragment @defer } } @@ -921,16 +915,18 @@ async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): """ ) - result = await complete(document) + result = await complete( + document, {"hero": {**hero, "friends": Resolvers.friends}} + ) assert result == { - "data": {"hero": {"asyncFriends": [None]}}, + "data": {"hero": {"friends": [None]}}, "errors": [ { "message": "Cannot return null for non-nullable field" - " Friend.asyncNonNullErrorField.", + " Friend.nonNullName.", "locations": [{"line": 5, "column": 19}], - "path": ["hero", "asyncFriends", 0, "asyncNonNullErrorField"], + "path": ["hero", "friends", 0, "nonNullName"], } ], } @@ -958,14 +954,15 @@ async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync document = parse( """ query Deferred { - hero { slowField } + hero { name } ... @defer { hero { id } } } """ ) + root_value = {"hero": {**hero, "name": Resolvers.slow}} with pytest.raises(GraphQLError) as exc_info: - await execute(schema, document, {}) # type: ignore + await execute(schema, document, root_value) # type: ignore assert str(exc_info.value) == ( "Executing this GraphQL operation would unexpectedly produce" From 7f4f04d441f3f0b7519a79646e7835872f0c9484 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:04:13 +0200 Subject: [PATCH 159/230] Expose print_directive function to enable schema sharding Replicates graphql/graphql-js@d45e48b3c45c3fe6c630c93262a060c0a6c2f71d --- docs/modules/utilities.rst | 3 ++- src/graphql/__init__.py | 3 +++ src/graphql/utilities/__init__.py | 6 ++++-- src/graphql/utilities/print_schema.py | 8 +++++++- 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/docs/modules/utilities.rst b/docs/modules/utilities.rst index e79809f4..65169b39 100644 --- a/docs/modules/utilities.rst +++ b/docs/modules/utilities.rst @@ -41,9 +41,10 @@ Sort a GraphQLSchema: Print a GraphQLSchema to 
GraphQL Schema language: -.. autofunction:: print_introspection_schema .. autofunction:: print_schema .. autofunction:: print_type +.. autofunction:: print_directive +.. autofunction:: print_introspection_schema Create a GraphQLType from a GraphQL language AST: diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index d4805cda..e85c51ee 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -188,6 +188,8 @@ print_schema, # Print a GraphQLType to GraphQL Schema language. print_type, + # Print a GraphQLDirective to GraphQL Schema language. + print_directive, # Prints the built-in introspection schema in the Schema Language format. print_introspection_schema, # Create a GraphQLType from a GraphQL language AST. @@ -788,6 +790,7 @@ "lexicographic_sort_schema", "print_schema", "print_type", + "print_directive", "print_introspection_schema", "type_from_ast", "value_from_ast", diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index 26585595..f528bdcc 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -27,9 +27,10 @@ # Print a GraphQLSchema to GraphQL Schema language. 
from .print_schema import ( - print_introspection_schema, print_schema, print_type, + print_directive, + print_introspection_schema, print_value, # deprecated ) @@ -103,9 +104,10 @@ "is_type_sub_type_of", "introspection_from_schema", "lexicographic_sort_schema", - "print_introspection_schema", "print_schema", "print_type", + "print_directive", + "print_introspection_schema", "print_value", "separate_operations", "strip_ignored_characters", diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 294f7391..44c876dc 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -32,7 +32,13 @@ ) from .ast_from_value import ast_from_value -__all__ = ["print_schema", "print_introspection_schema", "print_type", "print_value"] +__all__ = [ + "print_schema", + "print_type", + "print_directive", + "print_introspection_schema", + "print_value", +] def print_schema(schema: GraphQLSchema) -> str: From 154ab12be6948d1cea73fc4771f61bcbd3fe23b4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:08:11 +0200 Subject: [PATCH 160/230] remove unnecessary duplicated fields from defer tests Replicates graphql/graphql-js@24b97617c315922b85625c221e56a93ec2c47557 --- tests/execution/test_defer.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index d3ae8568..6ca1984b 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -349,7 +349,6 @@ async def can_defer_fragments_containing_scalar_types(): } } fragment NameFragment on Hero { - id name } """ @@ -359,9 +358,7 @@ async def can_defer_fragments_containing_scalar_types(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ - {"data": {"id": "1", "name": "Luke"}, "path": ["hero"]} - ], + "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "hasNext": False, }, ] @@ 
-507,12 +504,11 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): """ query HeroNameQuery { hero { - id ...TopFragment @defer(label: "DeferTop") } } fragment TopFragment on Hero { - name + id ...NestedFragment @defer(label: "DeferNested") } fragment NestedFragment on Hero { @@ -525,7 +521,7 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + {"data": {"hero": {}}, "hasNext": True}, { "incremental": [ { @@ -540,7 +536,7 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): "label": "DeferNested", }, { - "data": {"name": "Luke"}, + "data": {"id": "1"}, "path": ["hero"], "label": "DeferTop", }, @@ -555,7 +551,6 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): """ query HeroNameQuery { hero { - id ...TopFragment @defer(label: "DeferTop") ...TopFragment } @@ -568,7 +563,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1", "name": "Luke"}}, "hasNext": True}, + {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, { "incremental": [ { @@ -587,7 +582,6 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first """ query HeroNameQuery { hero { - id ...TopFragment ...TopFragment @defer(label: "DeferTop") } @@ -600,7 +594,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1", "name": "Luke"}}, "hasNext": True}, + {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, { "incremental": [ { From 1b6cc58fbc73d00947d3ac9b9937dcc1061f375e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:10:41 +0200 Subject: [PATCH 161/230] Fix misleading test section description Replicates 
graphql/graphql-js@c994728bd271ae92a49370102627c6b74af42c60 --- tests/validation/test_defer_stream_directive_label.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/validation/test_defer_stream_directive_label.py b/tests/validation/test_defer_stream_directive_label.py index 3ecbcf46..a75acd6f 100644 --- a/tests/validation/test_defer_stream_directive_label.py +++ b/tests/validation/test_defer_stream_directive_label.py @@ -9,7 +9,7 @@ assert_valid = partial(assert_errors, errors=[]) -def describe_defer_stream_label(): +def describe_defer_stream_directive_labels(): def defer_fragments_with_no_label(): assert_valid( """ From 39a873bd85ee824029403ff2a1aa34405e5dec0e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:14:04 +0200 Subject: [PATCH 162/230] rename fields parameter in execute_fields_serially to grouped_field_set Replicates graphql/graphql-js@5f58075500cadfa454720f788f12cd20b872386c --- src/graphql/execution/execute.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 989a9d21..b0547975 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -842,7 +842,7 @@ def execute_fields_serially( parent_type: GraphQLObjectType, source_value: Any, path: Path | None, - fields: GroupedFieldSet, + grouped_field_set: GroupedFieldSet, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. 
@@ -875,7 +875,7 @@ async def set_result( return results # noinspection PyTypeChecker - return async_reduce(reducer, fields.items(), {}) + return async_reduce(reducer, grouped_field_set.items(), {}) def execute_fields( self, From a2d1f89373b411eccc2d7d321ec038da9165e398 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:23:16 +0200 Subject: [PATCH 163/230] rename iterator to async_iterator Replicates graphql/graphql-js@d42f6e35ae9e920eab6589bbc6d32109d98d51c3 --- src/graphql/execution/execute.py | 50 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index b0547975..83452cba 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1219,7 +1219,7 @@ async def complete_async_iterator_value( field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, - iterator: AsyncIterator[Any], + async_iterator: AsyncIterator[Any], async_payload_record: AsyncPayloadRecord | None, ) -> list[Any]: """Complete an async iterator. 
@@ -1244,7 +1244,7 @@ async def complete_async_iterator_value( shield( self.execute_stream_async_iterator( index, - iterator, + async_iterator, field_group, info, item_type, @@ -1260,7 +1260,7 @@ async def complete_async_iterator_value( item_path = path.add_key(index, None) try: try: - value = await anext(iterator) + value = await anext(async_iterator) except StopAsyncIteration: break except Exception as raw_error: @@ -1315,10 +1315,10 @@ def complete_list_value( item_type = return_type.of_type if isinstance(result, AsyncIterable): - iterator = result.__aiter__() + async_iterator = result.__aiter__() return self.complete_async_iterator_value( - item_type, field_group, info, path, iterator, async_payload_record + item_type, field_group, info, path, async_iterator, async_payload_record ) if not is_iterable(result): @@ -1861,7 +1861,7 @@ async def await_completed_items() -> list[Any] | None: async def execute_stream_async_iterator_item( self, - iterator: AsyncIterator[Any], + async_iterator: AsyncIterator[Any], field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, @@ -1869,10 +1869,10 @@ async def execute_stream_async_iterator_item( item_path: Path, ) -> Any: """Execute stream iterator item.""" - if iterator in self._canceled_iterators: + if async_iterator in self._canceled_iterators: raise StopAsyncIteration try: - item = await anext(iterator) + item = await anext(async_iterator) completed_item = self.complete_value( item_type, field_group, info, item_path, item, async_payload_record ) @@ -1884,7 +1884,7 @@ async def execute_stream_async_iterator_item( ) except StopAsyncIteration as raw_error: - async_payload_record.set_is_completed_iterator() + async_payload_record.set_is_completed_async_iterator() raise StopAsyncIteration from raw_error except Exception as raw_error: @@ -1896,7 +1896,7 @@ async def execute_stream_async_iterator_item( async def execute_stream_async_iterator( self, initial_index: int, - iterator: AsyncIterator[Any], + 
async_iterator: AsyncIterator[Any], field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, @@ -1911,12 +1911,12 @@ async def execute_stream_async_iterator( while True: item_path = Path(path, index, None) async_payload_record = StreamRecord( - label, item_path, iterator, previous_async_payload_record, self + label, item_path, async_iterator, previous_async_payload_record, self ) try: data = await self.execute_stream_async_iterator_item( - iterator, + async_iterator, field_group, info, item_type, @@ -1933,12 +1933,12 @@ async def execute_stream_async_iterator( async_payload_record.errors.append(error) self.filter_subsequent_payloads(path, async_payload_record) async_payload_record.add_items(None) - if iterator: # pragma: no cover else + if async_iterator: # pragma: no cover else with suppress(Exception): - await iterator.aclose() # type: ignore + await async_iterator.aclose() # type: ignore # running generators cannot be closed since Python 3.8, # so we need to remember that this iterator is already canceled - self._canceled_iterators.add(iterator) + self._canceled_iterators.add(async_iterator) break async_payload_record.add_items([data]) @@ -1961,8 +1961,8 @@ def filter_subsequent_payloads( # async_record points to a path unaffected by this payload continue # async_record path points to nulled error field - if isinstance(async_record, StreamRecord) and async_record.iterator: - self._canceled_iterators.add(async_record.iterator) + if isinstance(async_record, StreamRecord) and async_record.async_iterator: + self._canceled_iterators.add(async_record.async_iterator) del self.subsequent_payloads[async_record] def get_completed_incremental_results(self) -> list[IncrementalResult]: @@ -1977,7 +1977,7 @@ def get_completed_incremental_results(self) -> list[IncrementalResult]: del self.subsequent_payloads[async_payload_record] if isinstance(async_payload_record, StreamRecord): items = async_payload_record.items - if 
async_payload_record.is_completed_iterator: + if async_payload_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload continue # pragma: no cover incremental_result = IncrementalStreamResult( @@ -2667,8 +2667,8 @@ class StreamRecord: path: list[str | int] items: list[str] | None parent_context: AsyncPayloadRecord | None - iterator: AsyncIterator[Any] | None - is_completed_iterator: bool + async_iterator: AsyncIterator[Any] | None + is_completed_async_iterator: bool completed: Event _context: ExecutionContext _items: AwaitableOrValue[list[Any] | None] @@ -2678,21 +2678,21 @@ def __init__( self, label: str | None, path: Path | None, - iterator: AsyncIterator[Any] | None, + async_iterator: AsyncIterator[Any] | None, parent_context: AsyncPayloadRecord | None, context: ExecutionContext, ) -> None: self.label = label self.path = path.as_list() if path else [] self.parent_context = parent_context - self.iterator = iterator + self.async_iterator = async_iterator self.errors = [] self._context = context context.subsequent_payloads[self] = None self.items = self._items = None self.completed = Event() self._items_added = Event() - self.is_completed_iterator = False + self.is_completed_async_iterator = False def __repr__(self) -> str: name = self.__class__.__name__ @@ -2729,9 +2729,9 @@ def add_items(self, items: AwaitableOrValue[list[Any] | None]) -> None: self._items = items self._items_added.set() - def set_is_completed_iterator(self) -> None: + def set_is_completed_async_iterator(self) -> None: """Mark as completed.""" - self.is_completed_iterator = True + self.is_completed_async_iterator = True self._items_added.set() From 398d5cc2c0112ff9876a21dcdd7553bfa45cdb5c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:28:20 +0200 Subject: [PATCH 164/230] rename StreamRecord to StreamItemsRecord Replicates graphql/graphql-js@ce64e567c0d58addded5ee02e75ffdc378c6099b --- src/graphql/execution/execute.py | 
19 +++++++++++-------- tests/execution/test_stream.py | 8 +++++--- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 83452cba..14a73199 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -106,7 +106,7 @@ async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 "subscribe", "AsyncPayloadRecord", "DeferredFragmentRecord", - "StreamRecord", + "StreamItemsRecord", "ExecutionResult", "ExecutionContext", "ExperimentalIncrementalExecutionResults", @@ -1772,7 +1772,7 @@ def execute_stream_field( ) -> AsyncPayloadRecord: """Execute stream field.""" is_awaitable = self.is_awaitable - async_payload_record = StreamRecord( + async_payload_record = StreamItemsRecord( label, item_path, None, parent_context, self ) completed_item: Any @@ -1865,7 +1865,7 @@ async def execute_stream_async_iterator_item( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, - async_payload_record: StreamRecord, + async_payload_record: StreamItemsRecord, item_path: Path, ) -> Any: """Execute stream iterator item.""" @@ -1910,7 +1910,7 @@ async def execute_stream_async_iterator( while True: item_path = Path(path, index, None) - async_payload_record = StreamRecord( + async_payload_record = StreamItemsRecord( label, item_path, async_iterator, previous_async_payload_record, self ) @@ -1961,7 +1961,10 @@ def filter_subsequent_payloads( # async_record points to a path unaffected by this payload continue # async_record path points to nulled error field - if isinstance(async_record, StreamRecord) and async_record.async_iterator: + if ( + isinstance(async_record, StreamItemsRecord) + and async_record.async_iterator + ): self._canceled_iterators.add(async_record.async_iterator) del self.subsequent_payloads[async_record] @@ -1975,7 +1978,7 @@ def get_completed_incremental_results(self) -> list[IncrementalResult]: if not 
async_payload_record.completed.is_set(): continue del self.subsequent_payloads[async_payload_record] - if isinstance(async_payload_record, StreamRecord): + if isinstance(async_payload_record, StreamItemsRecord): items = async_payload_record.items if async_payload_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload @@ -2659,7 +2662,7 @@ def add_data(self, data: AwaitableOrValue[dict[str, Any] | None]) -> None: self._data_added.set() -class StreamRecord: +class StreamItemsRecord: """A record collecting items marked with the stream directive""" errors: list[GraphQLError] @@ -2735,4 +2738,4 @@ def set_is_completed_async_iterator(self) -> None: self._items_added.set() -AsyncPayloadRecord = Union[DeferredFragmentRecord, StreamRecord] +AsyncPayloadRecord = Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index fb84c6d9..091484e2 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -12,7 +12,7 @@ IncrementalStreamResult, experimental_execute_incrementally, ) -from graphql.execution.execute import StreamRecord +from graphql.execution.execute import StreamItemsRecord from graphql.language import DocumentNode, parse from graphql.pyutils import Path from graphql.type import ( @@ -175,9 +175,11 @@ def can_format_and_print_incremental_stream_result(): def can_print_stream_record(): context = ExecutionContext.build(schema, parse("{ hero { id } }")) assert isinstance(context, ExecutionContext) - record = StreamRecord(None, None, None, None, context) + record = StreamItemsRecord(None, None, None, None, context) assert str(record) == "StreamRecord(path=[])" - record = StreamRecord("foo", Path(None, "bar", "Bar"), None, record, context) + record = StreamItemsRecord( + "foo", Path(None, "bar", "Bar"), None, record, context + ) assert ( str(record) == "StreamRecord(" "path=['bar'], label='foo', parent_context)" ) From 
040294877232a97e618fe3dcfdd09258aafa6c2f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:44:37 +0200 Subject: [PATCH 165/230] rename AsyncPayloadRecord to IncrementalDataRecord Replicates graphql/graphql-js@b5813f06d419c24d8dd3165d7b8ed0914b78a423 --- docs/conf.py | 4 +- src/graphql/execution/execute.py | 275 +++++++++++++++++------------- tests/execution/test_customize.py | 9 +- tests/execution/test_stream.py | 7 +- 4 files changed, 166 insertions(+), 129 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index f54580f2..6f719343 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -146,7 +146,6 @@ traceback types.TracebackType TypeMap -AsyncPayloadRecord AwaitableOrValue EnterLeaveVisitor ExperimentalIncrementalExecutionResults @@ -160,6 +159,7 @@ GraphQLTypeResolver GraphQLOutputType GroupedFieldSet +IncrementalDataRecord Middleware asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches @@ -168,7 +168,7 @@ graphql.execution.execute.DeferredFragmentRecord graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments -graphql.execution.execute.StreamRecord +graphql.execution.execute.StreamItemsRecord graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor graphql.type.definition.TContext diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 14a73199..dba46135 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -104,7 +104,7 @@ async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 "execute_sync", "experimental_execute_incrementally", "subscribe", - "AsyncPayloadRecord", + "IncrementalDataRecord", "DeferredFragmentRecord", "StreamItemsRecord", "ExecutionResult", @@ -632,7 +632,7 @@ class ExecutionContext: type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver errors: list[GraphQLError] - subsequent_payloads: dict[AsyncPayloadRecord, None] # 
used as ordered set + subsequent_payloads: dict[IncrementalDataRecord, None] # used as ordered set middleware_manager: MiddlewareManager | None is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( @@ -650,7 +650,7 @@ def __init__( field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, - subsequent_payloads: dict[AsyncPayloadRecord, None], + subsequent_payloads: dict[IncrementalDataRecord, None], errors: list[GraphQLError], middleware_manager: MiddlewareManager | None, is_awaitable: Callable[[Any], bool] | None, @@ -883,7 +883,7 @@ def execute_fields( source_value: Any, path: Path | None, fields: GroupedFieldSet, - async_payload_record: AsyncPayloadRecord | None = None, + incremental_data_record: IncrementalDataRecord | None = None, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -897,7 +897,11 @@ def execute_fields( for response_name, field_group in fields.items(): field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_group, field_path, async_payload_record + parent_type, + source_value, + field_group, + field_path, + incremental_data_record, ) if result is not Undefined: results[response_name] = result @@ -934,7 +938,7 @@ def execute_field( source: Any, field_group: FieldGroup, path: Path, - async_payload_record: AsyncPayloadRecord | None = None, + incremental_data_record: IncrementalDataRecord | None = None, ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. 
@@ -970,11 +974,16 @@ def execute_field( if self.is_awaitable(result): return self.complete_awaitable_value( - return_type, field_group, info, path, result, async_payload_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, ) completed = self.complete_value( - return_type, field_group, info, path, result, async_payload_record + return_type, field_group, info, path, result, incremental_data_record ) if self.is_awaitable(completed): # noinspection PyShadowingNames @@ -987,9 +996,9 @@ async def await_completed() -> Any: return_type, field_group, path, - async_payload_record, + incremental_data_record, ) - self.filter_subsequent_payloads(path, async_payload_record) + self.filter_subsequent_payloads(path, incremental_data_record) return None return await_completed() @@ -1000,9 +1009,9 @@ async def await_completed() -> Any: return_type, field_group, path, - async_payload_record, + incremental_data_record, ) - self.filter_subsequent_payloads(path, async_payload_record) + self.filter_subsequent_payloads(path, incremental_data_record) return None return completed @@ -1041,7 +1050,7 @@ def handle_field_error( return_type: GraphQLOutputType, field_group: FieldGroup, path: Path, - async_payload_record: AsyncPayloadRecord | None = None, + incremental_data_record: IncrementalDataRecord | None = None, ) -> None: """Handle error properly according to the field type.""" error = located_error(raw_error, field_group, path.as_list()) @@ -1051,7 +1060,9 @@ def handle_field_error( if is_non_null_type(return_type): raise error - errors = async_payload_record.errors if async_payload_record else self.errors + errors = ( + incremental_data_record.errors if incremental_data_record else self.errors + ) # Otherwise, error protection is applied, logging the error and resolving a # null value for this field if one is encountered. 
@@ -1064,7 +1075,7 @@ def complete_value( info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> AwaitableOrValue[Any]: """Complete a value. @@ -1101,7 +1112,7 @@ def complete_value( info, path, result, - async_payload_record, + incremental_data_record, ) if completed is None: msg = ( @@ -1118,7 +1129,7 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - return_type, field_group, info, path, result, async_payload_record + return_type, field_group, info, path, result, incremental_data_record ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, @@ -1130,13 +1141,13 @@ def complete_value( # Object type and complete for that type. if is_abstract_type(return_type): return self.complete_abstract_value( - return_type, field_group, info, path, result, async_payload_record + return_type, field_group, info, path, result, incremental_data_record ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - return_type, field_group, info, path, result, async_payload_record + return_type, field_group, info, path, result, incremental_data_record ) # Not reachable. All possible output types have been considered. 
@@ -1153,7 +1164,7 @@ async def complete_awaitable_value( info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: AsyncPayloadRecord | None = None, + incremental_data_record: IncrementalDataRecord | None = None, ) -> Any: """Complete an awaitable value.""" try: @@ -1164,15 +1175,15 @@ async def complete_awaitable_value( info, path, resolved, - async_payload_record, + incremental_data_record, ) if self.is_awaitable(completed): completed = await completed except Exception as raw_error: self.handle_field_error( - raw_error, return_type, field_group, path, async_payload_record + raw_error, return_type, field_group, path, incremental_data_record ) - self.filter_subsequent_payloads(path, async_payload_record) + self.filter_subsequent_payloads(path, incremental_data_record) completed = None return completed @@ -1220,7 +1231,7 @@ async def complete_async_iterator_value( info: GraphQLResolveInfo, path: Path, async_iterator: AsyncIterator[Any], - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> list[Any]: """Complete an async iterator. 
@@ -1250,7 +1261,7 @@ async def complete_async_iterator_value( item_type, path, stream.label, - async_payload_record, + incremental_data_record, ) ), timeout=ASYNC_DELAY, @@ -1265,7 +1276,11 @@ async def complete_async_iterator_value( break except Exception as raw_error: self.handle_field_error( - raw_error, item_type, field_group, item_path, async_payload_record + raw_error, + item_type, + field_group, + item_path, + incremental_data_record, ) completed_results.append(None) break @@ -1276,7 +1291,7 @@ async def complete_async_iterator_value( field_group, info, item_path, - async_payload_record, + incremental_data_record, ): append_awaitable(index) @@ -1306,7 +1321,7 @@ def complete_list_value( info: GraphQLResolveInfo, path: Path, result: AsyncIterable[Any] | Iterable[Any], - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> AwaitableOrValue[list[Any]]: """Complete a list value. @@ -1318,7 +1333,12 @@ def complete_list_value( async_iterator = result.__aiter__() return self.complete_async_iterator_value( - item_type, field_group, info, path, async_iterator, async_payload_record + item_type, + field_group, + info, + path, + async_iterator, + incremental_data_record, ) if not is_iterable(result): @@ -1336,7 +1356,7 @@ def complete_list_value( complete_list_item_value = self.complete_list_item_value awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append - previous_async_payload_record = async_payload_record + previous_incremental_data_record = incremental_data_record completed_results: list[Any] = [] for index, item in enumerate(result): # No need to modify the info object containing the path, since from here on @@ -1348,7 +1368,7 @@ def complete_list_value( and isinstance(stream.initial_count, int) and index >= stream.initial_count ): - previous_async_payload_record = self.execute_stream_field( + previous_incremental_data_record = self.execute_stream_field( path, item_path, item, @@ 
-1356,7 +1376,7 @@ def complete_list_value( info, item_type, stream.label, - previous_async_payload_record, + previous_incremental_data_record, ) continue @@ -1367,7 +1387,7 @@ def complete_list_value( field_group, info, item_path, - async_payload_record, + incremental_data_record, ): append_awaitable(index) @@ -1400,7 +1420,7 @@ def complete_list_item_value( field_group: FieldGroup, info: GraphQLResolveInfo, item_path: Path, - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> bool: """Complete a list item value by adding it to the completed results. @@ -1411,7 +1431,12 @@ def complete_list_item_value( if is_awaitable(item): complete_results.append( self.complete_awaitable_value( - item_type, field_group, info, item_path, item, async_payload_record + item_type, + field_group, + info, + item_path, + item, + incremental_data_record, ) ) return True @@ -1423,7 +1448,7 @@ def complete_list_item_value( info, item_path, item, - async_payload_record, + incremental_data_record, ) if is_awaitable(completed_item): @@ -1437,9 +1462,11 @@ async def await_completed() -> Any: item_type, field_group, item_path, - async_payload_record, + incremental_data_record, + ) + self.filter_subsequent_payloads( + item_path, incremental_data_record ) - self.filter_subsequent_payloads(item_path, async_payload_record) return None complete_results.append(await_completed()) @@ -1453,9 +1480,9 @@ async def await_completed() -> Any: item_type, field_group, item_path, - async_payload_record, + incremental_data_record, ) - self.filter_subsequent_payloads(item_path, async_payload_record) + self.filter_subsequent_payloads(item_path, incremental_data_record) complete_results.append(None) return False @@ -1484,7 +1511,7 @@ def complete_abstract_value( info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> AwaitableOrValue[Any]: 
"""Complete an abstract value. @@ -1510,7 +1537,7 @@ async def await_complete_object_value() -> Any: info, path, result, - async_payload_record, + incremental_data_record, ) if self.is_awaitable(value): return await value # type: ignore @@ -1527,7 +1554,7 @@ async def await_complete_object_value() -> Any: info, path, result, - async_payload_record, + incremental_data_record, ) def ensure_valid_runtime_type( @@ -1599,7 +1626,7 @@ def complete_object_value( info: GraphQLResolveInfo, path: Path, result: Any, - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current @@ -1616,7 +1643,7 @@ async def execute_subfields_async() -> dict[str, Any]: return_type, result, field_group ) return self.collect_and_execute_subfields( - return_type, field_group, path, result, async_payload_record + return_type, field_group, path, result, incremental_data_record ) # type: ignore return execute_subfields_async() @@ -1625,7 +1652,7 @@ async def execute_subfields_async() -> dict[str, Any]: raise invalid_return_type_error(return_type, result, field_group) return self.collect_and_execute_subfields( - return_type, field_group, path, result, async_payload_record + return_type, field_group, path, result, incremental_data_record ) def collect_and_execute_subfields( @@ -1634,7 +1661,7 @@ def collect_and_execute_subfields( field_group: FieldGroup, path: Path, result: Any, - async_payload_record: AsyncPayloadRecord | None, + incremental_data_record: IncrementalDataRecord | None, ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" sub_grouped_field_set, sub_patches = self.collect_subfields( @@ -1642,7 +1669,7 @@ def collect_and_execute_subfields( ) sub_fields = self.execute_fields( - return_type, result, path, 
sub_grouped_field_set, async_payload_record + return_type, result, path, sub_grouped_field_set, incremental_data_record ) for sub_patch in sub_patches: @@ -1653,7 +1680,7 @@ def collect_and_execute_subfields( sub_patch_field_nodes, label, path, - async_payload_record, + incremental_data_record, ) return sub_fields @@ -1731,13 +1758,15 @@ def execute_deferred_fragment( fields: GroupedFieldSet, label: str | None = None, path: Path | None = None, - parent_context: AsyncPayloadRecord | None = None, + parent_context: IncrementalDataRecord | None = None, ) -> None: """Execute deferred fragment.""" - async_payload_record = DeferredFragmentRecord(label, path, parent_context, self) + incremental_data_record = DeferredFragmentRecord( + label, path, parent_context, self + ) try: awaitable_or_data = self.execute_fields( - parent_type, source_value, path, fields, async_payload_record + parent_type, source_value, path, fields, incremental_data_record ) if self.is_awaitable(awaitable_or_data): @@ -1749,15 +1778,15 @@ async def await_data( try: return await awaitable except GraphQLError as error: - async_payload_record.errors.append(error) + incremental_data_record.errors.append(error) return None awaitable_or_data = await_data(awaitable_or_data) # type: ignore except GraphQLError as error: - async_payload_record.errors.append(error) + incremental_data_record.errors.append(error) awaitable_or_data = None - async_payload_record.add_data(awaitable_or_data) + incremental_data_record.add_data(awaitable_or_data) def execute_stream_field( self, @@ -1768,11 +1797,11 @@ def execute_stream_field( info: GraphQLResolveInfo, item_type: GraphQLOutputType, label: str | None = None, - parent_context: AsyncPayloadRecord | None = None, - ) -> AsyncPayloadRecord: + parent_context: IncrementalDataRecord | None = None, + ) -> IncrementalDataRecord: """Execute stream field.""" is_awaitable = self.is_awaitable - async_payload_record = StreamItemsRecord( + incremental_data_record = StreamItemsRecord( 
label, item_path, None, parent_context, self ) completed_item: Any @@ -1788,16 +1817,16 @@ async def await_completed_items() -> list[Any] | None: info, item_path, item, - async_payload_record, + incremental_data_record, ) ] except GraphQLError as error: - async_payload_record.errors.append(error) - self.filter_subsequent_payloads(path, async_payload_record) + incremental_data_record.errors.append(error) + self.filter_subsequent_payloads(path, incremental_data_record) return None - async_payload_record.add_items(await_completed_items()) - return async_payload_record + incremental_data_record.add_items(await_completed_items()) + return incremental_data_record try: try: @@ -1807,7 +1836,7 @@ async def await_completed_items() -> list[Any] | None: info, item_path, item, - async_payload_record, + incremental_data_record, ) completed_items: Any @@ -1825,15 +1854,17 @@ async def await_completed_items() -> list[Any] | None: item_type, field_group, item_path, - async_payload_record, + incremental_data_record, ) self.filter_subsequent_payloads( - item_path, async_payload_record + item_path, incremental_data_record ) return [None] except GraphQLError as error: # pragma: no cover - async_payload_record.errors.append(error) - self.filter_subsequent_payloads(path, async_payload_record) + incremental_data_record.errors.append(error) + self.filter_subsequent_payloads( + path, incremental_data_record + ) return None completed_items = await_completed_items() @@ -1846,18 +1877,18 @@ async def await_completed_items() -> list[Any] | None: item_type, field_group, item_path, - async_payload_record, + incremental_data_record, ) - self.filter_subsequent_payloads(item_path, async_payload_record) + self.filter_subsequent_payloads(item_path, incremental_data_record) completed_items = [None] except GraphQLError as error: - async_payload_record.errors.append(error) - self.filter_subsequent_payloads(item_path, async_payload_record) + incremental_data_record.errors.append(error) + 
self.filter_subsequent_payloads(item_path, incremental_data_record) completed_items = None - async_payload_record.add_items(completed_items) - return async_payload_record + incremental_data_record.add_items(completed_items) + return incremental_data_record async def execute_stream_async_iterator_item( self, @@ -1865,7 +1896,7 @@ async def execute_stream_async_iterator_item( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, - async_payload_record: StreamItemsRecord, + incremental_data_record: StreamItemsRecord, item_path: Path, ) -> Any: """Execute stream iterator item.""" @@ -1874,7 +1905,7 @@ async def execute_stream_async_iterator_item( try: item = await anext(async_iterator) completed_item = self.complete_value( - item_type, field_group, info, item_path, item, async_payload_record + item_type, field_group, info, item_path, item, incremental_data_record ) return ( @@ -1884,14 +1915,14 @@ async def execute_stream_async_iterator_item( ) except StopAsyncIteration as raw_error: - async_payload_record.set_is_completed_async_iterator() + incremental_data_record.set_is_completed_async_iterator() raise StopAsyncIteration from raw_error except Exception as raw_error: self.handle_field_error( - raw_error, item_type, field_group, item_path, async_payload_record + raw_error, item_type, field_group, item_path, incremental_data_record ) - self.filter_subsequent_payloads(item_path, async_payload_record) + self.filter_subsequent_payloads(item_path, incremental_data_record) async def execute_stream_async_iterator( self, @@ -1902,16 +1933,16 @@ async def execute_stream_async_iterator( item_type: GraphQLOutputType, path: Path, label: str | None = None, - parent_context: AsyncPayloadRecord | None = None, + parent_context: IncrementalDataRecord | None = None, ) -> None: """Execute stream iterator.""" index = initial_index - previous_async_payload_record = parent_context + previous_incremental_data_record = parent_context while True: item_path = 
Path(path, index, None) - async_payload_record = StreamItemsRecord( - label, item_path, async_iterator, previous_async_payload_record, self + incremental_data_record = StreamItemsRecord( + label, item_path, async_iterator, previous_incremental_data_record, self ) try: @@ -1920,19 +1951,19 @@ async def execute_stream_async_iterator( field_group, info, item_type, - async_payload_record, + incremental_data_record, item_path, ) except StopAsyncIteration: - if async_payload_record.errors: - async_payload_record.add_items(None) # pragma: no cover + if incremental_data_record.errors: + incremental_data_record.add_items(None) # pragma: no cover else: - del self.subsequent_payloads[async_payload_record] + del self.subsequent_payloads[incremental_data_record] break except GraphQLError as error: - async_payload_record.errors.append(error) - self.filter_subsequent_payloads(path, async_payload_record) - async_payload_record.add_items(None) + incremental_data_record.errors.append(error) + self.filter_subsequent_payloads(path, incremental_data_record) + incremental_data_record.add_items(None) if async_iterator: # pragma: no cover else with suppress(Exception): await async_iterator.aclose() # type: ignore @@ -1941,65 +1972,65 @@ async def execute_stream_async_iterator( self._canceled_iterators.add(async_iterator) break - async_payload_record.add_items([data]) + incremental_data_record.add_items([data]) - previous_async_payload_record = async_payload_record + previous_incremental_data_record = incremental_data_record index += 1 def filter_subsequent_payloads( self, null_path: Path, - current_async_record: AsyncPayloadRecord | None = None, + current_incremental_data_record: IncrementalDataRecord | None = None, ) -> None: """Filter subsequent payloads.""" null_path_list = null_path.as_list() - for async_record in list(self.subsequent_payloads): - if async_record is current_async_record: + for incremental_data_record in list(self.subsequent_payloads): + if incremental_data_record is 
current_incremental_data_record: # don't remove payload from where error originates continue - if async_record.path[: len(null_path_list)] != null_path_list: - # async_record points to a path unaffected by this payload + if incremental_data_record.path[: len(null_path_list)] != null_path_list: + # incremental_data_record points to a path unaffected by this payload continue - # async_record path points to nulled error field + # incremental_data_record path points to nulled error field if ( - isinstance(async_record, StreamItemsRecord) - and async_record.async_iterator + isinstance(incremental_data_record, StreamItemsRecord) + and incremental_data_record.async_iterator ): - self._canceled_iterators.add(async_record.async_iterator) - del self.subsequent_payloads[async_record] + self._canceled_iterators.add(incremental_data_record.async_iterator) + del self.subsequent_payloads[incremental_data_record] def get_completed_incremental_results(self) -> list[IncrementalResult]: """Get completed incremental results.""" incremental_results: list[IncrementalResult] = [] append_result = incremental_results.append subsequent_payloads = list(self.subsequent_payloads) - for async_payload_record in subsequent_payloads: + for incremental_data_record in subsequent_payloads: incremental_result: IncrementalResult - if not async_payload_record.completed.is_set(): + if not incremental_data_record.completed.is_set(): continue - del self.subsequent_payloads[async_payload_record] - if isinstance(async_payload_record, StreamItemsRecord): - items = async_payload_record.items - if async_payload_record.is_completed_async_iterator: + del self.subsequent_payloads[incremental_data_record] + if isinstance(incremental_data_record, StreamItemsRecord): + items = incremental_data_record.items + if incremental_data_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload continue # pragma: no cover incremental_result = IncrementalStreamResult( items, - 
async_payload_record.errors - if async_payload_record.errors + incremental_data_record.errors + if incremental_data_record.errors else None, - async_payload_record.path, - async_payload_record.label, + incremental_data_record.path, + incremental_data_record.label, ) else: - data = async_payload_record.data + data = incremental_data_record.data incremental_result = IncrementalDeferResult( data, - async_payload_record.errors - if async_payload_record.errors + incremental_data_record.errors + if incremental_data_record.errors else None, - async_payload_record.path, - async_payload_record.label, + incremental_data_record.path, + incremental_data_record.label, ) append_result(incremental_result) @@ -2604,7 +2635,7 @@ class DeferredFragmentRecord: label: str | None path: list[str | int] data: dict[str, Any] | None - parent_context: AsyncPayloadRecord | None + parent_context: IncrementalDataRecord | None completed: Event _context: ExecutionContext _data: AwaitableOrValue[dict[str, Any] | None] @@ -2614,7 +2645,7 @@ def __init__( self, label: str | None, path: Path | None, - parent_context: AsyncPayloadRecord | None, + parent_context: IncrementalDataRecord | None, context: ExecutionContext, ) -> None: self.label = label @@ -2669,7 +2700,7 @@ class StreamItemsRecord: label: str | None path: list[str | int] items: list[str] | None - parent_context: AsyncPayloadRecord | None + parent_context: IncrementalDataRecord | None async_iterator: AsyncIterator[Any] | None is_completed_async_iterator: bool completed: Event @@ -2682,7 +2713,7 @@ def __init__( label: str | None, path: Path | None, async_iterator: AsyncIterator[Any] | None, - parent_context: AsyncPayloadRecord | None, + parent_context: IncrementalDataRecord | None, context: ExecutionContext, ) -> None: self.label = label @@ -2738,4 +2769,4 @@ def set_is_completed_async_iterator(self) -> None: self._items_added.set() -AsyncPayloadRecord = Union[DeferredFragmentRecord, StreamItemsRecord] +IncrementalDataRecord = 
Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 6d8cd369..23740237 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -43,10 +43,15 @@ def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): def execute_field( - self, parent_type, source, field_group, path, async_payload_record=None + self, + parent_type, + source, + field_group, + path, + incremental_data_record=None, ): result = super().execute_field( - parent_type, source, field_group, path, async_payload_record + parent_type, source, field_group, path, incremental_data_record ) return result * 2 # type: ignore diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 091484e2..b8c722a2 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -176,16 +176,17 @@ def can_print_stream_record(): context = ExecutionContext.build(schema, parse("{ hero { id } }")) assert isinstance(context, ExecutionContext) record = StreamItemsRecord(None, None, None, None, context) - assert str(record) == "StreamRecord(path=[])" + assert str(record) == "StreamItemsRecord(path=[])" record = StreamItemsRecord( "foo", Path(None, "bar", "Bar"), None, record, context ) assert ( - str(record) == "StreamRecord(" "path=['bar'], label='foo', parent_context)" + str(record) == "StreamItemsRecord(" + "path=['bar'], label='foo', parent_context)" ) record.items = ["hello", "world"] assert ( - str(record) == "StreamRecord(" + str(record) == "StreamItemsRecord(" "path=['bar'], label='foo', parent_context, items)" ) From 27e0cd080e9c6c0edc8520f2607b6163db6459ad Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Apr 2024 21:58:36 +0200 Subject: [PATCH 166/230] executeFields: update grouped field set variable name Replicates graphql/graphql-js@e17a0897f67305626c6090ce0174f101b7a96fc4 --- src/graphql/execution/execute.py | 4 
++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index dba46135..6d53fd86 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -882,7 +882,7 @@ def execute_fields( parent_type: GraphQLObjectType, source_value: Any, path: Path | None, - fields: GroupedFieldSet, + grouped_field_set: GroupedFieldSet, incremental_data_record: IncrementalDataRecord | None = None, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -894,7 +894,7 @@ def execute_fields( is_awaitable = self.is_awaitable awaitable_fields: list[str] = [] append_awaitable = awaitable_fields.append - for response_name, field_group in fields.items(): + for response_name, field_group in grouped_field_set.items(): field_path = Path(path, response_name, parent_type.name) result = self.execute_field( parent_type, From ddfe2bf52a8d0b38006e789bc1ea397ad36780a8 Mon Sep 17 00:00:00 2001 From: "Juang, Yi-Lin" Date: Wed, 10 Apr 2024 00:07:02 +0800 Subject: [PATCH 167/230] Enable recursive type definitions (#218) --- src/graphql/language/visitor.py | 2 +- src/graphql/pyutils/path.py | 6 +++--- .../validation/rules/overlapping_fields_can_be_merged.py | 7 +------ tests/execution/test_executor.py | 2 ++ 4 files changed, 7 insertions(+), 10 deletions(-) diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 0538c2e2..be410466 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -162,7 +162,7 @@ class Stack(NamedTuple): idx: int keys: tuple[Node, ...] 
edits: list[tuple[int | str, Node]] - prev: Any # 'Stack' (python/mypy/issues/731) + prev: Stack def visit( diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py index 089f5970..cc2202c4 100644 --- a/src/graphql/pyutils/path.py +++ b/src/graphql/pyutils/path.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, NamedTuple +from typing import NamedTuple __all__ = ["Path"] @@ -10,7 +10,7 @@ class Path(NamedTuple): """A generic path of string or integer indices""" - prev: Any # Optional['Path'] (python/mypy/issues/731) + prev: Path | None """path with the previous indices""" key: str | int """current index in the path (string or integer)""" @@ -25,7 +25,7 @@ def as_list(self) -> list[str | int]: """Return a list of the path keys.""" flattened: list[str | int] = [] append = flattened.append - curr: Path = self + curr: Path | None = self while curr: append(curr.key) curr = curr.prev diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index b79bf2a6..b077958b 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -38,8 +38,6 @@ from typing_extensions import TypeAlias -MYPY = False - __all__ = ["OverlappingFieldsCanBeMergedRule"] @@ -98,10 +96,7 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N # Field name and reason. ConflictReason: TypeAlias = Tuple[str, "ConflictReasonMessage"] # Reason is a string, or a nested list of conflicts. -if MYPY: # recursive types not fully supported yet (/python/mypy/issues/731) - ConflictReasonMessage: TypeAlias = Union[str, List] -else: - ConflictReasonMessage: TypeAlias = Union[str, List[ConflictReason]] +ConflictReasonMessage: TypeAlias = Union[str, List[ConflictReason]] # Tuple defining a field node in a context. 
NodeAndDef: TypeAlias = Tuple[GraphQLCompositeType, FieldNode, Optional[GraphQLField]] # Dictionary of lists of those. diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index b75aaad5..391a1de6 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -308,9 +308,11 @@ def resolve_type(_val, _info, _type): prev, key, typename = path assert key == "l2" assert typename == "SomeObject" + assert prev is not None prev, key, typename = prev assert key == 0 assert typename is None + assert prev is not None prev, key, typename = prev assert key == "l1" assert typename == "SomeQuery" From e7f3b01156088939aae52fa3929178f94563ddfb Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 9 Apr 2024 20:20:41 +0200 Subject: [PATCH 168/230] locate async iterator errors to the collection Replicates graphql/graphql-js@bd558cbbba55f041c739bd7d899c42df148d9251 --- src/graphql/execution/execute.py | 26 +++++++++++--------------- tests/execution/test_lists.py | 4 ++-- tests/execution/test_stream.py | 8 ++++---- 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 6d53fd86..f271a55c 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1275,15 +1275,9 @@ async def complete_async_iterator_value( except StopAsyncIteration: break except Exception as raw_error: - self.handle_field_error( - raw_error, - item_type, - field_group, - item_path, - incremental_data_record, - ) - completed_results.append(None) - break + raise located_error( + raw_error, field_group, path.as_list() + ) from raw_error if complete_list_item_value( value, completed_results, @@ -1897,6 +1891,7 @@ async def execute_stream_async_iterator_item( info: GraphQLResolveInfo, item_type: GraphQLOutputType, incremental_data_record: StreamItemsRecord, + path: Path, item_path: Path, ) -> Any: """Execute stream iterator item.""" @@ -1904,20 +1899,20 
@@ async def execute_stream_async_iterator_item( raise StopAsyncIteration try: item = await anext(async_iterator) + except StopAsyncIteration as raw_error: + incremental_data_record.set_is_completed_async_iterator() + raise StopAsyncIteration from raw_error + except Exception as raw_error: + raise located_error(raw_error, field_group, path.as_list()) from raw_error + try: completed_item = self.complete_value( item_type, field_group, info, item_path, item, incremental_data_record ) - return ( await completed_item if self.is_awaitable(completed_item) else completed_item ) - - except StopAsyncIteration as raw_error: - incremental_data_record.set_is_completed_async_iterator() - raise StopAsyncIteration from raw_error - except Exception as raw_error: self.handle_field_error( raw_error, item_type, field_group, item_path, incremental_data_record @@ -1952,6 +1947,7 @@ async def execute_stream_async_iterator( info, item_type, incremental_data_record, + path, item_path, ) except StopAsyncIteration: diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 91e1bb3f..3d2bb8fa 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -210,8 +210,8 @@ async def list_field(): raise RuntimeError("bad") assert await _complete(list_field()) == ( - {"listField": ["two", "4", None]}, - [{"message": "bad", "locations": [(1, 3)], "path": ["listField", 2]}], + {"listField": None}, + [{"message": "bad", "locations": [(1, 3)], "path": ["listField"]}], ) @pytest.mark.asyncio() diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index b8c722a2..7de06bd2 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -893,10 +893,10 @@ async def friend_list(_info): { "message": "bad", "locations": [{"line": 3, "column": 15}], - "path": ["friendList", 1], + "path": ["friendList"], } ], - "data": {"friendList": [{"name": "Luke", "id": "1"}, None]}, + "data": {"friendList": None}, } @pytest.mark.asyncio() 
@@ -929,13 +929,13 @@ async def friend_list(_info): { "incremental": [ { - "items": [None], + "items": None, "path": ["friendList", 1], "errors": [ { "message": "bad", "locations": [{"line": 3, "column": 15}], - "path": ["friendList", 1], + "path": ["friendList"], }, ], }, From 4863578b8c566a8bdd381e0b11b910ea002ce731 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 14 Apr 2024 18:57:49 +0200 Subject: [PATCH 169/230] execute: move publishing code into separate file Replicates graphql/graphql-js@04e948bbc4972ddeb61443fab540e03fdff457b4 --- docs/conf.py | 5 +- src/graphql/execution/__init__.py | 14 +- src/graphql/execution/execute.py | 533 +---------------- .../execution/incremental_publisher.py | 562 ++++++++++++++++++ tests/execution/test_defer.py | 2 +- tests/execution/test_stream.py | 2 +- 6 files changed, 587 insertions(+), 531 deletions(-) create mode 100644 src/graphql/execution/incremental_publisher.py diff --git a/docs/conf.py b/docs/conf.py index 6f719343..ee49ab0e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -165,10 +165,11 @@ graphql.execution.collect_fields.FieldsAndPatches graphql.execution.map_async_iterable.map_async_iterable graphql.execution.Middleware -graphql.execution.execute.DeferredFragmentRecord graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments -graphql.execution.execute.StreamItemsRecord +graphql.execution.incremental_publisher.IncrementalPublisherMixin +graphql.execution.incremental_publisher.StreamItemsRecord +graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor graphql.type.definition.TContext diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index e33d4ce7..aec85be1 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -17,17 +17,19 @@ ExecutionResult, ExperimentalIncrementalExecutionResults, 
InitialIncrementalExecutionResult, - SubsequentIncrementalExecutionResult, - IncrementalDeferResult, - IncrementalStreamResult, - IncrementalResult, FormattedExecutionResult, FormattedInitialIncrementalExecutionResult, + Middleware, +) +from .incremental_publisher import ( FormattedSubsequentIncrementalExecutionResult, FormattedIncrementalDeferResult, - FormattedIncrementalStreamResult, FormattedIncrementalResult, - Middleware, + FormattedIncrementalStreamResult, + IncrementalDeferResult, + IncrementalResult, + IncrementalStreamResult, + SubsequentIncrementalExecutionResult, ) from .async_iterables import map_async_iterable from .middleware import MiddlewareManager diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index f271a55c..7d3d85ed 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -2,7 +2,7 @@ from __future__ import annotations -from asyncio import Event, as_completed, ensure_future, gather, shield, sleep, wait_for +from asyncio import ensure_future, gather, shield, wait_for from collections.abc import Mapping from contextlib import suppress from typing import ( @@ -12,7 +12,6 @@ AsyncIterator, Awaitable, Callable, - Generator, Iterable, Iterator, List, @@ -81,11 +80,19 @@ collect_fields, collect_subfields, ) +from .incremental_publisher import ( + ASYNC_DELAY, + DeferredFragmentRecord, + FormattedIncrementalResult, + IncrementalDataRecord, + IncrementalPublisherMixin, + IncrementalResult, + StreamItemsRecord, + SubsequentIncrementalExecutionResult, +) from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values -ASYNC_DELAY = 1 / 512 # wait time in seconds for deferring execution - try: # pragma: no cover anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) @@ -104,24 +111,13 @@ async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 "execute_sync", "experimental_execute_incrementally", "subscribe", - 
"IncrementalDataRecord", - "DeferredFragmentRecord", - "StreamItemsRecord", "ExecutionResult", "ExecutionContext", "ExperimentalIncrementalExecutionResults", "FormattedExecutionResult", - "FormattedIncrementalDeferResult", - "FormattedIncrementalResult", - "FormattedIncrementalStreamResult", "FormattedInitialIncrementalExecutionResult", - "FormattedSubsequentIncrementalExecutionResult", - "IncrementalDeferResult", - "IncrementalResult", - "IncrementalStreamResult", "InitialIncrementalExecutionResult", "Middleware", - "SubsequentIncrementalExecutionResult", ] @@ -218,199 +214,6 @@ def __ne__(self, other: object) -> bool: return not self == other -class FormattedIncrementalDeferResult(TypedDict, total=False): - """Formatted incremental deferred execution result""" - - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] - path: list[str | int] - label: str - extensions: dict[str, Any] - - -class IncrementalDeferResult: - """Incremental deferred execution result""" - - data: dict[str, Any] | None - errors: list[GraphQLError] | None - path: list[str | int] | None - label: str | None - extensions: dict[str, Any] | None - - __slots__ = "data", "errors", "path", "label", "extensions" - - def __init__( - self, - data: dict[str, Any] | None = None, - errors: list[GraphQLError] | None = None, - path: list[str | int] | None = None, - label: str | None = None, - extensions: dict[str, Any] | None = None, - ) -> None: - self.data = data - self.errors = errors - self.path = path - self.label = label - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.path: - args.append(f"path={self.path!r}") - if self.label: - args.append(f"label={self.label!r}") - if self.extensions: - args.append(f"extensions={self.extensions}") - return f"{name}({', '.join(args)})" - - @property - def formatted(self) -> FormattedIncrementalDeferResult: - """Get execution result 
formatted according to the specification.""" - formatted: FormattedIncrementalDeferResult = {"data": self.data} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.path is not None: - formatted["path"] = self.path - if self.label is not None: - formatted["label"] = self.label - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> bool: - if isinstance(other, dict): - return ( - other.get("data") == self.data - and other.get("errors") == self.errors - and ("path" not in other or other["path"] == self.path) - and ("label" not in other or other["label"] == self.label) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) - ) - if isinstance(other, tuple): - size = len(other) - return ( - 1 < size < 6 - and (self.data, self.errors, self.path, self.label, self.extensions)[ - :size - ] - == other - ) - return ( - isinstance(other, self.__class__) - and other.data == self.data - and other.errors == self.errors - and other.path == self.path - and other.label == self.label - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other - - -class FormattedIncrementalStreamResult(TypedDict, total=False): - """Formatted incremental stream execution result""" - - items: list[Any] | None - errors: list[GraphQLFormattedError] - path: list[str | int] - label: str - extensions: dict[str, Any] - - -class IncrementalStreamResult: - """Incremental streamed execution result""" - - items: list[Any] | None - errors: list[GraphQLError] | None - path: list[str | int] | None - label: str | None - extensions: dict[str, Any] | None - - __slots__ = "items", "errors", "path", "label", "extensions" - - def __init__( - self, - items: list[Any] | None = None, - errors: list[GraphQLError] | None = None, - path: list[str | int] | None = None, - label: str | None = None, - extensions: 
dict[str, Any] | None = None, - ) -> None: - self.items = items - self.errors = errors - self.path = path - self.label = label - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"] - if self.path: - args.append(f"path={self.path!r}") - if self.label: - args.append(f"label={self.label!r}") - if self.extensions: - args.append(f"extensions={self.extensions}") - return f"{name}({', '.join(args)})" - - @property - def formatted(self) -> FormattedIncrementalStreamResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedIncrementalStreamResult = {"items": self.items} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.path is not None: - formatted["path"] = self.path - if self.label is not None: - formatted["label"] = self.label - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> bool: - if isinstance(other, dict): - return ( - other.get("items") == self.items - and other.get("errors") == self.errors - and ("path" not in other or other["path"] == self.path) - and ("label" not in other or other["label"] == self.label) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) - ) - if isinstance(other, tuple): - size = len(other) - return ( - 1 < size < 6 - and (self.items, self.errors, self.path, self.label, self.extensions)[ - :size - ] - == other - ) - return ( - isinstance(other, self.__class__) - and other.items == self.items - and other.errors == self.errors - and other.path == self.path - and other.label == self.label - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other - - -FormattedIncrementalResult = Union[ - FormattedIncrementalDeferResult, FormattedIncrementalStreamResult -] - 
-IncrementalResult = Union[IncrementalDeferResult, IncrementalStreamResult] - - class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): """Formatted initial incremental execution result""" @@ -514,90 +317,6 @@ def __ne__(self, other: object) -> bool: return not self == other -class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): - """Formatted subsequent incremental execution result""" - - incremental: list[FormattedIncrementalResult] - hasNext: bool - extensions: dict[str, Any] - - -class SubsequentIncrementalExecutionResult: - """Subsequent incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. - """ - - __slots__ = "has_next", "incremental", "extensions" - - incremental: Sequence[IncrementalResult] | None - has_next: bool - extensions: dict[str, Any] | None - - def __init__( - self, - incremental: Sequence[IncrementalResult] | None = None, - has_next: bool = False, - extensions: dict[str, Any] | None = None, - ) -> None: - self.incremental = incremental - self.has_next = has_next - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") - if self.has_next: - args.append("has_next") - if self.extensions: - args.append(f"extensions={self.extensions}") - return f"{name}({', '.join(args)})" - - @property - def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedSubsequentIncrementalExecutionResult = {} - if self.incremental: - formatted["incremental"] = [result.formatted for result in self.incremental] - formatted["hasNext"] = self.has_next - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> 
bool: - if isinstance(other, dict): - return ( - ("incremental" not in other or other["incremental"] == self.incremental) - and ("hasNext" in other and other["hasNext"] == self.has_next) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) - ) - if isinstance(other, tuple): - size = len(other) - return ( - 1 < size < 4 - and ( - self.incremental, - self.has_next, - self.extensions, - )[:size] - == other - ) - return ( - isinstance(other, self.__class__) - and other.incremental == self.incremental - and other.has_next == self.has_next - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other - - class StreamArguments(NamedTuple): """Arguments of the stream directive""" @@ -615,7 +334,7 @@ class ExperimentalIncrementalExecutionResults(NamedTuple): Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] -class ExecutionContext: +class ExecutionContext(IncrementalPublisherMixin): """Data that must be available at all points during query execution. 
Namely, schema of the type system that is currently executing, and the fragments @@ -632,7 +351,6 @@ class ExecutionContext: type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver errors: list[GraphQLError] - subsequent_payloads: dict[IncrementalDataRecord, None] # used as ordered set middleware_manager: MiddlewareManager | None is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( @@ -1973,89 +1691,6 @@ async def execute_stream_async_iterator( previous_incremental_data_record = incremental_data_record index += 1 - def filter_subsequent_payloads( - self, - null_path: Path, - current_incremental_data_record: IncrementalDataRecord | None = None, - ) -> None: - """Filter subsequent payloads.""" - null_path_list = null_path.as_list() - for incremental_data_record in list(self.subsequent_payloads): - if incremental_data_record is current_incremental_data_record: - # don't remove payload from where error originates - continue - if incremental_data_record.path[: len(null_path_list)] != null_path_list: - # incremental_data_record points to a path unaffected by this payload - continue - # incremental_data_record path points to nulled error field - if ( - isinstance(incremental_data_record, StreamItemsRecord) - and incremental_data_record.async_iterator - ): - self._canceled_iterators.add(incremental_data_record.async_iterator) - del self.subsequent_payloads[incremental_data_record] - - def get_completed_incremental_results(self) -> list[IncrementalResult]: - """Get completed incremental results.""" - incremental_results: list[IncrementalResult] = [] - append_result = incremental_results.append - subsequent_payloads = list(self.subsequent_payloads) - for incremental_data_record in subsequent_payloads: - incremental_result: IncrementalResult - if not incremental_data_record.completed.is_set(): - continue - del self.subsequent_payloads[incremental_data_record] - if isinstance(incremental_data_record, StreamItemsRecord): - items = 
incremental_data_record.items - if incremental_data_record.is_completed_async_iterator: - # async iterable resolver finished but there may be pending payload - continue # pragma: no cover - incremental_result = IncrementalStreamResult( - items, - incremental_data_record.errors - if incremental_data_record.errors - else None, - incremental_data_record.path, - incremental_data_record.label, - ) - else: - data = incremental_data_record.data - incremental_result = IncrementalDeferResult( - data, - incremental_data_record.errors - if incremental_data_record.errors - else None, - incremental_data_record.path, - incremental_data_record.label, - ) - - append_result(incremental_result) - - return incremental_results - - async def yield_subsequent_payloads( - self, - ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: - """Yield subsequent payloads.""" - payloads = self.subsequent_payloads - has_next = bool(payloads) - - while has_next: - for awaitable in as_completed(payloads): - await awaitable - - incremental = self.get_completed_incremental_results() - - has_next = bool(payloads) - - if incremental or not has_next: - yield SubsequentIncrementalExecutionResult( - incremental=incremental or None, has_next=has_next - ) - - if not has_next: - break - UNEXPECTED_EXPERIMENTAL_DIRECTIVES = ( "The provided schema unexpectedly contains experimental directives" @@ -2622,147 +2257,3 @@ def assert_event_stream(result: Any) -> AsyncIterable: raise GraphQLError(msg) return result - - -class DeferredFragmentRecord: - """A record collecting data marked with the defer directive""" - - errors: list[GraphQLError] - label: str | None - path: list[str | int] - data: dict[str, Any] | None - parent_context: IncrementalDataRecord | None - completed: Event - _context: ExecutionContext - _data: AwaitableOrValue[dict[str, Any] | None] - _data_added: Event - - def __init__( - self, - label: str | None, - path: Path | None, - parent_context: IncrementalDataRecord | None, - context: 
ExecutionContext, - ) -> None: - self.label = label - self.path = path.as_list() if path else [] - self.parent_context = parent_context - self.errors = [] - self._context = context - context.subsequent_payloads[self] = None - self.data = self._data = None - self.completed = Event() - self._data_added = Event() - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] - if self.label: - args.append(f"label={self.label!r}") - if self.parent_context: - args.append("parent_context") - if self.data is not None: - args.append("data") - return f"{name}({', '.join(args)})" - - def __await__(self) -> Generator[Any, None, dict[str, Any] | None]: - return self.wait().__await__() - - async def wait(self) -> dict[str, Any] | None: - """Wait until data is ready.""" - if self.parent_context: - await self.parent_context.completed.wait() - _data = self._data - data = ( - await _data # type: ignore - if self._context.is_awaitable(_data) - else _data - ) - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.completed.set() - self.data = data - return data - - def add_data(self, data: AwaitableOrValue[dict[str, Any] | None]) -> None: - """Add data to the record.""" - self._data = data - self._data_added.set() - - -class StreamItemsRecord: - """A record collecting items marked with the stream directive""" - - errors: list[GraphQLError] - label: str | None - path: list[str | int] - items: list[str] | None - parent_context: IncrementalDataRecord | None - async_iterator: AsyncIterator[Any] | None - is_completed_async_iterator: bool - completed: Event - _context: ExecutionContext - _items: AwaitableOrValue[list[Any] | None] - _items_added: Event - - def __init__( - self, - label: str | None, - path: Path | None, - async_iterator: AsyncIterator[Any] | None, - parent_context: IncrementalDataRecord | None, - context: ExecutionContext, - ) -> None: - self.label = label - self.path = path.as_list() if path else [] - 
self.parent_context = parent_context - self.async_iterator = async_iterator - self.errors = [] - self._context = context - context.subsequent_payloads[self] = None - self.items = self._items = None - self.completed = Event() - self._items_added = Event() - self.is_completed_async_iterator = False - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] - if self.label: - args.append(f"label={self.label!r}") - if self.parent_context: - args.append("parent_context") - if self.items is not None: - args.append("items") - return f"{name}({', '.join(args)})" - - def __await__(self) -> Generator[Any, None, list[str] | None]: - return self.wait().__await__() - - async def wait(self) -> list[str] | None: - """Wait until data is ready.""" - await self._items_added.wait() - if self.parent_context: - await self.parent_context.completed.wait() - _items = self._items - items = ( - await _items # type: ignore - if self._context.is_awaitable(_items) - else _items - ) - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.items = items - self.completed.set() - return items - - def add_items(self, items: AwaitableOrValue[list[Any] | None]) -> None: - """Add items to the record.""" - self._items = items - self._items_added.set() - - def set_is_completed_async_iterator(self) -> None: - """Mark as completed.""" - self.is_completed_async_iterator = True - self._items_added.set() - - -IncrementalDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py new file mode 100644 index 00000000..b6d9bcf4 --- /dev/null +++ b/src/graphql/execution/incremental_publisher.py @@ -0,0 +1,562 @@ +"""Incremental Publisher""" + +from __future__ import annotations + +from asyncio import Event, as_completed, sleep +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + 
Generator, + Sequence, + Union, +) + +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + +if TYPE_CHECKING: + from ..error import GraphQLError, GraphQLFormattedError + from ..pyutils import AwaitableOrValue, Path + +__all__ = [ + "ASYNC_DELAY", + "DeferredFragmentRecord", + "FormattedIncrementalDeferResult", + "FormattedIncrementalResult", + "FormattedIncrementalStreamResult", + "FormattedSubsequentIncrementalExecutionResult", + "IncrementalDataRecord", + "IncrementalDeferResult", + "IncrementalPublisherMixin", + "IncrementalResult", + "IncrementalStreamResult", + "StreamItemsRecord", + "SubsequentIncrementalExecutionResult", +] + + +ASYNC_DELAY = 1 / 512 # wait time in seconds for deferring execution + + +class FormattedIncrementalDeferResult(TypedDict, total=False): + """Formatted incremental deferred execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + path: list[str | int] + label: str + extensions: dict[str, Any] + + +class IncrementalDeferResult: + """Incremental deferred execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + path: list[str | int] | None + label: str | None + extensions: dict[str, Any] | None + + __slots__ = "data", "errors", "path", "label", "extensions" + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + path: list[str | int] | None = None, + label: str | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.path = path + self.label = label + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.path: + args.append(f"path={self.path!r}") + if 
self.label: + args.append(f"label={self.label!r}") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedIncrementalDeferResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedIncrementalDeferResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.path is not None: + formatted["path"] = self.path + if self.label is not None: + formatted["label"] = self.label + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and other.get("errors") == self.errors + and ("path" not in other or other["path"] == self.path) + and ("label" not in other or other["label"] == self.label) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and (self.data, self.errors, self.path, self.label, self.extensions)[ + :size + ] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.path == self.path + and other.label == self.label + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class FormattedIncrementalStreamResult(TypedDict, total=False): + """Formatted incremental stream execution result""" + + items: list[Any] | None + errors: list[GraphQLFormattedError] + path: list[str | int] + label: str + extensions: dict[str, Any] + + +class IncrementalStreamResult: + """Incremental streamed execution result""" + + items: list[Any] | None + errors: list[GraphQLError] | None + path: list[str | int] | None + label: str | None + extensions: dict[str, Any] | 
None + + __slots__ = "items", "errors", "path", "label", "extensions" + + def __init__( + self, + items: list[Any] | None = None, + errors: list[GraphQLError] | None = None, + path: list[str | int] | None = None, + label: str | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.items = items + self.errors = errors + self.path = path + self.label = label + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"] + if self.path: + args.append(f"path={self.path!r}") + if self.label: + args.append(f"label={self.label!r}") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedIncrementalStreamResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedIncrementalStreamResult = {"items": self.items} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.path is not None: + formatted["path"] = self.path + if self.label is not None: + formatted["label"] = self.label + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("items") == self.items + and other.get("errors") == self.errors + and ("path" not in other or other["path"] == self.path) + and ("label" not in other or other["label"] == self.label) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and (self.items, self.errors, self.path, self.label, self.extensions)[ + :size + ] + == other + ) + return ( + isinstance(other, self.__class__) + and other.items == self.items + and other.errors == self.errors + and other.path == self.path + and other.label 
== self.label + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +FormattedIncrementalResult = Union[ + FormattedIncrementalDeferResult, FormattedIncrementalStreamResult +] + +IncrementalResult = Union[IncrementalDeferResult, IncrementalStreamResult] + + +class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): + """Formatted subsequent incremental execution result""" + + incremental: list[FormattedIncrementalResult] + hasNext: bool + extensions: dict[str, Any] + + +class SubsequentIncrementalExecutionResult: + """Subsequent incremental execution result. + + - ``has_next`` is True if a future payload is expected. + - ``incremental`` is a list of the results from defer/stream directives. + """ + + __slots__ = "has_next", "incremental", "extensions" + + incremental: Sequence[IncrementalResult] | None + has_next: bool + extensions: dict[str, Any] | None + + def __init__( + self, + incremental: Sequence[IncrementalResult] | None = None, + has_next: bool = False, + extensions: dict[str, Any] | None = None, + ) -> None: + self.incremental = incremental + self.has_next = has_next + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [] + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.has_next: + args.append("has_next") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedSubsequentIncrementalExecutionResult = {} + if self.incremental: + formatted["incremental"] = [result.formatted for result in self.incremental] + formatted["hasNext"] = self.has_next + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def 
__eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + ("incremental" not in other or other["incremental"] == self.incremental) + and ("hasNext" in other and other["hasNext"] == self.has_next) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 4 + and ( + self.incremental, + self.has_next, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.incremental == self.incremental + and other.has_next == self.has_next + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class IncrementalPublisherMixin: + """Mixin to add incremental publishing to the ExecutionContext.""" + + _canceled_iterators: set[AsyncIterator] + subsequent_payloads: dict[IncrementalDataRecord, None] # used as ordered set + + is_awaitable: Callable[[Any], TypeGuard[Awaitable]] + + def filter_subsequent_payloads( + self, + null_path: Path, + current_incremental_data_record: IncrementalDataRecord | None = None, + ) -> None: + """Filter subsequent payloads.""" + null_path_list = null_path.as_list() + for incremental_data_record in list(self.subsequent_payloads): + if incremental_data_record is current_incremental_data_record: + # don't remove payload from where error originates + continue + if incremental_data_record.path[: len(null_path_list)] != null_path_list: + # incremental_data_record points to a path unaffected by this payload + continue + # incremental_data_record path points to nulled error field + if ( + isinstance(incremental_data_record, StreamItemsRecord) + and incremental_data_record.async_iterator + ): + self._canceled_iterators.add(incremental_data_record.async_iterator) + del self.subsequent_payloads[incremental_data_record] + + def get_completed_incremental_results(self) -> list[IncrementalResult]: + """Get completed incremental 
results.""" + incremental_results: list[IncrementalResult] = [] + append_result = incremental_results.append + subsequent_payloads = list(self.subsequent_payloads) + for incremental_data_record in subsequent_payloads: + incremental_result: IncrementalResult + if not incremental_data_record.completed.is_set(): + continue + del self.subsequent_payloads[incremental_data_record] + if isinstance(incremental_data_record, StreamItemsRecord): + items = incremental_data_record.items + if incremental_data_record.is_completed_async_iterator: + # async iterable resolver finished but there may be pending payload + continue # pragma: no cover + incremental_result = IncrementalStreamResult( + items, + incremental_data_record.errors + if incremental_data_record.errors + else None, + incremental_data_record.path, + incremental_data_record.label, + ) + else: + data = incremental_data_record.data + incremental_result = IncrementalDeferResult( + data, + incremental_data_record.errors + if incremental_data_record.errors + else None, + incremental_data_record.path, + incremental_data_record.label, + ) + + append_result(incremental_result) + + return incremental_results + + async def yield_subsequent_payloads( + self, + ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + """Yield subsequent payloads.""" + payloads = self.subsequent_payloads + has_next = bool(payloads) + + while has_next: + for awaitable in as_completed(payloads): + await awaitable + + incremental = self.get_completed_incremental_results() + + has_next = bool(payloads) + + if incremental or not has_next: + yield SubsequentIncrementalExecutionResult( + incremental=incremental or None, has_next=has_next + ) + + if not has_next: + break + + +class DeferredFragmentRecord: + """A record collecting data marked with the defer directive""" + + errors: list[GraphQLError] + label: str | None + path: list[str | int] + data: dict[str, Any] | None + parent_context: IncrementalDataRecord | None + completed: Event + 
_publisher: IncrementalPublisherMixin + _data: AwaitableOrValue[dict[str, Any] | None] + _data_added: Event + + def __init__( + self, + label: str | None, + path: Path | None, + parent_context: IncrementalDataRecord | None, + context: IncrementalPublisherMixin, + ) -> None: + self.label = label + self.path = path.as_list() if path else [] + self.parent_context = parent_context + self.errors = [] + self._publisher = context + context.subsequent_payloads[self] = None + self.data = self._data = None + self.completed = Event() + self._data_added = Event() + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + if self.parent_context: + args.append("parent_context") + if self.data is not None: + args.append("data") + return f"{name}({', '.join(args)})" + + def __await__(self) -> Generator[Any, None, dict[str, Any] | None]: + return self.wait().__await__() + + async def wait(self) -> dict[str, Any] | None: + """Wait until data is ready.""" + if self.parent_context: + await self.parent_context.completed.wait() + _data = self._data + data = ( + await _data # type: ignore + if self._publisher.is_awaitable(_data) + else _data + ) + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.completed.set() + self.data = data + return data + + def add_data(self, data: AwaitableOrValue[dict[str, Any] | None]) -> None: + """Add data to the record.""" + self._data = data + self._data_added.set() + + +class StreamItemsRecord: + """A record collecting items marked with the stream directive""" + + errors: list[GraphQLError] + label: str | None + path: list[str | int] + items: list[str] | None + parent_context: IncrementalDataRecord | None + async_iterator: AsyncIterator[Any] | None + is_completed_async_iterator: bool + completed: Event + _publisher: IncrementalPublisherMixin + _items: AwaitableOrValue[list[Any] | None] + _items_added: Event + + def __init__( + 
self, + label: str | None, + path: Path | None, + async_iterator: AsyncIterator[Any] | None, + parent_context: IncrementalDataRecord | None, + context: IncrementalPublisherMixin, + ) -> None: + self.label = label + self.path = path.as_list() if path else [] + self.parent_context = parent_context + self.async_iterator = async_iterator + self.errors = [] + self._publisher = context + context.subsequent_payloads[self] = None + self.items = self._items = None + self.completed = Event() + self._items_added = Event() + self.is_completed_async_iterator = False + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + if self.parent_context: + args.append("parent_context") + if self.items is not None: + args.append("items") + return f"{name}({', '.join(args)})" + + def __await__(self) -> Generator[Any, None, list[str] | None]: + return self.wait().__await__() + + async def wait(self) -> list[str] | None: + """Wait until data is ready.""" + await self._items_added.wait() + if self.parent_context: + await self.parent_context.completed.wait() + _items = self._items + items = ( + await _items # type: ignore + if self._publisher.is_awaitable(_items) + else _items + ) + await sleep(ASYNC_DELAY) # always defer completion a little bit + self.items = items + self.completed.set() + return items + + def add_items(self, items: AwaitableOrValue[list[Any] | None]) -> None: + """Add items to the record.""" + self._items = items + self._items_added.set() + + def set_is_completed_async_iterator(self) -> None: + """Mark as completed.""" + self.is_completed_async_iterator = True + self._items_added.set() + + +IncrementalDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 6ca1984b..b43ba00a 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -15,7 +15,7 @@ execute, 
experimental_execute_incrementally, ) -from graphql.execution.execute import DeferredFragmentRecord +from graphql.execution.incremental_publisher import DeferredFragmentRecord from graphql.language import DocumentNode, parse from graphql.pyutils import Path, is_awaitable from graphql.type import ( diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 7de06bd2..bffc26c5 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -12,7 +12,7 @@ IncrementalStreamResult, experimental_execute_incrementally, ) -from graphql.execution.execute import StreamItemsRecord +from graphql.execution.incremental_publisher import StreamItemsRecord from graphql.language import DocumentNode, parse from graphql.pyutils import Path from graphql.type import ( From 601129be74e7361e3da145b1ea4e9721df6a8f84 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 14 Apr 2024 19:06:54 +0200 Subject: [PATCH 170/230] Update dependencies --- poetry.lock | 50 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- tox.ini | 2 +- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/poetry.lock b/poetry.lock index ad771a31..d3828409 100644 --- a/poetry.lock +++ b/poetry.lock @@ -388,13 +388,13 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.13.3" +version = "3.13.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, - {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, ] [package.extras] @@ -404,13 +404,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -998,28 +998,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.3.5" +version = "0.3.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aef5bd3b89e657007e1be6b16553c8813b221ff6d92c7526b7e0227450981eac"}, - {file = "ruff-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:89b1e92b3bd9fca249153a97d23f29bed3992cff414b222fcd361d763fc53f12"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e55771559c89272c3ebab23326dc23e7f813e492052391fe7950c1a5a139d89"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabc62195bf54b8a7876add6e789caae0268f34582333cda340497c886111c39"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a05f3793ba25f194f395578579c546ca5d83e0195f992edc32e5907d142bfa3"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dfd3504e881082959b4160ab02f7a205f0fadc0a9619cc481982b6837b2fd4c0"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87258e0d4b04046cf1d6cc1c56fadbf7a880cc3de1f7294938e923234cf9e498"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:712e71283fc7d9f95047ed5f793bc019b0b0a29849b14664a60fd66c23b96da1"}, - {file = "ruff-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a532a90b4a18d3f722c124c513ffb5e5eaff0cc4f6d3aa4bda38e691b8600c9f"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:122de171a147c76ada00f76df533b54676f6e321e61bd8656ae54be326c10296"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d80a6b18a6c3b6ed25b71b05eba183f37d9bc8b16ace9e3d700997f00b74660b"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a7b6e63194c68bca8e71f81de30cfa6f58ff70393cf45aab4c20f158227d5936"}, - {file = "ruff-0.3.5-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:a759d33a20c72f2dfa54dae6e85e1225b8e302e8ac655773aff22e542a300985"}, - {file = "ruff-0.3.5-py3-none-win32.whl", hash = "sha256:9d8605aa990045517c911726d21293ef4baa64f87265896e491a05461cae078d"}, - {file = "ruff-0.3.5-py3-none-win_amd64.whl", hash = "sha256:dc56bb16a63c1303bd47563c60482a1512721053d93231cf7e9e1c6954395a0e"}, - {file = "ruff-0.3.5-py3-none-win_arm64.whl", hash = "sha256:faeeae9905446b975dcf6d4499dc93439b131f1443ee264055c5716dd947af55"}, - {file = "ruff-0.3.5.tar.gz", hash = "sha256:a067daaeb1dc2baf9b82a32dae67d154d95212080c80435eb052d95da647763d"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, + {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, + {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, + {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, + {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, + {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, + {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, + {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, ] [[package]] @@ -1490,4 +1490,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "4790d59c5e4684ad6eb1c04d97c0816cf12a9ef870f6b151da291f4bae56ecee" +content-hash = "81ca5ed14a2f62d3dd600ee6b318b9d5a4dd228c20045d68426743be3c1c0714" diff --git a/pyproject.toml b/pyproject.toml index 918bc418..1923f9b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.3.5,<0.4" +ruff = ">=0.3.7,<0.4" mypy = [ { version = "^1.9", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } diff --git a/tox.ini b/tox.ini index 1d965e63..8082ac27 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.3.5,<0.4 +deps 
= ruff>=0.3.7,<0.4 commands = ruff check src tests ruff format --check src tests From 3cf0d267b096ba92e7289d009800083166a5b7d9 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 14 Apr 2024 19:11:35 +0200 Subject: [PATCH 171/230] Bump patch version --- .bumpversion.cfg | 2 +- README.md | 2 +- docs/conf.py | 2 +- pyproject.toml | 2 +- src/graphql/version.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 61892e80..f9e8ce93 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a4 +current_version = 3.3.0a5 commit = False tag = False diff --git a/README.md b/README.md index 313af1ba..127c226b 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ reliable and compatible with GraphQL.js. The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0 and supports Python version 3.7 and newer. -You can also try out the latest alpha version 3.3.0a4 of GraphQL-core +You can also try out the latest alpha version 3.3.0a5 of GraphQL-core which is up-to-date with GraphQL.js version 17.0.0a2. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. diff --git a/docs/conf.py b/docs/conf.py index ee49ab0e..d38172df 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = "3.3.0a4" +version = release = "3.3.0a5" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index 1923f9b5..f35be012 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a4" +version = "3.3.0a5" description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 10577318..7d09b483 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -8,7 +8,7 @@ __all__ = ["version", "version_info", "version_js", "version_info_js"] -version = "3.3.0a3" +version = "3.3.0a5" version_js = "17.0.0a2" From 639405906432273cb2aa3b37b1f6e48bc6b5e962 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Jul 2024 18:46:52 +0200 Subject: [PATCH 172/230] Improve config for development with VS Code --- .github/workflows/test.yml | 2 +- .gitignore | 1 + pyproject.toml | 23 +++++++++++++-- src/graphql/type/definition.py | 53 ++++++++++++++++++---------------- 4 files changed, 51 insertions(+), 28 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6f9c3ce6..e99059b8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -3,7 +3,7 @@ name: Tests on: [push, pull_request] jobs: - build: + tests: runs-on: ubuntu-latest strategy: diff --git a/.gitignore b/.gitignore index 6b51313b..a15cbec4 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ .tox/ .venv*/ .vs/ +.vscode/ build/ dist/ diff --git a/pyproject.toml b/pyproject.toml index f35be012..05e84f7c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -274,13 +274,32 @@ disallow_untyped_defs = true [[tool.mypy.overrides]] module = [ - "graphql.pyutils.frozen_dict", - "graphql.pyutils.frozen_list", "graphql.type.introspection", "tests.*" ] disallow_untyped_defs = false +[tool.pyright] +reportIncompatibleVariableOverride = false +reportMissingTypeArgument = false +reportUnknownArgumentType = false +reportUnknownMemberType = false 
+reportUnknownParameterType = false +reportUnnecessaryIsInstance = false +reportUnknownVariableType = false +ignore = ["**/test_*"] # test functions + +[tool.pylint.basic] +max-module-lines = 2000 + +[tool.pylint.messages_control] +disable = [ + "method-hidden", + "missing-module-docstring", # test modules + "redefined-outer-name", + "unused-variable", # test functions +] + [tool.pytest.ini_options] minversion = "7.4" # Only run benchmarks as tests. diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 4686d3d1..004a3e26 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -189,15 +189,15 @@ def assert_type(type_: Any) -> GraphQLType: # These types wrap and modify other types -GT = TypeVar("GT", bound=GraphQLType, covariant=True) # noqa: PLC0105 +GT_co = TypeVar("GT_co", bound=GraphQLType, covariant=True) -class GraphQLWrappingType(GraphQLType, Generic[GT]): +class GraphQLWrappingType(GraphQLType, Generic[GT_co]): """Base class for all GraphQL wrapping types""" - of_type: GT + of_type: GT_co - def __init__(self, type_: GT) -> None: + def __init__(self, type_: GT_co) -> None: self.of_type = type_ def __repr__(self) -> str: @@ -255,7 +255,7 @@ def _get_instance(cls, name: str, args: tuple) -> GraphQLNamedType: try: return cls.reserved_types[name] except KeyError: - return cls(**dict(args)) + return cls(**dict(args)) # pyright: ignore def __init__( self, @@ -429,8 +429,8 @@ def parse_literal( def to_kwargs(self) -> GraphQLScalarTypeKwargs: """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLScalarTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLScalarTypeKwargs( + super().to_kwargs(), # type: ignore serialize=None if self.serialize is GraphQLScalarType.serialize else self.serialize, @@ -552,11 +552,11 @@ def __copy__(self) -> GraphQLField: # pragma: no cover return self.__class__(**self.to_kwargs()) -TContext = TypeVar("TContext") +TContext = TypeVar("TContext") # 
pylint: disable=invalid-name try: - class GraphQLResolveInfo(NamedTuple, Generic[TContext]): + class GraphQLResolveInfo(NamedTuple, Generic[TContext]): # pyright: ignore """Collection of information passed to the resolvers. This is always passed as the first argument to the resolvers. @@ -768,8 +768,8 @@ def __init__( def to_kwargs(self) -> GraphQLObjectTypeKwargs: """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLObjectTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLObjectTypeKwargs( + super().to_kwargs(), # type: ignore fields=self.fields.copy(), interfaces=self.interfaces, is_type_of=self.is_type_of, @@ -873,8 +873,8 @@ def __init__( def to_kwargs(self) -> GraphQLInterfaceTypeKwargs: """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLInterfaceTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLInterfaceTypeKwargs( + super().to_kwargs(), # type: ignore fields=self.fields.copy(), interfaces=self.interfaces, resolve_type=self.resolve_type, @@ -978,8 +978,10 @@ def __init__( def to_kwargs(self) -> GraphQLUnionTypeKwargs: """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLUnionTypeKwargs( # type: ignore - super().to_kwargs(), types=self.types, resolve_type=self.resolve_type + return GraphQLUnionTypeKwargs( + super().to_kwargs(), # type: ignore + types=self.types, + resolve_type=self.resolve_type, ) def __copy__(self) -> GraphQLUnionType: # pragma: no cover @@ -1082,7 +1084,7 @@ def __init__( isinstance(name, str) for name in values ): try: - values = dict(values) + values = dict(values) # pyright: ignore except (TypeError, ValueError) as error: msg = ( f"{name} values must be an Enum or a mapping" @@ -1107,8 +1109,9 @@ def __init__( def to_kwargs(self) -> GraphQLEnumTypeKwargs: """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLEnumTypeKwargs( # type: ignore - super().to_kwargs(), values=self.values.copy() + return 
GraphQLEnumTypeKwargs( + super().to_kwargs(), # type: ignore + values=self.values.copy(), ) def __copy__(self) -> GraphQLEnumType: # pragma: no cover @@ -1331,8 +1334,8 @@ def out_type(value: dict[str, Any]) -> Any: def to_kwargs(self) -> GraphQLInputObjectTypeKwargs: """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLInputObjectTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLInputObjectTypeKwargs( + super().to_kwargs(), # type: ignore fields=self.fields.copy(), out_type=None if self.out_type is GraphQLInputObjectType.out_type @@ -1448,7 +1451,7 @@ def is_required_input_field(field: GraphQLInputField) -> bool: # Wrapper types -class GraphQLList(GraphQLWrappingType[GT]): +class GraphQLList(GraphQLWrappingType[GT_co]): """List Type Wrapper A list is a wrapping type which points to another type. Lists are often created @@ -1467,7 +1470,7 @@ def fields(self): } """ - def __init__(self, type_: GT) -> None: + def __init__(self, type_: GT_co) -> None: super().__init__(type_=type_) def __str__(self) -> str: @@ -1487,10 +1490,10 @@ def assert_list_type(type_: Any) -> GraphQLList: return type_ -GNT = TypeVar("GNT", bound="GraphQLNullableType", covariant=True) # noqa: PLC0105 +GNT_co = TypeVar("GNT_co", bound="GraphQLNullableType", covariant=True) -class GraphQLNonNull(GraphQLWrappingType[GNT]): +class GraphQLNonNull(GraphQLWrappingType[GNT_co]): """Non-Null Type Wrapper A non-null is a wrapping type which points to another type. Non-null types enforce @@ -1510,7 +1513,7 @@ class RowType(GraphQLObjectType): Note: the enforcement of non-nullability occurs within the executor. 
""" - def __init__(self, type_: GNT) -> None: + def __init__(self, type_: GNT_co) -> None: super().__init__(type_=type_) def __str__(self) -> str: From 9f19b40e72948c9910ad34385de816ff8f685b6f Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Jul 2024 19:35:35 +0200 Subject: [PATCH 173/230] Update dependencies --- docs/conf.py | 2 + docs/requirements.txt | 4 +- poetry.lock | 403 ++++++++++++++++++++++++------------------ pyproject.toml | 8 +- tox.ini | 6 +- 5 files changed, 245 insertions(+), 178 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d38172df..90ff122b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -172,6 +172,8 @@ graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor +graphql.type.definition.GT_co +graphql.type.definition.GNT_co graphql.type.definition.TContext graphql.type.schema.InterfaceImplementations graphql.validation.validation_context.VariableUsage diff --git a/docs/requirements.txt b/docs/requirements.txt index f4f9b8af..f52741c8 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=5.2.1,<6 -sphinx_rtd_theme>=1,<2 +sphinx>=7.3.7,<8 +sphinx_rtd_theme>=2.0.0,<3 diff --git a/poetry.lock b/poetry.lock index d3828409..b1aa2914 100644 --- a/poetry.lock +++ b/poetry.lock @@ -28,6 +28,23 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "babel" +version = "2.15.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = 
"python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "bump2version" version = "1.0.1" @@ -52,13 +69,13 @@ files = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -259,63 +276,63 @@ toml = ["tomli"] [[package]] name = "coverage" -version = "7.4.4" +version = "7.5.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = 
"coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = 
"coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, + {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, + {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, + {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, + {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, + {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, + {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, + {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, + {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, + {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, + {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, + {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, + {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, + {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, + {file = 
"coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, + {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, + {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, + {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, + {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, + {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, + {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, + {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, + {file = 
"coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, + {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, + {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, + {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, + {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, + {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, + {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, ] [package.dependencies] @@ -359,13 +376,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = 
"sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -388,18 +405,18 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.13.4" +version = "3.15.4" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -446,22 +463,22 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.0.0" description 
= "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -476,13 +493,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -609,38 +626,38 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.9.0" +version = "1.10.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = 
"mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = 
"mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, + {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, + {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, + {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, + {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, + {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, + {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, + {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, + 
{file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, + {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, + {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, + {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, + {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, + {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, + {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, + {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, + {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, + {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, + {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, + {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, + {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, + {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, + {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, ] [package.dependencies] @@ -676,6 +693,17 @@ files = [ {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + [[package]] name = "platformdirs" version = "4.0.0" @@ -696,18 +724,19 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" @@ -729,13 +758,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -779,24 +808,38 @@ files = [ plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pyproject-api" -version = "1.6.1" +version = "1.7.1" description = "API to interact with the python pyproject.toml based projects" optional = false python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, - {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, + {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, + {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, ] [package.dependencies] -packaging = ">=23.1" +packaging = ">=24.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] +docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] [[package]] name = "pytest" @@ -823,13 +866,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.1.1" +version = "8.2.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = 
"pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, - {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, ] [package.dependencies] @@ -837,21 +880,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.4,<2.0" +pluggy = ">=1.5,<2.0" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.21.1" +version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, - {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, + {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, + {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, ] [package.dependencies] @@ -864,13 +907,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-asyncio" -version = "0.23.6" +version = "0.23.7" 
description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.23.6.tar.gz", hash = "sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f"}, - {file = "pytest_asyncio-0.23.6-py3-none-any.whl", hash = "sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a"}, + {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, + {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, ] [package.dependencies] @@ -996,30 +1039,52 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "ruff" -version = "0.3.7" +version = "0.5.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, - {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = 
"sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, - {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, - {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, - {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, - {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, + {file = "ruff-0.5.1-py3-none-linux_armv6l.whl", hash = "sha256:6ecf968fcf94d942d42b700af18ede94b07521bd188aaf2cd7bc898dd8cb63b6"}, + {file = "ruff-0.5.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:204fb0a472f00f2e6280a7c8c7c066e11e20e23a37557d63045bf27a616ba61c"}, + {file = "ruff-0.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d235968460e8758d1e1297e1de59a38d94102f60cafb4d5382033c324404ee9d"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38beace10b8d5f9b6bdc91619310af6d63dd2019f3fb2d17a2da26360d7962fa"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e478d2f09cf06add143cf8c4540ef77b6599191e0c50ed976582f06e588c994"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0368d765eec8247b8550251c49ebb20554cc4e812f383ff9f5bf0d5d94190b0"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a9a9a1b582e37669b0138b7c1d9d60b9edac880b80eb2baba6d0e566bdeca4d"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdd9f723e16003623423affabcc0a807a66552ee6a29f90eddad87a40c750b78"}, + {file = "ruff-0.5.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be9fd62c1e99539da05fcdc1e90d20f74aec1b7a1613463ed77870057cd6bd96"}, + {file = 
"ruff-0.5.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e216fc75a80ea1fbd96af94a6233d90190d5b65cc3d5dfacf2bd48c3e067d3e1"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c4c2112e9883a40967827d5c24803525145e7dab315497fae149764979ac7929"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dfaf11c8a116394da3b65cd4b36de30d8552fa45b8119b9ef5ca6638ab964fa3"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d7ceb9b2fe700ee09a0c6b192c5ef03c56eb82a0514218d8ff700f6ade004108"}, + {file = "ruff-0.5.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bac6288e82f6296f82ed5285f597713acb2a6ae26618ffc6b429c597b392535c"}, + {file = "ruff-0.5.1-py3-none-win32.whl", hash = "sha256:5c441d9c24ec09e1cb190a04535c5379b36b73c4bc20aa180c54812c27d1cca4"}, + {file = "ruff-0.5.1-py3-none-win_amd64.whl", hash = "sha256:b1789bf2cd3d1b5a7d38397cac1398ddf3ad7f73f4de01b1e913e2abc7dfc51d"}, + {file = "ruff-0.5.1-py3-none-win_arm64.whl", hash = "sha256:2875b7596a740cbbd492f32d24be73e545a4ce0a3daf51e4f4e609962bfd3cd2"}, + {file = "ruff-0.5.1.tar.gz", hash = "sha256:3164488aebd89b1745b47fd00604fb4358d774465f20d1fcd907f9c0fc1b0655"}, ] [[package]] @@ -1305,30 +1370,30 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.14.2" +version = "4.16.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.14.2-py3-none-any.whl", hash = "sha256:2900c4eb7b716af4a928a7fdc2ed248ad6575294ed7cfae2ea41203937422847"}, - {file = "tox-4.14.2.tar.gz", hash = "sha256:0defb44f6dafd911b61788325741cc6b2e12ea71f987ac025ad4d649f1f1a104"}, + {file = "tox-4.16.0-py3-none-any.whl", hash = "sha256:61e101061b977b46cf00093d4319438055290ad0009f84497a07bf2d2d7a06d0"}, + {file = "tox-4.16.0.tar.gz", hash = 
"sha256:43499656f9949edb681c0f907f86fbfee98677af9919d8b11ae5ad77cb800748"}, ] [package.dependencies] -cachetools = ">=5.3.2" +cachetools = ">=5.3.3" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.13.1" -packaging = ">=23.2" -platformdirs = ">=4.1" -pluggy = ">=1.3" -pyproject-api = ">=1.6.1" +filelock = ">=3.15.4" +packaging = ">=24.1" +platformdirs = ">=4.2.2" +pluggy = ">=1.5" +pyproject-api = ">=1.7.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.25" +virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.0.3)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=8.0.2)", "distlib (>=0.3.8)", "flaky (>=3.7)", "hatch-vcs (>=0.4)", "hatchling (>=1.21)", "psutil (>=5.9.7)", "pytest (>=7.4.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-xdist (>=3.5)", "re-assert (>=1.1)", "time-machine (>=2.13)", "wheel (>=0.42)"] +docs = ["furo (>=2024.5.6)", "sphinx (>=7.3.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.2)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] [[package]] name = "typed-ast" @@ -1393,13 +1458,13 @@ files = [ [[package]] name = "typing-extensions" -version = 
"4.11.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -1421,13 +1486,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -1438,13 +1503,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = 
"virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -1454,7 +1519,7 @@ importlib-metadata = {version = ">=6.6", markers = "python_version < \"3.8\""} platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] @@ -1474,20 +1539,20 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "81ca5ed14a2f62d3dd600ee6b318b9d5a4dd228c20045d68426743be3c1c0714" +content-hash = "3b73809139a631a17a57dcc7911caa72b3b69dd61899f5ba37f2a21d5d685bf9" diff --git a/pyproject.toml b/pyproject.toml index 05e84f7c..d11d18eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ Changelog = "https://github.com/graphql-python/graphql-core/releases" [tool.poetry.dependencies] python = "^3.7" typing-extensions = [ - { version = "^4.9", python = ">=3.8,<3.10" }, + { version = "^4.12", python = ">=3.8,<3.10" }, { version = "^4.7.1", python = "<3.8" }, ] @@ -52,7 +52,7 @@ optional = true [tool.poetry.group.test.dependencies] pytest = [ - { version = "^8.1", python = ">=3.8" }, + { version = "^8.2", python = ">=3.8" }, { version = "^7.4", python = "<3.8"} ] pytest-asyncio = [ @@ -75,9 +75,9 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.3.7,<0.4" +ruff = ">=0.5.1,<0.6" mypy = [ - { version = "^1.9", python = ">=3.8" }, + { version = "^1.10", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1.0,<2" diff --git a/tox.ini b/tox.ini index 8082ac27..1fe4caf1 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.3.7,<0.4 +deps = ruff>=0.5.1,<0.6 commands = 
ruff check src tests ruff format --check src tests @@ -25,8 +25,8 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.9,<2 - pytest>=8.0,<9 + mypy>=1.10,<2 + pytest>=8.2,<9 commands = mypy src tests From 331c7bcbc04031e3c8b865b3ba52f4587dde1abe Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Jul 2024 20:15:24 +0200 Subject: [PATCH 174/230] Minor typing and linting fixes --- src/graphql/execution/collect_fields.py | 5 +---- src/graphql/execution/execute.py | 6 +++--- src/graphql/language/block_string.py | 2 +- src/graphql/type/definition.py | 12 ++++++------ 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 0bfbdf2a..5cb5a723 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -281,10 +281,7 @@ def should_include_node( return False include = get_directive_values(GraphQLIncludeDirective, node, variable_values) - if include and not include["if"]: - return False - - return True + return not (include and not include["if"]) def does_fragment_condition_match( diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 7d3d85ed..b49bf981 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -354,7 +354,7 @@ class ExecutionContext(IncrementalPublisherMixin): middleware_manager: MiddlewareManager | None is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( - default_is_awaitable # type: ignore + default_is_awaitable ) def __init__( @@ -1113,13 +1113,13 @@ async def get_completed_results() -> list[Any]: index = awaitable_indices[0] completed_results[index] = await completed_results[index] else: - for index, result in zip( + for index, sub_result in zip( awaitable_indices, await gather( *(completed_results[index] for index in awaitable_indices) ), ): - completed_results[index] = result + completed_results[index] = 
sub_result return completed_results return get_completed_results() diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index ef5e1ccf..d784c236 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -97,7 +97,7 @@ def is_printable_as_block_string(value: str) -> bool: if is_empty_line: return False # has trailing empty lines - if has_common_indent and seen_non_empty_line: + if has_common_indent and seen_non_empty_line: # noqa: SIM103 return False # has internal indent return True diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 004a3e26..dbca4e66 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -783,7 +783,7 @@ def fields(self) -> GraphQLFieldMap: """Get provided fields, wrapping them as GraphQLFields if needed.""" try: fields = resolve_thunk(self._fields) - except Exception as error: # noqa: BLE001 + except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError msg = f"{self.name} fields cannot be resolved. {error}" raise cls(msg) from error @@ -801,7 +801,7 @@ def interfaces(self) -> tuple[GraphQLInterfaceType, ...]: interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) - except Exception as error: # noqa: BLE001 + except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError msg = f"{self.name} interfaces cannot be resolved. {error}" raise cls(msg) from error @@ -888,7 +888,7 @@ def fields(self) -> GraphQLFieldMap: """Get provided fields, wrapping them as GraphQLFields if needed.""" try: fields = resolve_thunk(self._fields) - except Exception as error: # noqa: BLE001 + except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError msg = f"{self.name} fields cannot be resolved. 
{error}" raise cls(msg) from error @@ -906,7 +906,7 @@ def interfaces(self) -> tuple[GraphQLInterfaceType, ...]: interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) - except Exception as error: # noqa: BLE001 + except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError msg = f"{self.name} interfaces cannot be resolved. {error}" raise cls(msg) from error @@ -992,7 +992,7 @@ def types(self) -> tuple[GraphQLObjectType, ...]: """Get provided types.""" try: types: Collection[GraphQLObjectType] = resolve_thunk(self._types) - except Exception as error: # noqa: BLE001 + except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError msg = f"{self.name} types cannot be resolved. {error}" raise cls(msg) from error @@ -1350,7 +1350,7 @@ def fields(self) -> GraphQLInputFieldMap: """Get provided fields, wrap them as GraphQLInputField if needed.""" try: fields = resolve_thunk(self._fields) - except Exception as error: # noqa: BLE001 + except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError msg = f"{self.name} fields cannot be resolved. 
{error}" raise cls(msg) from error From a5a2a655c7adb4c3e7706c2f14c6f1da1fb7cca5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 7 Jul 2024 20:28:27 +0200 Subject: [PATCH 175/230] Fix doc building warning on GitHub --- .github/workflows/lint.yml | 2 +- docs/conf.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f5ad7802..74f14604 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -3,7 +3,7 @@ name: Code quality on: [push, pull_request] jobs: - build: + lint: runs-on: ubuntu-latest steps: diff --git a/docs/conf.py b/docs/conf.py index 90ff122b..ad04aff5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -156,8 +156,9 @@ GraphQLErrorExtensions GraphQLFieldResolver GraphQLInputType -GraphQLTypeResolver +GraphQLNullableType GraphQLOutputType +GraphQLTypeResolver GroupedFieldSet IncrementalDataRecord Middleware From 876aef67b6f1e1f21b3b5db94c7ff03726cb6bdf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D7=A0=D7=99=D7=A8?= <88795475+nrbnlulu@users.noreply.github.com> Date: Sun, 7 Jul 2024 23:06:54 +0300 Subject: [PATCH 176/230] Support middlewares for subscriptions (#221) --- src/graphql/execution/execute.py | 2 ++ tests/execution/test_middleware.py | 42 +++++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 1 deletion(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index b49bf981..74356fa0 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -2043,6 +2043,7 @@ def subscribe( type_resolver: GraphQLTypeResolver | None = None, subscribe_field_resolver: GraphQLFieldResolver | None = None, execution_context_class: type[ExecutionContext] | None = None, + middleware: MiddlewareManager | None = None, ) -> AwaitableOrValue[AsyncIterator[ExecutionResult] | ExecutionResult]: """Create a GraphQL subscription. 
@@ -2082,6 +2083,7 @@ def subscribe( field_resolver, type_resolver, subscribe_field_resolver, + middleware=middleware, ) # Return early errors if execution context failed. diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 4927b52f..d4abba95 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -1,7 +1,8 @@ +import inspect from typing import Awaitable, cast import pytest -from graphql.execution import Middleware, MiddlewareManager, execute +from graphql.execution import Middleware, MiddlewareManager, execute, subscribe from graphql.language.parser import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -236,6 +237,45 @@ async def resolve(self, next_, *args, **kwargs): result = await awaitable_result assert result.data == {"field": "devloseR"} + @pytest.mark.asyncio() + async def subscription_simple(): + async def bar_resolve(_obj, _info): + yield "bar" + yield "oof" + + test_type = GraphQLObjectType( + "Subscription", + { + "bar": GraphQLField( + GraphQLString, + resolve=lambda message, _info: message, + subscribe=bar_resolve, + ), + }, + ) + doc = parse("subscription { bar }") + + async def reverse_middleware(next_, value, info, **kwargs): + awaitable_maybe = next_(value, info, **kwargs) + return awaitable_maybe[::-1] + + noop_type = GraphQLObjectType( + "Noop", + {"noop": GraphQLField(GraphQLString)}, + ) + schema = GraphQLSchema(query=noop_type, subscription=test_type) + + agen = subscribe( + schema, + doc, + middleware=MiddlewareManager(reverse_middleware), + ) + assert inspect.isasyncgen(agen) + data = (await agen.__anext__()).data + assert data == {"bar": "rab"} + data = (await agen.__anext__()).data + assert data == {"bar": "foo"} + def describe_without_manager(): def no_middleware(): doc = parse("{ field }") From 730ac15ca11a1df84f18cf26b62b5e4fe8609b4d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 21 Jul 2024 12:15:56 
+0200 Subject: [PATCH 177/230] Update dependencies and fix typing --- poetry.lock | 230 +++++++++++------------ pyproject.toml | 8 +- src/graphql/execution/async_iterables.py | 5 +- src/graphql/utilities/extend_schema.py | 16 +- tests/type/test_definition.py | 20 +- tests/validation/harness.py | 12 +- tox.ini | 4 +- 7 files changed, 150 insertions(+), 145 deletions(-) diff --git a/poetry.lock b/poetry.lock index b1aa2914..e548c1e9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,13 +58,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] @@ -276,63 +276,63 @@ toml = ["tomli"] [[package]] name = "coverage" -version = "7.5.4" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cfb5a4f556bb51aba274588200a46e4dd6b505fb1a5f8c5ae408222eb416f99"}, - {file = "coverage-7.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2174e7c23e0a454ffe12267a10732c273243b4f2d50d07544a91198f05c48f47"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2214ee920787d85db1b6a0bd9da5f8503ccc8fcd5814d90796c2f2493a2f4d2e"}, - {file = 
"coverage-7.5.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1137f46adb28e3813dec8c01fefadcb8c614f33576f672962e323b5128d9a68d"}, - {file = "coverage-7.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b385d49609f8e9efc885790a5a0e89f2e3ae042cdf12958b6034cc442de428d3"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b4a474f799456e0eb46d78ab07303286a84a3140e9700b9e154cfebc8f527016"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5cd64adedf3be66f8ccee418473c2916492d53cbafbfcff851cbec5a8454b136"}, - {file = "coverage-7.5.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e564c2cf45d2f44a9da56f4e3a26b2236504a496eb4cb0ca7221cd4cc7a9aca9"}, - {file = "coverage-7.5.4-cp310-cp310-win32.whl", hash = "sha256:7076b4b3a5f6d2b5d7f1185fde25b1e54eb66e647a1dfef0e2c2bfaf9b4c88c8"}, - {file = "coverage-7.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:018a12985185038a5b2bcafab04ab833a9a0f2c59995b3cec07e10074c78635f"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:db14f552ac38f10758ad14dd7b983dbab424e731588d300c7db25b6f89e335b5"}, - {file = "coverage-7.5.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3257fdd8e574805f27bb5342b77bc65578e98cbc004a92232106344053f319ba"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a6612c99081d8d6134005b1354191e103ec9705d7ba2754e848211ac8cacc6b"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d45d3cbd94159c468b9b8c5a556e3f6b81a8d1af2a92b77320e887c3e7a5d080"}, - {file = "coverage-7.5.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed550e7442f278af76d9d65af48069f1fb84c9f745ae249c1a183c1e9d1b025c"}, - {file = 
"coverage-7.5.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a892be37ca35eb5019ec85402c3371b0f7cda5ab5056023a7f13da0961e60da"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8192794d120167e2a64721d88dbd688584675e86e15d0569599257566dec9bf0"}, - {file = "coverage-7.5.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:820bc841faa502e727a48311948e0461132a9c8baa42f6b2b84a29ced24cc078"}, - {file = "coverage-7.5.4-cp311-cp311-win32.whl", hash = "sha256:6aae5cce399a0f065da65c7bb1e8abd5c7a3043da9dceb429ebe1b289bc07806"}, - {file = "coverage-7.5.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2e344d6adc8ef81c5a233d3a57b3c7d5181f40e79e05e1c143da143ccb6377d"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:54317c2b806354cbb2dc7ac27e2b93f97096912cc16b18289c5d4e44fc663233"}, - {file = "coverage-7.5.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:042183de01f8b6d531e10c197f7f0315a61e8d805ab29c5f7b51a01d62782747"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6bb74ed465d5fb204b2ec41d79bcd28afccf817de721e8a807d5141c3426638"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3d45ff86efb129c599a3b287ae2e44c1e281ae0f9a9bad0edc202179bcc3a2e"}, - {file = "coverage-7.5.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5013ed890dc917cef2c9f765c4c6a8ae9df983cd60dbb635df8ed9f4ebc9f555"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1014fbf665fef86cdfd6cb5b7371496ce35e4d2a00cda501cf9f5b9e6fced69f"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3684bc2ff328f935981847082ba4fdc950d58906a40eafa93510d1b54c08a66c"}, - {file = "coverage-7.5.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:581ea96f92bf71a5ec0974001f900db495488434a6928a2ca7f01eee20c23805"}, - {file = "coverage-7.5.4-cp312-cp312-win32.whl", hash = "sha256:73ca8fbc5bc622e54627314c1a6f1dfdd8db69788f3443e752c215f29fa87a0b"}, - {file = "coverage-7.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:cef4649ec906ea7ea5e9e796e68b987f83fa9a718514fe147f538cfeda76d7a7"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdd31315fc20868c194130de9ee6bfd99755cc9565edff98ecc12585b90be882"}, - {file = "coverage-7.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:02ff6e898197cc1e9fa375581382b72498eb2e6d5fc0b53f03e496cfee3fac6d"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05c16cf4b4c2fc880cb12ba4c9b526e9e5d5bb1d81313d4d732a5b9fe2b9d53"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5986ee7ea0795a4095ac4d113cbb3448601efca7f158ec7f7087a6c705304e4"}, - {file = "coverage-7.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df54843b88901fdc2f598ac06737f03d71168fd1175728054c8f5a2739ac3e4"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ab73b35e8d109bffbda9a3e91c64e29fe26e03e49addf5b43d85fc426dde11f9"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:aea072a941b033813f5e4814541fc265a5c12ed9720daef11ca516aeacd3bd7f"}, - {file = "coverage-7.5.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16852febd96acd953b0d55fc842ce2dac1710f26729b31c80b940b9afcd9896f"}, - {file = "coverage-7.5.4-cp38-cp38-win32.whl", hash = "sha256:8f894208794b164e6bd4bba61fc98bf6b06be4d390cf2daacfa6eca0a6d2bb4f"}, - {file = "coverage-7.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:e2afe743289273209c992075a5a4913e8d007d569a406ffed0bd080ea02b0633"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:b95c3a8cb0463ba9f77383d0fa8c9194cf91f64445a63fc26fb2327e1e1eb088"}, - {file = "coverage-7.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7564cc09dd91b5a6001754a5b3c6ecc4aba6323baf33a12bd751036c998be4"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44da56a2589b684813f86d07597fdf8a9c6ce77f58976727329272f5a01f99f7"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e16f3d6b491c48c5ae726308e6ab1e18ee830b4cdd6913f2d7f77354b33f91c8"}, - {file = "coverage-7.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbc5958cb471e5a5af41b0ddaea96a37e74ed289535e8deca404811f6cb0bc3d"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a04e990a2a41740b02d6182b498ee9796cf60eefe40cf859b016650147908029"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ddbd2f9713a79e8e7242d7c51f1929611e991d855f414ca9996c20e44a895f7c"}, - {file = "coverage-7.5.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b1ccf5e728ccf83acd313c89f07c22d70d6c375a9c6f339233dcf792094bcbf7"}, - {file = "coverage-7.5.4-cp39-cp39-win32.whl", hash = "sha256:56b4eafa21c6c175b3ede004ca12c653a88b6f922494b023aeb1e836df953ace"}, - {file = "coverage-7.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:65e528e2e921ba8fd67d9055e6b9f9e34b21ebd6768ae1c1723f4ea6ace1234d"}, - {file = "coverage-7.5.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:79b356f3dd5b26f3ad23b35c75dbdaf1f9e2450b6bcefc6d0825ea0aa3f86ca5"}, - {file = "coverage-7.5.4.tar.gz", hash = "sha256:a44963520b069e12789d0faea4e9fdb1e410cdc4aab89d94f7f55cbb7fef0353"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = 
"sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = 
"sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.dependencies] @@ -376,13 +376,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -626,44 +626,44 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.10.1" +version = "1.11.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = 
"mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, + {file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"}, + {file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"}, + {file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"}, + {file = "mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"}, + {file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"}, + {file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"}, + {file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"}, + {file = 
"mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"}, + {file = "mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"}, + {file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"}, + {file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"}, + {file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"}, + {file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"}, + {file = "mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"}, + {file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"}, + {file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"}, + {file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"}, + {file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"}, + {file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"}, + {file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"}, + {file = 
"mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"}, + {file = "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"}, + {file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"}, + {file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"}, + {file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"}, + {file = "mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"}, + {file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -866,13 +866,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.2.2" +version = "8.3.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, - {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, + {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"}, + {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"}, ] [package.dependencies] @@ 
-880,7 +880,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=1.5,<2.0" +pluggy = ">=1.5,<2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] @@ -907,13 +907,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-asyncio" -version = "0.23.7" +version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, - {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, ] [package.dependencies] @@ -1062,29 +1062,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.1" +version = "0.5.3" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.1-py3-none-linux_armv6l.whl", hash = "sha256:6ecf968fcf94d942d42b700af18ede94b07521bd188aaf2cd7bc898dd8cb63b6"}, - {file = "ruff-0.5.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:204fb0a472f00f2e6280a7c8c7c066e11e20e23a37557d63045bf27a616ba61c"}, - {file = "ruff-0.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d235968460e8758d1e1297e1de59a38d94102f60cafb4d5382033c324404ee9d"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38beace10b8d5f9b6bdc91619310af6d63dd2019f3fb2d17a2da26360d7962fa"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e478d2f09cf06add143cf8c4540ef77b6599191e0c50ed976582f06e588c994"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0368d765eec8247b8550251c49ebb20554cc4e812f383ff9f5bf0d5d94190b0"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3a9a9a1b582e37669b0138b7c1d9d60b9edac880b80eb2baba6d0e566bdeca4d"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdd9f723e16003623423affabcc0a807a66552ee6a29f90eddad87a40c750b78"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:be9fd62c1e99539da05fcdc1e90d20f74aec1b7a1613463ed77870057cd6bd96"}, - {file = "ruff-0.5.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e216fc75a80ea1fbd96af94a6233d90190d5b65cc3d5dfacf2bd48c3e067d3e1"}, - {file = "ruff-0.5.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c4c2112e9883a40967827d5c24803525145e7dab315497fae149764979ac7929"}, - {file = "ruff-0.5.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dfaf11c8a116394da3b65cd4b36de30d8552fa45b8119b9ef5ca6638ab964fa3"}, - {file = "ruff-0.5.1-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:d7ceb9b2fe700ee09a0c6b192c5ef03c56eb82a0514218d8ff700f6ade004108"}, - {file = "ruff-0.5.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:bac6288e82f6296f82ed5285f597713acb2a6ae26618ffc6b429c597b392535c"}, - {file = "ruff-0.5.1-py3-none-win32.whl", hash = "sha256:5c441d9c24ec09e1cb190a04535c5379b36b73c4bc20aa180c54812c27d1cca4"}, - {file = "ruff-0.5.1-py3-none-win_amd64.whl", hash = "sha256:b1789bf2cd3d1b5a7d38397cac1398ddf3ad7f73f4de01b1e913e2abc7dfc51d"}, - {file = "ruff-0.5.1-py3-none-win_arm64.whl", hash = "sha256:2875b7596a740cbbd492f32d24be73e545a4ce0a3daf51e4f4e609962bfd3cd2"}, - {file = "ruff-0.5.1.tar.gz", hash = "sha256:3164488aebd89b1745b47fd00604fb4358d774465f20d1fcd907f9c0fc1b0655"}, + {file = "ruff-0.5.3-py3-none-linux_armv6l.whl", hash = "sha256:b12424d9db7347fa63c5ed9af010003338c63c629fb9c9c6adb2aa4f5699729b"}, + {file = "ruff-0.5.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8d72c5684bbd4ed304a9a955ee2e67f57b35f6193222ade910cca8a805490e3"}, + {file = "ruff-0.5.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d2fc2cdb85ccac1e816cc9d5d8cedefd93661bd957756d902543af32a6b04a71"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4bc751240b2fab5d19254571bcacb315c7b0b00bf3c912d52226a82bbec073"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc697ec874fdd7c7ba0a85ec76ab38f8595224868d67f097c5ffc21136e72fcd"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e791d34d3557a3819b3704bc1f087293c821083fa206812842fa363f6018a192"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:76bb5a87fd397520b91a83eae8a2f7985236d42dd9459f09eef58e7f5c1d8316"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8cfc7a26422c78e94f1ec78ec02501bbad2df5834907e75afe474cc6b83a8c1"}, + {file = 
"ruff-0.5.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96066c4328a49fce2dd40e80f7117987369feec30ab771516cf95f1cc2db923c"}, + {file = "ruff-0.5.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfe9ab5bdc0b08470c3b261643ad54ea86edc32b64d1e080892d7953add3ad"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7704582a026fa02cca83efd76671a98ee6eb412c4230209efe5e2a006c06db62"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:08058d077e21b856d32ebf483443390e29dc44d927608dc8f092ff6776519da9"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77d49484429ed7c7e6e2e75a753f153b7b58f875bdb4158ad85af166a1ec1822"}, + {file = "ruff-0.5.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:642cbff6cbfa38d2566d8db086508d6f472edb136cbfcc4ea65997745368c29e"}, + {file = "ruff-0.5.3-py3-none-win32.whl", hash = "sha256:eafc45dd8bdc37a00b28e68cc038daf3ca8c233d73fea276dcd09defb1352841"}, + {file = "ruff-0.5.3-py3-none-win_amd64.whl", hash = "sha256:cbaec2ddf4f78e5e9ecf5456ea0f496991358a1d883862ed0b9e947e2b6aea93"}, + {file = "ruff-0.5.3-py3-none-win_arm64.whl", hash = "sha256:05fbd2cb404775d6cd7f2ff49504e2d20e13ef95fa203bd1ab22413af70d420b"}, + {file = "ruff-0.5.3.tar.gz", hash = "sha256:2a3eb4f1841771fa5b67a56be9c2d16fd3cc88e378bd86aaeaec2f7e6bcdd0a2"}, ] [[package]] @@ -1555,4 +1555,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "3b73809139a631a17a57dcc7911caa72b3b69dd61899f5ba37f2a21d5d685bf9" +content-hash = "ddc1250408232db6c9d443180037324541ece1547571f23e6ef8db8e2e0e09ea" diff --git a/pyproject.toml b/pyproject.toml index d11d18eb..2c9388fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,11 +52,11 @@ optional = true [tool.poetry.group.test.dependencies] pytest = [ - { version = "^8.2", python = ">=3.8" }, + { version = "^8.3", python = 
">=3.8" }, { version = "^7.4", python = "<3.8"} ] pytest-asyncio = [ - { version = "^0.23.6", python = ">=3.8" }, + { version = "^0.23.8", python = ">=3.8" }, { version = "~0.21.1", python = "<3.8"} ] pytest-benchmark = "^4.0" @@ -67,7 +67,7 @@ pytest-cov = [ pytest-describe = "^2.2" pytest-timeout = "^2.3" tox = [ - { version = "^4.14", python = ">=3.8" }, + { version = "^4.16", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] @@ -75,7 +75,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.5.1,<0.6" +ruff = ">=0.5.3,<0.6" mypy = [ { version = "^1.10", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py index 83d902c0..747a515d 100644 --- a/src/graphql/execution/async_iterables.py +++ b/src/graphql/execution/async_iterables.py @@ -8,6 +8,7 @@ AsyncIterable, Awaitable, Callable, + Generic, TypeVar, Union, ) @@ -20,7 +21,7 @@ AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] -class aclosing(AbstractAsyncContextManager): # noqa: N801 +class aclosing(AbstractAsyncContextManager, Generic[T]): # noqa: N801 """Async context manager for safely finalizing an async iterator or generator. Contrary to the function available via the standard library, this one silently @@ -52,6 +53,6 @@ async def map_async_iterable( If the inner iterator supports an `aclose()` method, it will be called when the generator finishes or closes. 
""" - async with aclosing(iterable) as items: # type: ignore + async with aclosing(iterable) as items: async for item in items: yield await callback(item) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 1b55b752..c5af8669 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -55,13 +55,16 @@ GraphQLInputField, GraphQLInputFieldMap, GraphQLInputObjectType, + GraphQLInputObjectTypeKwargs, GraphQLInputType, GraphQLInterfaceType, + GraphQLInterfaceTypeKwargs, GraphQLList, GraphQLNamedType, GraphQLNonNull, GraphQLNullableType, GraphQLObjectType, + GraphQLObjectTypeKwargs, GraphQLOutputType, GraphQLScalarType, GraphQLSchema, @@ -69,6 +72,7 @@ GraphQLSpecifiedByDirective, GraphQLType, GraphQLUnionType, + GraphQLUnionTypeKwargs, assert_schema, introspection_types, is_enum_type, @@ -326,7 +330,7 @@ def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: raise TypeError(msg) # pragma: no cover def extend_input_object_type_fields( - self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + self, kwargs: GraphQLInputObjectTypeKwargs, extensions: tuple[Any, ...] ) -> GraphQLInputFieldMap: """Extend GraphQL input object type fields.""" return { @@ -392,7 +396,7 @@ def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: ) def extend_object_type_interfaces( - self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + self, kwargs: GraphQLObjectTypeKwargs, extensions: tuple[Any, ...] ) -> list[GraphQLInterfaceType]: """Extend a GraphQL object type interface.""" return [ @@ -401,7 +405,7 @@ def extend_object_type_interfaces( ] + self.build_interfaces(extensions) def extend_object_type_fields( - self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + self, kwargs: GraphQLObjectTypeKwargs, extensions: tuple[Any, ...] 
) -> GraphQLFieldMap: """Extend GraphQL object type fields.""" return { @@ -430,7 +434,7 @@ def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: ) def extend_interface_type_interfaces( - self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + self, kwargs: GraphQLInterfaceTypeKwargs, extensions: tuple[Any, ...] ) -> list[GraphQLInterfaceType]: """Extend GraphQL interface type interfaces.""" return [ @@ -439,7 +443,7 @@ def extend_interface_type_interfaces( ] + self.build_interfaces(extensions) def extend_interface_type_fields( - self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + self, kwargs: GraphQLInterfaceTypeKwargs, extensions: tuple[Any, ...] ) -> GraphQLFieldMap: """Extend GraphQL interface type fields.""" return { @@ -470,7 +474,7 @@ def extend_interface_type( ) def extend_union_type_types( - self, kwargs: dict[str, Any], extensions: tuple[Any, ...] + self, kwargs: GraphQLUnionTypeKwargs, extensions: tuple[Any, ...] ) -> list[GraphQLObjectType]: """Extend types of a GraphQL union type.""" return [ diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index cb666b1c..88ce94f7 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -714,35 +714,35 @@ def defines_an_enum_using_an_enum_value_map(): assert enum_type.values == {"RED": red, "BLUE": blue} def defines_an_enum_using_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors) assert enum_type.values == { "RED": GraphQLEnumValue(1), "BLUE": GraphQLEnumValue(2), } def defines_an_enum_using_values_of_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors, names_as_values=False) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors, names_as_values=False) assert enum_type.values == { "RED": GraphQLEnumValue(1), 
"BLUE": GraphQLEnumValue(2), } def defines_an_enum_using_names_of_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors, names_as_values=True) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors, names_as_values=True) assert enum_type.values == { "RED": GraphQLEnumValue("RED"), "BLUE": GraphQLEnumValue("BLUE"), } def defines_an_enum_using_members_of_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors, names_as_values=None) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors, names_as_values=None) assert enum_type.values == { - "RED": GraphQLEnumValue(colors.RED), - "BLUE": GraphQLEnumValue(colors.BLUE), + "RED": GraphQLEnumValue(Colors.RED), + "BLUE": GraphQLEnumValue(Colors.BLUE), } def defines_an_enum_type_with_a_description(): diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 3689c8fe..1189e922 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from graphql.language import parse from graphql.utilities import build_schema @@ -9,7 +9,7 @@ if TYPE_CHECKING: from graphql.error import GraphQLError from graphql.type import GraphQLSchema - from graphql.validation import SDLValidationRule, ValidationRule + from graphql.validation import ASTValidationRule __all__ = [ "test_schema", @@ -125,9 +125,9 @@ def assert_validation_errors( - rule: type[ValidationRule], + rule: type[ASTValidationRule], query_str: str, - errors: list[GraphQLError], + errors: list[GraphQLError | dict[str, Any]], schema: GraphQLSchema = test_schema, ) -> list[GraphQLError]: doc = parse(query_str) @@ -137,9 +137,9 @@ def assert_validation_errors( def assert_sdl_validation_errors( - rule: type[SDLValidationRule], + rule: type[ASTValidationRule], sdl_str: str, - 
errors: list[GraphQLError], + errors: list[GraphQLError | dict[str, Any]], schema: GraphQLSchema | None = None, ) -> list[GraphQLError]: doc = parse(sdl_str) diff --git a/tox.ini b/tox.ini index 1fe4caf1..910443c5 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.5.1,<0.6 +deps = ruff>=0.5.3,<0.6 commands = ruff check src tests ruff format --check src tests @@ -26,7 +26,7 @@ commands = basepython = python3.12 deps = mypy>=1.10,<2 - pytest>=8.2,<9 + pytest>=8.3,<9 commands = mypy src tests From 238704db987d6bd530add0fa47e24ed48c6c449b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Aug 2024 21:06:47 +0200 Subject: [PATCH 178/230] introduce new IncrementalPublisher class Replicates graphql/graphql-js@d766c8eb34fb03c5ee63c51380f2b012924e970c --- docs/conf.py | 2 +- src/graphql/error/located_error.py | 4 +- src/graphql/execution/execute.py | 264 ++++++----- .../execution/incremental_publisher.py | 416 ++++++++++++------ tests/execution/test_defer.py | 9 +- tests/execution/test_executor.py | 1 + tests/execution/test_stream.py | 215 ++++++++- 7 files changed, 642 insertions(+), 269 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ad04aff5..50c2639e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -168,7 +168,7 @@ graphql.execution.Middleware graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments -graphql.execution.incremental_publisher.IncrementalPublisherMixin +graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.StreamItemsRecord graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.language.lexer.EscapeSequence diff --git a/src/graphql/error/located_error.py b/src/graphql/error/located_error.py index ab665787..31e423bc 100644 --- a/src/graphql/error/located_error.py +++ b/src/graphql/error/located_error.py @@ -13,6 +13,8 @@ __all__ = ["located_error"] 
+suppress_attribute_error = suppress(AttributeError) + def located_error( original_error: Exception, @@ -45,6 +47,6 @@ def located_error( except AttributeError: positions = None - with suppress(AttributeError): + with suppress_attribute_error: nodes = original_error.nodes or nodes # type: ignore return GraphQLError(message, nodes, source, positions, path, original_error) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 74356fa0..e370bcc1 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -82,10 +82,9 @@ ) from .incremental_publisher import ( ASYNC_DELAY, - DeferredFragmentRecord, FormattedIncrementalResult, IncrementalDataRecord, - IncrementalPublisherMixin, + IncrementalPublisher, IncrementalResult, StreamItemsRecord, SubsequentIncrementalExecutionResult, @@ -120,6 +119,9 @@ async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 "Middleware", ] +suppress_exceptions = suppress(Exception) +suppress_timeout_error = suppress(TimeoutError) + # Terminology # @@ -334,7 +336,7 @@ class ExperimentalIncrementalExecutionResults(NamedTuple): Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] -class ExecutionContext(IncrementalPublisherMixin): +class ExecutionContext: """Data that must be available at all points during query execution. 
Namely, schema of the type system that is currently executing, and the fragments @@ -351,6 +353,7 @@ class ExecutionContext(IncrementalPublisherMixin): type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver errors: list[GraphQLError] + incremental_publisher: IncrementalPublisher middleware_manager: MiddlewareManager | None is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( @@ -368,8 +371,8 @@ def __init__( field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, - subsequent_payloads: dict[IncrementalDataRecord, None], errors: list[GraphQLError], + incremental_publisher: IncrementalPublisher, middleware_manager: MiddlewareManager | None, is_awaitable: Callable[[Any], bool] | None, ) -> None: @@ -382,13 +385,14 @@ def __init__( self.field_resolver = field_resolver self.type_resolver = type_resolver self.subscribe_field_resolver = subscribe_field_resolver - self.subsequent_payloads = subsequent_payloads self.errors = errors + self.incremental_publisher = incremental_publisher self.middleware_manager = middleware_manager if is_awaitable: self.is_awaitable = is_awaitable self._canceled_iterators: set[AsyncIterator] = set() self._subfields_cache: dict[tuple, FieldsAndPatches] = {} + self._tasks: set[Awaitable] = set() @classmethod def build( @@ -474,8 +478,8 @@ def build( field_resolver or default_field_resolver, type_resolver or default_type_resolver, subscribe_field_resolver or default_field_resolver, - {}, [], + IncrementalPublisher(), middleware_manager, is_awaitable, ) @@ -510,8 +514,10 @@ def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: self.field_resolver, self.type_resolver, self.subscribe_field_resolver, - {}, [], + # no need to update incrementalPublisher, + # incremental delivery is not supported for subscriptions + self.incremental_publisher, self.middleware_manager, self.is_awaitable, ) @@ -716,7 +722,7 @@ async def 
await_completed() -> Any: path, incremental_data_record, ) - self.filter_subsequent_payloads(path, incremental_data_record) + self.incremental_publisher.filter(path, incremental_data_record) return None return await_completed() @@ -729,7 +735,7 @@ async def await_completed() -> Any: path, incremental_data_record, ) - self.filter_subsequent_payloads(path, incremental_data_record) + self.incremental_publisher.filter(path, incremental_data_record) return None return completed @@ -901,7 +907,7 @@ async def complete_awaitable_value( self.handle_field_error( raw_error, return_type, field_group, path, incremental_data_record ) - self.filter_subsequent_payloads(path, incremental_data_record) + self.incremental_publisher.filter(path, incremental_data_record) completed = None return completed @@ -968,7 +974,7 @@ async def complete_async_iterator_value( and isinstance(stream.initial_count, int) and index >= stream.initial_count ): - with suppress(TimeoutError): + with suppress_timeout_error: await wait_for( shield( self.execute_stream_async_iterator( @@ -1176,7 +1182,7 @@ async def await_completed() -> Any: item_path, incremental_data_record, ) - self.filter_subsequent_payloads( + self.incremental_publisher.filter( item_path, incremental_data_record ) return None @@ -1194,7 +1200,7 @@ async def await_completed() -> Any: item_path, incremental_data_record, ) - self.filter_subsequent_payloads(item_path, incremental_data_record) + self.incremental_publisher.filter(item_path, incremental_data_record) complete_results.append(None) return False @@ -1385,11 +1391,11 @@ def collect_and_execute_subfields( ) for sub_patch in sub_patches: - label, sub_patch_field_nodes = sub_patch + label, sub_patch_grouped_field_set = sub_patch self.execute_deferred_fragment( return_type, result, - sub_patch_field_nodes, + sub_patch_grouped_field_set, label, path, incremental_data_record, @@ -1473,8 +1479,11 @@ def execute_deferred_fragment( parent_context: IncrementalDataRecord | None = None, ) -> 
None: """Execute deferred fragment.""" - incremental_data_record = DeferredFragmentRecord( - label, path, parent_context, self + incremental_publisher = self.incremental_publisher + incremental_data_record = ( + incremental_publisher.prepare_new_deferred_fragment_record( + label, path, parent_context + ) ) try: awaitable_or_data = self.execute_fields( @@ -1483,23 +1492,35 @@ def execute_deferred_fragment( if self.is_awaitable(awaitable_or_data): - async def await_data( - awaitable: Awaitable[dict[str, Any]], - ) -> dict[str, Any] | None: - # noinspection PyShadowingNames + async def await_data() -> None: try: - return await awaitable + data = await awaitable_or_data # type: ignore except GraphQLError as error: - incremental_data_record.errors.append(error) - return None + incremental_publisher.add_field_error( + incremental_data_record, error + ) + incremental_publisher.complete_deferred_fragment_record( + incremental_data_record, None + ) + else: + incremental_publisher.complete_deferred_fragment_record( + incremental_data_record, data + ) - awaitable_or_data = await_data(awaitable_or_data) # type: ignore + self.add_task(await_data()) + + else: + incremental_publisher.complete_deferred_fragment_record( + incremental_data_record, + awaitable_or_data, # type: ignore + ) except GraphQLError as error: - incremental_data_record.errors.append(error) + incremental_publisher.add_field_error(incremental_data_record, error) + incremental_publisher.complete_deferred_fragment_record( + incremental_data_record, None + ) awaitable_or_data = None - incremental_data_record.add_data(awaitable_or_data) - def execute_stream_field( self, path: Path, @@ -1513,31 +1534,38 @@ def execute_stream_field( ) -> IncrementalDataRecord: """Execute stream field.""" is_awaitable = self.is_awaitable - incremental_data_record = StreamItemsRecord( - label, item_path, None, parent_context, self + incremental_publisher = self.incremental_publisher + incremental_data_record = 
incremental_publisher.prepare_new_stream_items_record( + label, item_path, parent_context ) completed_item: Any if is_awaitable(item): - # noinspection PyShadowingNames - async def await_completed_items() -> list[Any] | None: + + async def await_completed_awaitable_item() -> None: try: - return [ - await self.complete_awaitable_value( - item_type, - field_group, - info, - item_path, - item, - incremental_data_record, - ) - ] + value = await self.complete_awaitable_value( + item_type, + field_group, + info, + item_path, + item, + incremental_data_record, + ) except GraphQLError as error: - incremental_data_record.errors.append(error) - self.filter_subsequent_payloads(path, incremental_data_record) - return None + incremental_publisher.add_field_error( + incremental_data_record, error + ) + incremental_publisher.filter(path, incremental_data_record) + incremental_publisher.complete_stream_items_record( + incremental_data_record, None + ) + else: + incremental_publisher.complete_stream_items_record( + incremental_data_record, [value] + ) - incremental_data_record.add_items(await_completed_items()) + self.add_task(await_completed_awaitable_item()) return incremental_data_record try: @@ -1550,39 +1578,6 @@ async def await_completed_items() -> list[Any] | None: item, incremental_data_record, ) - - completed_items: Any - - if is_awaitable(completed_item): - # noinspection PyShadowingNames - async def await_completed_items() -> list[Any] | None: - # noinspection PyShadowingNames - try: - try: - return [await completed_item] - except Exception as raw_error: # pragma: no cover - self.handle_field_error( - raw_error, - item_type, - field_group, - item_path, - incremental_data_record, - ) - self.filter_subsequent_payloads( - item_path, incremental_data_record - ) - return [None] - except GraphQLError as error: # pragma: no cover - incremental_data_record.errors.append(error) - self.filter_subsequent_payloads( - path, incremental_data_record - ) - return None - - 
completed_items = await_completed_items() - else: - completed_items = [completed_item] - except Exception as raw_error: self.handle_field_error( raw_error, @@ -1591,15 +1586,51 @@ async def await_completed_items() -> list[Any] | None: item_path, incremental_data_record, ) - self.filter_subsequent_payloads(item_path, incremental_data_record) - completed_items = [None] - + completed_item = None + incremental_publisher.filter(item_path, incremental_data_record) except GraphQLError as error: - incremental_data_record.errors.append(error) - self.filter_subsequent_payloads(item_path, incremental_data_record) - completed_items = None + incremental_publisher.add_field_error(incremental_data_record, error) + incremental_publisher.filter(path, incremental_data_record) + incremental_publisher.complete_stream_items_record( + incremental_data_record, None + ) + return incremental_data_record - incremental_data_record.add_items(completed_items) + if is_awaitable(completed_item): + + async def await_completed_item() -> None: + try: + try: + value = await completed_item + except Exception as raw_error: # pragma: no cover + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + incremental_data_record, + ) + incremental_publisher.filter(item_path, incremental_data_record) + value = None + except GraphQLError as error: # pragma: no cover + incremental_publisher.add_field_error( + incremental_data_record, error + ) + incremental_publisher.filter(path, incremental_data_record) + incremental_publisher.complete_stream_items_record( + incremental_data_record, None + ) + else: + incremental_publisher.complete_stream_items_record( + incremental_data_record, [value] + ) + + self.add_task(await_completed_item()) + return incremental_data_record + + incremental_publisher.complete_stream_items_record( + incremental_data_record, [completed_item] + ) return incremental_data_record async def execute_stream_async_iterator_item( @@ -1614,11 +1645,13 @@ async def 
execute_stream_async_iterator_item( ) -> Any: """Execute stream iterator item.""" if async_iterator in self._canceled_iterators: - raise StopAsyncIteration + raise StopAsyncIteration # pragma: no cover try: item = await anext(async_iterator) except StopAsyncIteration as raw_error: - incremental_data_record.set_is_completed_async_iterator() + self.incremental_publisher.set_is_completed_async_iterator( + incremental_data_record + ) raise StopAsyncIteration from raw_error except Exception as raw_error: raise located_error(raw_error, field_group, path.as_list()) from raw_error @@ -1635,7 +1668,7 @@ async def execute_stream_async_iterator_item( self.handle_field_error( raw_error, item_type, field_group, item_path, incremental_data_record ) - self.filter_subsequent_payloads(item_path, incremental_data_record) + self.incremental_publisher.filter(item_path, incremental_data_record) async def execute_stream_async_iterator( self, @@ -1649,17 +1682,21 @@ async def execute_stream_async_iterator( parent_context: IncrementalDataRecord | None = None, ) -> None: """Execute stream iterator.""" + incremental_publisher = self.incremental_publisher index = initial_index previous_incremental_data_record = parent_context + done = False while True: item_path = Path(path, index, None) - incremental_data_record = StreamItemsRecord( - label, item_path, async_iterator, previous_incremental_data_record, self + incremental_data_record = ( + incremental_publisher.prepare_new_stream_items_record( + label, item_path, previous_incremental_data_record, async_iterator + ) ) try: - data = await self.execute_stream_async_iterator_item( + completed_item = await self.execute_stream_async_iterator_item( async_iterator, field_group, info, @@ -1668,29 +1705,39 @@ async def execute_stream_async_iterator( path, item_path, ) - except StopAsyncIteration: - if incremental_data_record.errors: - incremental_data_record.add_items(None) # pragma: no cover - else: - del 
self.subsequent_payloads[incremental_data_record] - break except GraphQLError as error: - incremental_data_record.errors.append(error) - self.filter_subsequent_payloads(path, incremental_data_record) - incremental_data_record.add_items(None) + incremental_publisher.add_field_error(incremental_data_record, error) + incremental_publisher.filter(path, incremental_data_record) + incremental_publisher.complete_stream_items_record( + incremental_data_record, None + ) if async_iterator: # pragma: no cover else - with suppress(Exception): + with suppress_exceptions: await async_iterator.aclose() # type: ignore # running generators cannot be closed since Python 3.8, # so we need to remember that this iterator is already canceled self._canceled_iterators.add(async_iterator) break + except StopAsyncIteration: + done = True - incremental_data_record.add_items([data]) + incremental_publisher.complete_stream_items_record( + incremental_data_record, + [completed_item], + ) + if done: + break previous_incremental_data_record = incremental_data_record index += 1 + def add_task(self, awaitable: Awaitable[Any]) -> None: + """Add the given task to the tasks set for later execution.""" + tasks = self._tasks + task = ensure_future(awaitable) + tasks.add(task) + task.add_done_callback(tasks.discard) + UNEXPECTED_EXPERIMENTAL_DIRECTIVES = ( "The provided schema unexpectedly contains experimental directives" @@ -1831,6 +1878,7 @@ def execute_impl( # at which point we still log the error and null the parent field, which # in this case is the entire response. 
errors = context.errors + incremental_publisher = context.incremental_publisher build_response = context.build_response try: result = context.execute_operation() @@ -1843,14 +1891,15 @@ async def await_result() -> Any: await result, # type: ignore errors, ) - if context.subsequent_payloads: + incremental_publisher.publish_initial() + if incremental_publisher.has_next(): return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( initial_result.data, initial_result.errors, has_next=True, ), - subsequent_results=context.yield_subsequent_payloads(), + subsequent_results=incremental_publisher.subscribe(), ) except GraphQLError as error: errors.append(error) @@ -1860,14 +1909,15 @@ async def await_result() -> Any: return await_result() initial_result = build_response(result, errors) # type: ignore - if context.subsequent_payloads: + incremental_publisher.publish_initial() + if incremental_publisher.has_next(): return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( initial_result.data, initial_result.errors, has_next=True, ), - subsequent_results=context.yield_subsequent_payloads(), + subsequent_results=incremental_publisher.subscribe(), ) except GraphQLError as error: errors.append(error) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index b6d9bcf4..fb660e85 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -2,15 +2,16 @@ from __future__ import annotations -from asyncio import Event, as_completed, sleep +from asyncio import Event, ensure_future, gather +from contextlib import suppress from typing import ( TYPE_CHECKING, Any, AsyncGenerator, AsyncIterator, Awaitable, - Callable, - Generator, + Collection, + NamedTuple, Sequence, Union, ) @@ -19,15 +20,11 @@ from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict -try: 
- from typing import TypeGuard -except ImportError: # Python < 3.10 - from typing_extensions import TypeGuard if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError - from ..pyutils import AwaitableOrValue, Path + from ..pyutils import Path __all__ = [ "ASYNC_DELAY", @@ -38,7 +35,7 @@ "FormattedSubsequentIncrementalExecutionResult", "IncrementalDataRecord", "IncrementalDeferResult", - "IncrementalPublisherMixin", + "IncrementalPublisher", "IncrementalResult", "IncrementalStreamResult", "StreamItemsRecord", @@ -48,6 +45,8 @@ ASYNC_DELAY = 1 / 512 # wait time in seconds for deferring execution +suppress_key_error = suppress(KeyError) + class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" @@ -326,50 +325,243 @@ def __ne__(self, other: object) -> bool: return not self == other -class IncrementalPublisherMixin: - """Mixin to add incremental publishing to the ExecutionContext.""" +class InitialResult(NamedTuple): + """The state of the initial result""" + + children: dict[IncrementalDataRecord, None] + is_completed: bool + + +class IncrementalPublisher: + """Publish incremental results. + + This class is used to publish incremental results to the client, enabling + semi-concurrent execution while preserving result order. + + The internal publishing state is managed as follows: + + ``_released``: the set of Incremental Data records that are ready to be sent to the + client, i.e. their parents have completed and they have also completed. + + ``_pending``: the set of Incremental Data records that are definitely pending, i.e. + their parents have completed so that they can no longer be filtered. This includes + all Incremental Data records in `released`, as well as Incremental Data records that + have not yet completed. 
- _canceled_iterators: set[AsyncIterator] - subsequent_payloads: dict[IncrementalDataRecord, None] # used as ordered set + ``_initial_result``: a record containing the state of the initial result, + as follows: + ``is_completed``: indicates whether the initial result has completed. + ``children``: the set of Incremental Data records that can be be published when the + initial result is completed. - is_awaitable: Callable[[Any], TypeGuard[Awaitable]] + Each Incremental Data record also contains similar metadata, i.e. these records also + contain similar ``is_completed`` and ``children`` properties. - def filter_subsequent_payloads( + Note: Instead of sets we use dicts (with values set to None) which preserve order + and thereby achieve more deterministic results. + """ + + _initial_result: InitialResult + _released: dict[IncrementalDataRecord, None] + _pending: dict[IncrementalDataRecord, None] + _resolve: Event | None + + def __init__(self) -> None: + self._initial_result = InitialResult({}, False) + self._released = {} + self._pending = {} + self._resolve = None # lazy initialization + self._tasks: set[Awaitable] = set() + + def has_next(self) -> bool: + """Check whether there is a next incremental result.""" + return bool(self._pending) + + async def subscribe( + self, + ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + """Subscribe to the incremental results.""" + is_done = False + pending = self._pending + + try: + while not is_done: + released = self._released + for item in released: + with suppress_key_error: + del pending[item] + self._released = {} + + result = self._get_incremental_result(released) + + if not self.has_next(): + is_done = True + + if result is not None: + yield result + else: + resolve = self._resolve + if resolve is None: + self._resolve = resolve = Event() + await resolve.wait() + finally: + close_async_iterators = [] + for incremental_data_record in pending: + if isinstance( + incremental_data_record, StreamItemsRecord + 
): # pragma: no cover + async_iterator = incremental_data_record.async_iterator + if async_iterator: + try: + close_async_iterator = async_iterator.aclose() # type: ignore + except AttributeError: + pass + else: + close_async_iterators.append(close_async_iterator) + await gather(*close_async_iterators) + + def prepare_new_deferred_fragment_record( + self, + label: str | None, + path: Path | None, + parent_context: IncrementalDataRecord | None, + ) -> DeferredFragmentRecord: + """Prepare a new deferred fragment record.""" + deferred_fragment_record = DeferredFragmentRecord(label, path, parent_context) + + context = parent_context or self._initial_result + context.children[deferred_fragment_record] = None + return deferred_fragment_record + + def prepare_new_stream_items_record( + self, + label: str | None, + path: Path | None, + parent_context: IncrementalDataRecord | None, + async_iterator: AsyncIterator[Any] | None = None, + ) -> StreamItemsRecord: + """Prepare a new stream items record.""" + stream_items_record = StreamItemsRecord( + label, path, parent_context, async_iterator + ) + + context = parent_context or self._initial_result + context.children[stream_items_record] = None + return stream_items_record + + def complete_deferred_fragment_record( + self, + deferred_fragment_record: DeferredFragmentRecord, + data: dict[str, Any] | None, + ) -> None: + """Complete the given deferred fragment record.""" + deferred_fragment_record.data = data + deferred_fragment_record.is_completed = True + self._release(deferred_fragment_record) + + def complete_stream_items_record( + self, + stream_items_record: StreamItemsRecord, + items: list[str] | None, + ) -> None: + """Complete the given stream items record.""" + stream_items_record.items = items + stream_items_record.is_completed = True + self._release(stream_items_record) + + def set_is_completed_async_iterator( + self, stream_items_record: StreamItemsRecord + ) -> None: + """Mark async iterator for stream items as 
completed.""" + stream_items_record.is_completed_async_iterator = True + + def add_field_error( + self, incremental_data_record: IncrementalDataRecord, error: GraphQLError + ) -> None: + """Add a field error to the given incremental data record.""" + incremental_data_record.errors.append(error) + + def publish_initial(self) -> None: + """Publish the initial result.""" + for child in self._initial_result.children: + self._publish(child) + + def filter( self, null_path: Path, - current_incremental_data_record: IncrementalDataRecord | None = None, + erroring_incremental_data_record: IncrementalDataRecord | None, ) -> None: - """Filter subsequent payloads.""" + """Filter out the given erroring incremental data record.""" null_path_list = null_path.as_list() - for incremental_data_record in list(self.subsequent_payloads): - if incremental_data_record is current_incremental_data_record: - # don't remove payload from where error originates - continue - if incremental_data_record.path[: len(null_path_list)] != null_path_list: - # incremental_data_record points to a path unaffected by this payload + + children = (erroring_incremental_data_record or self._initial_result).children + + for child in self._get_descendants(children): + if not self._matches_path(child.path, null_path_list): continue - # incremental_data_record path points to nulled error field - if ( - isinstance(incremental_data_record, StreamItemsRecord) - and incremental_data_record.async_iterator - ): - self._canceled_iterators.add(incremental_data_record.async_iterator) - del self.subsequent_payloads[incremental_data_record] - - def get_completed_incremental_results(self) -> list[IncrementalResult]: - """Get completed incremental results.""" + + self._delete(child) + parent = child.parent_context or self._initial_result + with suppress_key_error: + del parent.children[child] + + if isinstance(child, StreamItemsRecord): + async_iterator = child.async_iterator + if async_iterator: + try: + close_async_iterator 
= async_iterator.aclose() # type:ignore + except AttributeError: # pragma: no cover + pass + else: + self._add_task(close_async_iterator) + + def _trigger(self) -> None: + """Trigger the resolve event.""" + resolve = self._resolve + if resolve is not None: + resolve.set() + self._resolve = Event() + + def _introduce(self, item: IncrementalDataRecord) -> None: + """Introduce a new IncrementalDataRecord.""" + self._pending[item] = None + + def _release(self, item: IncrementalDataRecord) -> None: + """Release the given IncrementalDataRecord.""" + if item in self._pending: + self._released[item] = None + self._trigger() + + def _push(self, item: IncrementalDataRecord) -> None: + """Push the given IncrementalDataRecord.""" + self._released[item] = None + self._pending[item] = None + self._trigger() + + def _delete(self, item: IncrementalDataRecord) -> None: + """Delete the given IncrementalDataRecord.""" + with suppress_key_error: + del self._released[item] + with suppress_key_error: + del self._pending[item] + self._trigger() + + def _get_incremental_result( + self, completed_records: Collection[IncrementalDataRecord] + ) -> SubsequentIncrementalExecutionResult | None: + """Get the incremental result with the completed records.""" incremental_results: list[IncrementalResult] = [] + encountered_completed_async_iterator = False append_result = incremental_results.append - subsequent_payloads = list(self.subsequent_payloads) - for incremental_data_record in subsequent_payloads: + for incremental_data_record in completed_records: incremental_result: IncrementalResult - if not incremental_data_record.completed.is_set(): - continue - del self.subsequent_payloads[incremental_data_record] + for child in incremental_data_record.children: + self._publish(child) if isinstance(incremental_data_record, StreamItemsRecord): items = incremental_data_record.items if incremental_data_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload 
+ encountered_completed_async_iterator = True continue # pragma: no cover incremental_result = IncrementalStreamResult( items, @@ -389,33 +581,48 @@ def get_completed_incremental_results(self) -> list[IncrementalResult]: incremental_data_record.path, incremental_data_record.label, ) - append_result(incremental_result) - return incremental_results - - async def yield_subsequent_payloads( + if incremental_results: + return SubsequentIncrementalExecutionResult( + incremental=incremental_results, has_next=self.has_next() + ) + if encountered_completed_async_iterator and not self.has_next(): + return SubsequentIncrementalExecutionResult(has_next=False) + return None + + def _publish(self, incremental_data_record: IncrementalDataRecord) -> None: + """Publish the given incremental data record.""" + if incremental_data_record.is_completed: + self._push(incremental_data_record) + else: + self._introduce(incremental_data_record) + + def _get_descendants( self, - ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: - """Yield subsequent payloads.""" - payloads = self.subsequent_payloads - has_next = bool(payloads) - - while has_next: - for awaitable in as_completed(payloads): - await awaitable - - incremental = self.get_completed_incremental_results() - - has_next = bool(payloads) - - if incremental or not has_next: - yield SubsequentIncrementalExecutionResult( - incremental=incremental or None, has_next=has_next - ) - - if not has_next: - break + children: dict[IncrementalDataRecord, None], + descendants: dict[IncrementalDataRecord, None] | None = None, + ) -> dict[IncrementalDataRecord, None]: + """Get the descendants of the given children.""" + if descendants is None: + descendants = {} + for child in children: + descendants[child] = None + self._get_descendants(child.children, descendants) + return descendants + + def _matches_path( + self, test_path: list[str | int], base_path: list[str | int] + ) -> bool: + """Get whether the given test path matches the base 
path.""" + return all(item == test_path[i] for i, item in enumerate(base_path)) + + def _add_task(self, awaitable: Awaitable[Any]) -> None: + """Add the given task to the tasks set for later execution.""" + tasks = self._tasks + task = ensure_future(awaitable) + tasks.add(task) + task.add_done_callback(tasks.discard) class DeferredFragmentRecord: @@ -426,27 +633,22 @@ class DeferredFragmentRecord: path: list[str | int] data: dict[str, Any] | None parent_context: IncrementalDataRecord | None - completed: Event - _publisher: IncrementalPublisherMixin - _data: AwaitableOrValue[dict[str, Any] | None] - _data_added: Event + children: dict[IncrementalDataRecord, None] + is_completed: bool def __init__( self, label: str | None, path: Path | None, parent_context: IncrementalDataRecord | None, - context: IncrementalPublisherMixin, ) -> None: self.label = label self.path = path.as_list() if path else [] self.parent_context = parent_context self.errors = [] - self._publisher = context - context.subsequent_payloads[self] = None - self.data = self._data = None - self.completed = Event() - self._data_added = Event() + self.children = {} + self.is_completed = False + self.data = None def __repr__(self) -> str: name = self.__class__.__name__ @@ -459,29 +661,6 @@ def __repr__(self) -> str: args.append("data") return f"{name}({', '.join(args)})" - def __await__(self) -> Generator[Any, None, dict[str, Any] | None]: - return self.wait().__await__() - - async def wait(self) -> dict[str, Any] | None: - """Wait until data is ready.""" - if self.parent_context: - await self.parent_context.completed.wait() - _data = self._data - data = ( - await _data # type: ignore - if self._publisher.is_awaitable(_data) - else _data - ) - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.completed.set() - self.data = data - return data - - def add_data(self, data: AwaitableOrValue[dict[str, Any] | None]) -> None: - """Add data to the record.""" - self._data = data - 
self._data_added.set() - class StreamItemsRecord: """A record collecting items marked with the stream directive""" @@ -491,32 +670,26 @@ class StreamItemsRecord: path: list[str | int] items: list[str] | None parent_context: IncrementalDataRecord | None + children: dict[IncrementalDataRecord, None] async_iterator: AsyncIterator[Any] | None is_completed_async_iterator: bool - completed: Event - _publisher: IncrementalPublisherMixin - _items: AwaitableOrValue[list[Any] | None] - _items_added: Event + is_completed: bool def __init__( self, label: str | None, path: Path | None, - async_iterator: AsyncIterator[Any] | None, parent_context: IncrementalDataRecord | None, - context: IncrementalPublisherMixin, + async_iterator: AsyncIterator[Any] | None = None, ) -> None: self.label = label self.path = path.as_list() if path else [] self.parent_context = parent_context self.async_iterator = async_iterator self.errors = [] - self._publisher = context - context.subsequent_payloads[self] = None - self.items = self._items = None - self.completed = Event() - self._items_added = Event() - self.is_completed_async_iterator = False + self.children = {} + self.is_completed_async_iterator = self.is_completed = False + self.items = None def __repr__(self) -> str: name = self.__class__.__name__ @@ -529,34 +702,5 @@ def __repr__(self) -> str: args.append("items") return f"{name}({', '.join(args)})" - def __await__(self) -> Generator[Any, None, list[str] | None]: - return self.wait().__await__() - - async def wait(self) -> list[str] | None: - """Wait until data is ready.""" - await self._items_added.wait() - if self.parent_context: - await self.parent_context.completed.wait() - _items = self._items - items = ( - await _items # type: ignore - if self._publisher.is_awaitable(_items) - else _items - ) - await sleep(ASYNC_DELAY) # always defer completion a little bit - self.items = items - self.completed.set() - return items - - def add_items(self, items: AwaitableOrValue[list[Any] | None]) -> 
None: - """Add items to the record.""" - self._items = items - self._items_added.set() - - def set_is_completed_async_iterator(self) -> None: - """Mark as completed.""" - self.is_completed_async_iterator = True - self._items_added.set() - IncrementalDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index b43ba00a..6b39f74e 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -6,7 +6,6 @@ import pytest from graphql.error import GraphQLError from graphql.execution import ( - ExecutionContext, ExecutionResult, ExperimentalIncrementalExecutionResults, IncrementalDeferResult, @@ -321,13 +320,9 @@ def can_compare_subsequent_incremental_execution_result(): } def can_print_deferred_fragment_record(): - context = ExecutionContext.build(schema, parse("{ hero { id } }")) - assert isinstance(context, ExecutionContext) - record = DeferredFragmentRecord(None, None, None, context) + record = DeferredFragmentRecord(None, None, None) assert str(record) == "DeferredFragmentRecord(path=[])" - record = DeferredFragmentRecord( - "foo", Path(None, "bar", "Bar"), record, context - ) + record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar"), record) assert ( str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo', parent_context)" diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 391a1de6..5ea1f25b 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -617,6 +617,7 @@ def resolve_error(*_args): ], ) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def uses_the_inline_operation_if_no_operation_name_is_provided(): schema = GraphQLSchema( GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index bffc26c5..46a53b56 100644 --- a/tests/execution/test_stream.py +++ 
b/tests/execution/test_stream.py @@ -6,7 +6,6 @@ import pytest from graphql.error import GraphQLError from graphql.execution import ( - ExecutionContext, ExecutionResult, ExperimentalIncrementalExecutionResults, IncrementalStreamResult, @@ -173,13 +172,9 @@ def can_format_and_print_incremental_stream_result(): ) def can_print_stream_record(): - context = ExecutionContext.build(schema, parse("{ hero { id } }")) - assert isinstance(context, ExecutionContext) - record = StreamItemsRecord(None, None, None, None, context) + record = StreamItemsRecord(None, None, None, None) assert str(record) == "StreamItemsRecord(path=[])" - record = StreamItemsRecord( - "foo", Path(None, "bar", "Bar"), None, record, context - ) + record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), record, None) assert ( str(record) == "StreamItemsRecord(" "path=['bar'], label='foo', parent_context)" @@ -748,6 +743,9 @@ async def friend_list(_info): "path": ["friendList", 2], } ], + "hasNext": True, + }, + { "hasNext": False, }, ] @@ -788,6 +786,9 @@ async def friend_list(_info): "path": ["friendList", 2], } ], + "hasNext": True, + }, + { "hasNext": False, }, ] @@ -861,10 +862,10 @@ async def friend_list(_info): "path": ["friendList", 2], } ], - "hasNext": False, + "hasNext": True, }, }, - {"done": True, "value": None}, + {"done": False, "value": {"hasNext": False}}, {"done": True, "value": None}, ] @@ -1092,7 +1093,7 @@ async def get_friend(i): return {"nonNullName": throw() if i < 0 else friends[i].name} def get_friends(_info): - return [get_friend(0), get_friend(-1), get_friend(1)] + return [get_friend(i) for i in (0, -1, 1)] result = await complete( document, @@ -1135,7 +1136,68 @@ def get_friends(_info): ] @pytest.mark.asyncio() - async def handles_async_error_in_complete_value_for_non_nullable_list(): + async def handles_nested_async_error_in_complete_value_after_initial_count(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + 
+ async def get_friend_name(i): + await sleep(0) + if i < 0: + raise RuntimeError("Oops") + return friends[i].name + + def get_friends(_info): + return [{"nonNullName": get_friend_name(i)} for i in (0, -1, 1)] + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "friendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "path": ["friendList", 1], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"nonNullName": "Han"}], + "path": ["friendList", 2], + } + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio() + async def handles_async_error_in_complete_value_after_initial_count_non_null(): document = parse( """ query { @@ -1154,7 +1216,59 @@ async def get_friend(i): return {"nonNullName": throw() if i < 0 else friends[i].name} def get_friends(_info): - return [get_friend(0), get_friend(-1), get_friend(1)] + return [get_friend(i) for i in (0, -1, 1)] + + result = await complete( + document, + { + "nonNullFriendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": None, + "path": ["nonNullFriendList", 1], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio() + async def handles_nested_async_error_in_complete_value_after_initial_non_null(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def get_friend_name(i): + await sleep(0) + if i < 0: + raise RuntimeError("Oops") + return friends[i].name + + def get_friends(_info): + 
return [{"nonNullName": get_friend_name(i)} for i in (0, -1, 1)] result = await complete( document, @@ -1188,7 +1302,7 @@ def get_friends(_info): ] @pytest.mark.asyncio() - async def handles_async_error_after_initial_count_reached_from_async_iterable(): + async def handles_async_error_in_complete_value_after_initial_from_async_iterable(): document = parse( """ query { @@ -1207,9 +1321,8 @@ async def get_friend(i): return {"nonNullName": throw() if i < 0 else friends[i].name} async def get_friends(_info): - yield await get_friend(0) - yield await get_friend(-1) - yield await get_friend(1) + for i in 0, -1, 1: + yield await get_friend(i) result = await complete( document, @@ -1247,6 +1360,63 @@ async def get_friends(_info): "path": ["friendList", 2], }, ], + "hasNext": True, + }, + { + "hasNext": False, + }, + ] + + @pytest.mark.asyncio() + async def handles_async_error_in_complete_value_from_async_iterable_non_null(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + await sleep(0) + return {"nonNullName": throw() if i < 0 else friends[i].name} + + async def get_friends(_info): + for i in 0, -1, 1: # pragma: no cover exit + yield await get_friend(i) + + result = await complete( + document, + { + "nonNullFriendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "incremental": [ + { + "items": None, + "path": ["nonNullFriendList", 1], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], "hasNext": False, }, ] @@ -1409,8 +1579,9 @@ async def friend_list(_info): "path": ["nestedObject", "nestedFriendList", 0], }, ], - "hasNext": False, + "hasNext": True, }, + {"hasNext": False}, ] @pytest.mark.asyncio() @@ -1537,6 +1708,9 @@ 
async def friend_list(_info): ], }, ], + "hasNext": True, + }, + { "hasNext": False, }, ] @@ -1677,6 +1851,9 @@ async def get_friends(_info): "path": ["friendList", 2], } ], + "hasNext": True, + }, + { "hasNext": False, }, ] @@ -1756,6 +1933,10 @@ async def get_friends(_info): "path": ["nestedObject", "nestedFriendList", 1], }, ], + "hasNext": True, + } + result5 = await anext(iterator) + assert result5.formatted == { "hasNext": False, } From e92c5ee848457c2c9dd2986ec18ecc4d633808e3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Aug 2024 21:11:34 +0200 Subject: [PATCH 179/230] Update dependencies --- poetry.lock | 258 ++++++++++++++++++++++++++----------------------- pyproject.toml | 4 +- tox.ini | 4 +- 3 files changed, 143 insertions(+), 123 deletions(-) diff --git a/poetry.lock b/poetry.lock index e548c1e9..1d4f8e60 100644 --- a/poetry.lock +++ b/poetry.lock @@ -30,13 +30,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "babel" -version = "2.15.0" +version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, - {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.dependencies] @@ -276,63 +276,83 @@ toml = ["tomli"] [[package]] name = "coverage" -version = "7.6.0" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, - {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, - {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, - {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, - {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, - {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, - {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, - {file = 
"coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, - {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, - {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, - {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, - {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, - {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, - {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, - {file = 
"coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, - {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, - {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, - {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, - {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, - {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, - {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, - {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, - {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, - {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, - {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, - {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, - {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, - {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, - {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = 
"sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, - {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = 
"coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = 
"coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] @@ -463,13 +483,13 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.2.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = 
"importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, + {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, ] [package.dependencies] @@ -626,38 +646,38 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.11.0" +version = "1.11.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"}, - {file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"}, - {file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"}, - {file = "mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"}, - {file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"}, - {file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"}, - {file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"}, - {file = "mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"}, - {file = 
"mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"}, - {file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"}, - {file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"}, - {file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"}, - {file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"}, - {file = "mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"}, - {file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"}, - {file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"}, - {file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"}, - {file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"}, - {file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"}, - {file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"}, - {file = "mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"}, - {file = "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"}, - {file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"}, - {file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"}, - {file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"}, - {file = "mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"}, - {file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file 
= "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, ] [package.dependencies] @@ -866,13 +886,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.1" +version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"}, - {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [package.dependencies] @@ -1062,29 +1082,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.3" +version = "0.5.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.3-py3-none-linux_armv6l.whl", hash = "sha256:b12424d9db7347fa63c5ed9af010003338c63c629fb9c9c6adb2aa4f5699729b"}, - {file = "ruff-0.5.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8d72c5684bbd4ed304a9a955ee2e67f57b35f6193222ade910cca8a805490e3"}, - {file = "ruff-0.5.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d2fc2cdb85ccac1e816cc9d5d8cedefd93661bd957756d902543af32a6b04a71"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf4bc751240b2fab5d19254571bcacb315c7b0b00bf3c912d52226a82bbec073"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc697ec874fdd7c7ba0a85ec76ab38f8595224868d67f097c5ffc21136e72fcd"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e791d34d3557a3819b3704bc1f087293c821083fa206812842fa363f6018a192"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:76bb5a87fd397520b91a83eae8a2f7985236d42dd9459f09eef58e7f5c1d8316"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8cfc7a26422c78e94f1ec78ec02501bbad2df5834907e75afe474cc6b83a8c1"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96066c4328a49fce2dd40e80f7117987369feec30ab771516cf95f1cc2db923c"}, - {file = "ruff-0.5.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfe9ab5bdc0b08470c3b261643ad54ea86edc32b64d1e080892d7953add3ad"}, - {file = "ruff-0.5.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7704582a026fa02cca83efd76671a98ee6eb412c4230209efe5e2a006c06db62"}, - {file = "ruff-0.5.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:08058d077e21b856d32ebf483443390e29dc44d927608dc8f092ff6776519da9"}, - {file = "ruff-0.5.3-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:77d49484429ed7c7e6e2e75a753f153b7b58f875bdb4158ad85af166a1ec1822"}, - {file = "ruff-0.5.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:642cbff6cbfa38d2566d8db086508d6f472edb136cbfcc4ea65997745368c29e"}, - {file = "ruff-0.5.3-py3-none-win32.whl", hash = "sha256:eafc45dd8bdc37a00b28e68cc038daf3ca8c233d73fea276dcd09defb1352841"}, - {file = "ruff-0.5.3-py3-none-win_amd64.whl", hash = "sha256:cbaec2ddf4f78e5e9ecf5456ea0f496991358a1d883862ed0b9e947e2b6aea93"}, - {file = "ruff-0.5.3-py3-none-win_arm64.whl", hash = "sha256:05fbd2cb404775d6cd7f2ff49504e2d20e13ef95fa203bd1ab22413af70d420b"}, - {file = "ruff-0.5.3.tar.gz", hash = "sha256:2a3eb4f1841771fa5b67a56be9c2d16fd3cc88e378bd86aaeaec2f7e6bcdd0a2"}, + {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, + {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, + {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, + {file = 
"ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, + {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, + {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, + {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, + {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, + {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, + {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, ] [[package]] @@ -1370,17 +1390,17 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.16.0" +version = "4.17.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.16.0-py3-none-any.whl", hash = "sha256:61e101061b977b46cf00093d4319438055290ad0009f84497a07bf2d2d7a06d0"}, - {file = "tox-4.16.0.tar.gz", hash = "sha256:43499656f9949edb681c0f907f86fbfee98677af9919d8b11ae5ad77cb800748"}, + {file = "tox-4.17.1-py3-none-any.whl", hash = 
"sha256:2974597c0353577126ab014f52d1a399fb761049e165ff34427f84e8cfe6c990"}, + {file = "tox-4.17.1.tar.gz", hash = "sha256:2c41565a571e34480bd401d668a4899806169a4633e972ac296c54406d2ded8a"}, ] [package.dependencies] -cachetools = ">=5.3.3" +cachetools = ">=5.4" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.15.4" @@ -1392,8 +1412,8 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} virtualenv = ">=20.26.3" [package.extras] -docs = ["furo (>=2024.5.6)", "sphinx (>=7.3.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.2)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.2)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] +docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] +testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.3)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] [[package]] name = "typed-ast" @@ -1539,13 +1559,13 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [[package]] name = "zipp" -version = "3.19.2" +version = 
"3.20.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, + {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, ] [package.extras] @@ -1555,4 +1575,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "ddc1250408232db6c9d443180037324541ece1547571f23e6ef8db8e2e0e09ea" +content-hash = "de9ad44d919a23237212508ca6da20b929c8c6cc8aa0da01406ef2f731debe10" diff --git a/pyproject.toml b/pyproject.toml index 2c9388fe..e4cb603b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,9 +75,9 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.5.3,<0.6" +ruff = ">=0.5.7,<0.6" mypy = [ - { version = "^1.10", python = ">=3.8" }, + { version = "^1.11", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1.0,<2" diff --git a/tox.ini b/tox.ini index 910443c5..f32bcfff 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.5.3,<0.6 +deps = ruff>=0.5.7,<0.6 commands = ruff check src tests ruff format --check src tests @@ -25,7 +25,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.10,<2 + mypy>=1.11,<2 pytest>=8.3,<9 commands = mypy src tests From 9dcf25e66f6ed36b77de788621cf50bab600d1d3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 11 Aug 2024 21:27:45 +0200 Subject: [PATCH 180/230] Bump patch version --- .bumpversion.cfg | 2 +- 
README.md | 2 +- docs/conf.py | 2 +- pyproject.toml | 2 +- src/graphql/version.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index f9e8ce93..e2aa0e98 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a5 +current_version = 3.3.0a6 commit = False tag = False diff --git a/README.md b/README.md index 127c226b..7a0a1e7a 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ reliable and compatible with GraphQL.js. The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js version 16.6.0 and supports Python version 3.7 and newer. -You can also try out the latest alpha version 3.3.0a5 of GraphQL-core +You can also try out the latest alpha version 3.3.0a6 of GraphQL-core which is up-to-date with GraphQL.js version 17.0.0a2. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. diff --git a/docs/conf.py b/docs/conf.py index 50c2639e..bd53efa0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = "3.3.0a5" +version = release = "3.3.0a6" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index e4cb603b..e149de23 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a5" +version = "3.3.0a6" description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 7d09b483..29166e49 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -8,7 +8,7 @@ __all__ = ["version", "version_info", "version_js", "version_info_js"] -version = "3.3.0a5" +version = "3.3.0a6" version_js = "17.0.0a2" From 5c5d5aa7afe98886d5ad876568deca6d53570e65 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 2 Sep 2024 21:50:37 +0200 Subject: [PATCH 181/230] Updae GitHub actions --- .github/workflows/test.yml | 4 ++-- pyproject.toml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e99059b8..01668f57 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,10 +11,10 @@ jobs: python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', 'pypy3.9', 'pypy3.10'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} diff --git a/pyproject.toml b/pyproject.toml index e149de23..3b9342b1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,11 +53,11 @@ optional = true [tool.poetry.group.test.dependencies] pytest = [ { version = "^8.3", python = ">=3.8" }, - { version = "^7.4", python = "<3.8"} + { version = "^7.4", python = "<3.8" } ] pytest-asyncio = [ { version = "^0.23.8", python = ">=3.8" }, - { version = "~0.21.1", python = "<3.8"} + { version = "~0.21.1", python = "<3.8" } ] pytest-benchmark = "^4.0" pytest-cov = [ From 
7d722667c7aa6e1df8137d92dba6a911e155e0d7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 2 Sep 2024 22:33:23 +0200 Subject: [PATCH 182/230] Fix docstring --- src/graphql/utilities/ast_to_dict.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/graphql/utilities/ast_to_dict.py b/src/graphql/utilities/ast_to_dict.py index fea70b32..3a2b3504 100644 --- a/src/graphql/utilities/ast_to_dict.py +++ b/src/graphql/utilities/ast_to_dict.py @@ -37,9 +37,8 @@ def ast_to_dict( ) -> Any: """Convert a language AST to a nested Python dictionary. - Set `location` to True in order to get the locations as well. + Set `locations` to True in order to get the locations as well. """ - """Convert a node to a nested Python dictionary.""" if isinstance(node, Node): if cache is None: cache = {} From bc9fa5e1029e2be870879699c29351d2f5d948ac Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 3 Sep 2024 21:24:43 +0200 Subject: [PATCH 183/230] Fix and simplify tox.ini --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index f32bcfff..c261c70e 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ python = 3.10: py310 3.11: py311 3.12: py312 - pypy3: pypy9 + pypy3: pypy39 pypy3.9: pypy39 pypy3.10: pypy310 @@ -46,9 +46,9 @@ deps = pytest-cov>=4.1,<6 pytest-describe>=2.2,<3 pytest-timeout>=2.3,<3 - py37,py38,py39,pypy39: typing-extensions>=4.7.1,<5 + py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 commands = # to also run the time-consuming tests: tox -e py311 -- --run-slow # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable - py37,py38,py39,py310,py311,pypy39,pypy310: pytest tests {posargs} + py3{7,8,9,10,11}, pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From eb7c86bbcf78f8030e4951bf76d4d925f1a13881 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 3 Sep 2024 21:52:39 +0200 
Subject: [PATCH 184/230] Fix test_description --- tests/pyutils/test_description.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index 57edff39..8a19396d 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -42,7 +42,7 @@ def registered(base: type): try: yield None finally: - unregister_description(LazyString) + unregister_description(base) def describe_description(): From ea8402198cf891716a18da6a5d9090f0246272d6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 3 Sep 2024 22:38:59 +0200 Subject: [PATCH 185/230] Fix coverage --- src/graphql/pyutils/description.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graphql/pyutils/description.py b/src/graphql/pyutils/description.py index 812d61fe..9d43a86d 100644 --- a/src/graphql/pyutils/description.py +++ b/src/graphql/pyutils/description.py @@ -51,7 +51,7 @@ def unregister(cls, base: type) -> None: msg = "Only types can be unregistered." 
raise TypeError(msg) if isinstance(cls.bases, tuple): - if base in cls.bases: + if base in cls.bases: # pragma: no branch cls.bases = tuple(b for b in cls.bases if b is not base) if not cls.bases: cls.bases = object From 02bf4a3ead05f16acbe8d14ebcb67244522bd8b0 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Sep 2024 20:28:59 +0200 Subject: [PATCH 186/230] Update Sphinx requirements --- docs/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index f52741c8..9652132e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=7.3.7,<8 -sphinx_rtd_theme>=2.0.0,<3 +sphinx>=7,<8 +sphinx_rtd_theme>=2,<3 From 6e6d5be7516324c4e2a2ac0e352fc339f52de82e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Sep 2024 21:06:01 +0200 Subject: [PATCH 187/230] Update the README file --- README.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 7a0a1e7a..913f81e5 100644 --- a/README.md +++ b/README.md @@ -14,8 +14,8 @@ An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. -The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js -version 16.6.0 and supports Python version 3.7 and newer. +The current stable version 3.2.4 of GraphQL-core is up-to-date with GraphQL.js +version 16.8.2 and supports Python version 3.6 to 3.12. You can also try out the latest alpha version 3.3.0a6 of GraphQL-core which is up-to-date with GraphQL.js version 17.0.0a2. 
@@ -208,6 +208,10 @@ Some restrictions (mostly in line with the design goals): * supports asynchronous operations only via async.io (does not support the additional executors in GraphQL-core) +Note that meanwhile we are using the amazing [ruff](https://docs.astral.sh/ruff/) tool +to both format and check the code of GraphQL-core 3, +in addition to using [mypy](https://mypy-lang.org/) as type checker. + ## Integration with other libraries and roadmap @@ -217,14 +221,12 @@ Some restrictions (mostly in line with the design goals): also been created by Syrus Akbary, who meanwhile has handed over the maintenance and future development to members of the GraphQL-Python community. - The current version 2 of Graphene is using Graphql-core 2 as core library for much of - the heavy lifting. Note that Graphene 2 is not compatible with GraphQL-core 3. - The new version 3 of Graphene will use GraphQL-core 3 instead of GraphQL-core 2. + Graphene 3 is now using Graphql-core 3 as core library for much of the heavy lifting. * [Ariadne](https://github.com/mirumee/ariadne) is a Python library for implementing GraphQL servers using schema-first approach created by Mirumee Software. - Ariadne is already using GraphQL-core 3 as its GraphQL implementation. + Ariadne is also using GraphQL-core 3 as its GraphQL implementation. 
* [Strawberry](https://github.com/strawberry-graphql/strawberry), created by Patrick Arminio, is a new GraphQL library for Python 3, inspired by dataclasses, @@ -240,6 +242,7 @@ Changes are tracked as ## Credits and history The GraphQL-core 3 library + * has been created and is maintained by Christoph Zwerschke * uses ideas and code from GraphQL-core 2, a prior work by Syrus Akbary * is a Python port of GraphQL.js which has been developed by Lee Byron and others From b7a18ed48b7d97a79c2d0db5a8b53d820c67b8d2 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 7 Sep 2024 19:20:10 +0200 Subject: [PATCH 188/230] Implement OneOf Input Objects via @oneOf directive Replicates graphql/graphql-js@8cfa3de8a12822efdaa52e43ecd07dea57b4f926 --- src/graphql/__init__.py | 2 + src/graphql/execution/values.py | 15 +- src/graphql/type/__init__.py | 2 + src/graphql/type/definition.py | 5 + src/graphql/type/directives.py | 9 ++ src/graphql/type/introspection.py | 5 + src/graphql/type/validate.py | 24 ++- src/graphql/utilities/coerce_input_value.py | 24 +++ src/graphql/utilities/extend_schema.py | 9 ++ src/graphql/utilities/value_from_ast.py | 8 + .../rules/values_of_correct_type.py | 72 ++++++++- tests/execution/test_oneof.py | 151 ++++++++++++++++++ tests/fixtures/schema_kitchen_sink.graphql | 6 + tests/language/test_schema_printer.py | 6 + tests/type/test_introspection.py | 120 ++++++++++++++ tests/type/test_validation.py | 43 +++++ tests/utilities/test_build_ast_schema.py | 16 +- tests/utilities/test_coerce_input_value.py | 93 +++++++++++ tests/utilities/test_find_breaking_changes.py | 2 + tests/utilities/test_print_schema.py | 4 + tests/utilities/test_value_from_ast.py | 17 ++ tests/validation/harness.py | 6 + .../validation/test_values_of_correct_type.py | 94 +++++++++++ 23 files changed, 720 insertions(+), 13 deletions(-) create mode 100644 tests/execution/test_oneof.py diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index e85c51ee..f70e77b0 100644 --- 
a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -259,6 +259,7 @@ GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, # "Enum" of Type Kinds TypeKind, # Constant Deprecation Reason @@ -504,6 +505,7 @@ "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", + "GraphQLOneOfDirective", "TypeKind", "DEFAULT_DEPRECATION_REASON", "introspection_types", diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 4810a8bd..1c223b60 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -128,16 +128,20 @@ def coerce_variable_values( continue def on_input_value_error( - path: list[str | int], invalid_value: Any, error: GraphQLError + path: list[str | int], + invalid_value: Any, + error: GraphQLError, + var_name: str = var_name, + var_def_node: VariableDefinitionNode = var_def_node, ) -> None: invalid_str = inspect(invalid_value) - prefix = f"Variable '${var_name}' got invalid value {invalid_str}" # noqa: B023 + prefix = f"Variable '${var_name}' got invalid value {invalid_str}" if path: - prefix += f" at '{var_name}{print_path_list(path)}'" # noqa: B023 + prefix += f" at '{var_name}{print_path_list(path)}'" on_error( GraphQLError( prefix + "; " + error.message, - var_def_node, # noqa: B023 + var_def_node, original_error=error, ) ) @@ -193,7 +197,8 @@ def get_argument_values( ) raise GraphQLError(msg, value_node) continue # pragma: no cover - is_null = variable_values[variable_name] is None + variable_value = variable_values[variable_name] + is_null = variable_value is None or variable_value is Undefined if is_null and is_non_null_type(arg_type): msg = f"Argument '{name}' of non-null type '{arg_type}' must not be null." 
diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index 4db6516d..b95e0e55 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -137,6 +137,7 @@ GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, # Keyword Args GraphQLDirectiveKwargs, # Constant Deprecation Reason @@ -286,6 +287,7 @@ "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", + "GraphQLOneOfDirective", "GraphQLDirectiveKwargs", "DEFAULT_DEPRECATION_REASON", "is_specified_scalar_type", diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index dbca4e66..312a41b2 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1272,6 +1272,7 @@ class GraphQLInputObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): fields: GraphQLInputFieldMap out_type: GraphQLInputFieldOutType | None + is_one_of: bool class GraphQLInputObjectType(GraphQLNamedType): @@ -1301,6 +1302,7 @@ class GeoPoint(GraphQLInputObjectType): ast_node: InputObjectTypeDefinitionNode | None extension_ast_nodes: tuple[InputObjectTypeExtensionNode, ...] 
+ is_one_of: bool def __init__( self, @@ -1311,6 +1313,7 @@ def __init__( extensions: dict[str, Any] | None = None, ast_node: InputObjectTypeDefinitionNode | None = None, extension_ast_nodes: Collection[InputObjectTypeExtensionNode] | None = None, + is_one_of: bool = False, ) -> None: super().__init__( name=name, @@ -1322,6 +1325,7 @@ def __init__( self._fields = fields if out_type is not None: self.out_type = out_type # type: ignore + self.is_one_of = is_one_of @staticmethod def out_type(value: dict[str, Any]) -> Any: @@ -1340,6 +1344,7 @@ def to_kwargs(self) -> GraphQLInputObjectTypeKwargs: out_type=None if self.out_type is GraphQLInputObjectType.out_type else self.out_type, + is_one_of=self.is_one_of, ) def __copy__(self) -> GraphQLInputObjectType: # pragma: no cover diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 17e8083c..46201d38 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -261,11 +261,20 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Exposes a URL that specifies the behaviour of this scalar.", ) +# Used to declare an Input Object as a OneOf Input Objects. +GraphQLOneOfDirective = GraphQLDirective( + name="oneOf", + locations=[DirectiveLocation.INPUT_OBJECT], + args={}, + description="Indicates an Input Object is a OneOf Input Object.", +) + specified_directives: tuple[GraphQLDirective, ...] 
= ( GraphQLIncludeDirective, GraphQLSkipDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, ) """A tuple with all directives from the GraphQL specification""" diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 866a0499..e59386a4 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -305,6 +305,7 @@ def __new__(cls): resolve=cls.input_fields, ), "ofType": GraphQLField(_Type, resolve=cls.of_type), + "isOneOf": GraphQLField(GraphQLBoolean, resolve=cls.is_one_of), } @staticmethod @@ -396,6 +397,10 @@ def input_fields(type_, _info, includeDeprecated=False): def of_type(type_, _info): return getattr(type_, "of_type", None) + @staticmethod + def is_one_of(type_, _info): + return type_.is_one_of if is_input_object_type(type_) else None + _Type: GraphQLObjectType = GraphQLObjectType( name="__Type", diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 8a6b7257..c1e806c1 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -16,7 +16,7 @@ SchemaDefinitionNode, SchemaExtensionNode, ) -from ..pyutils import and_list, inspect +from ..pyutils import Undefined, and_list, inspect from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of from .definition import ( GraphQLEnumType, @@ -482,6 +482,28 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: ], ) + if input_obj.is_one_of: + self.validate_one_of_input_object_field(input_obj, field_name, field) + + def validate_one_of_input_object_field( + self, + type_: GraphQLInputObjectType, + field_name: str, + field: GraphQLInputField, + ) -> None: + if is_non_null_type(field.type): + self.report_error( + f"OneOf input field {type_.name}.{field_name} must be nullable.", + field.ast_node and field.ast_node.type, + ) + + if field.default_value is not Undefined: + self.report_error( + f"OneOf input field {type_.name}.{field_name}" + " 
cannot have a default value.", + field.ast_node, + ) + def get_operation_type_node( schema: GraphQLSchema, operation: OperationType diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index db74d272..ab06caf1 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -130,6 +130,30 @@ def coerce_input_value( + did_you_mean(suggestions) ), ) + + if type_.is_one_of: + keys = list(coerced_dict) + if len(keys) != 1: + on_error( + path.as_list() if path else [], + input_value, + GraphQLError( + "Exactly one key must be specified" + f" for OneOf type '{type_.name}'.", + ), + ) + else: + key = keys[0] + value = coerced_dict[key] + if value is None: + on_error( + (path.as_list() if path else []) + [key], + value, + GraphQLError( + f"Field '{key}' must be non-null.", + ), + ) + return type_.out_type(coerced_dict) if is_leaf_type(type_): diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index c5af8669..fc6cee77 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -65,6 +65,7 @@ GraphQLNullableType, GraphQLObjectType, GraphQLObjectTypeKwargs, + GraphQLOneOfDirective, GraphQLOutputType, GraphQLScalarType, GraphQLSchema, @@ -777,6 +778,7 @@ def build_input_object_type( fields=partial(self.build_input_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, + is_one_of=is_one_of(ast_node), ) def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: @@ -822,3 +824,10 @@ def get_specified_by_url( specified_by_url = get_directive_values(GraphQLSpecifiedByDirective, node) return specified_by_url["url"] if specified_by_url else None + + +def is_one_of(node: InputObjectTypeDefinitionNode) -> bool: + """Given an input object node, returns if the node should be OneOf.""" + from ..execution import get_directive_values + + return 
get_directive_values(GraphQLOneOfDirective, node) is not None diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index 67ed11dc..dfefb723 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -118,6 +118,14 @@ def value_from_ast( return Undefined coerced_obj[field.out_name or field_name] = field_value + if type_.is_one_of: + keys = list(coerced_obj) + if len(keys) != 1: + return Undefined + + if coerced_obj[keys[0]] is None: + return Undefined + return type_.out_type(coerced_obj) if is_leaf_type(type_): diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 8951a2d9..7df72c6e 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, cast +from typing import Any, Mapping, cast from ...error import GraphQLError from ...language import ( @@ -12,16 +12,20 @@ FloatValueNode, IntValueNode, ListValueNode, + NonNullTypeNode, NullValueNode, ObjectFieldNode, ObjectValueNode, StringValueNode, ValueNode, + VariableDefinitionNode, + VariableNode, VisitorAction, print_ast, ) from ...pyutils import Undefined, did_you_mean, suggestion_list from ...type import ( + GraphQLInputObjectType, GraphQLScalarType, get_named_type, get_nullable_type, @@ -31,7 +35,7 @@ is_non_null_type, is_required_input_field, ) -from . import ValidationRule +from . 
import ValidationContext, ValidationRule __all__ = ["ValuesOfCorrectTypeRule"] @@ -45,6 +49,18 @@ class ValuesOfCorrectTypeRule(ValidationRule): See https://spec.graphql.org/draft/#sec-Values-of-Correct-Type """ + def __init__(self, context: ValidationContext) -> None: + super().__init__(context) + self.variable_definitions: dict[str, VariableDefinitionNode] = {} + + def enter_operation_definition(self, *_args: Any) -> None: + self.variable_definitions.clear() + + def enter_variable_definition( + self, definition: VariableDefinitionNode, *_args: Any + ) -> None: + self.variable_definitions[definition.variable.name.value] = definition + def enter_list_value(self, node: ListValueNode, *_args: Any) -> VisitorAction: # Note: TypeInfo will traverse into a list's item type, so look to the parent # input type to check if it is a list. @@ -72,6 +88,10 @@ def enter_object_value(self, node: ObjectValueNode, *_args: Any) -> VisitorActio node, ) ) + if type_.is_one_of: + validate_one_of_input_object( + self.context, node, type_, field_node_map, self.variable_definitions + ) return None def enter_object_field(self, node: ObjectFieldNode, *_args: Any) -> None: @@ -162,3 +182,51 @@ def is_valid_value_node(self, node: ValueNode) -> None: ) return + + +def validate_one_of_input_object( + context: ValidationContext, + node: ObjectValueNode, + type_: GraphQLInputObjectType, + field_node_map: Mapping[str, ObjectFieldNode], + variable_definitions: dict[str, VariableDefinitionNode], +) -> None: + keys = list(field_node_map) + is_not_exactly_one_field = len(keys) != 1 + + if is_not_exactly_one_field: + context.report_error( + GraphQLError( + f"OneOf Input Object '{type_.name}' must specify exactly one key.", + node, + ) + ) + return + + object_field_node = field_node_map.get(keys[0]) + value = object_field_node.value if object_field_node else None + is_null_literal = not value or isinstance(value, NullValueNode) + + if is_null_literal: + context.report_error( + GraphQLError( + f"Field 
'{type_.name}.{keys[0]}' must be non-null.", + node, + ) + ) + return + + is_variable = value and isinstance(value, VariableNode) + if is_variable: + variable_name = cast(VariableNode, value).name.value + definition = variable_definitions[variable_name] + is_nullable_variable = not isinstance(definition.type, NonNullTypeNode) + + if is_nullable_variable: + context.report_error( + GraphQLError( + f"Variable '{variable_name}' must be non-nullable" + f" to be used for OneOf Input Object '{type_.name}'.", + node, + ) + ) diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py new file mode 100644 index 00000000..2df1000d --- /dev/null +++ b/tests/execution/test_oneof.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from graphql.execution import ExecutionResult, execute +from graphql.language import parse +from graphql.utilities import build_schema + +if TYPE_CHECKING: + from graphql.pyutils import AwaitableOrValue + +schema = build_schema(""" + type Query { + test(input: TestInputObject!): TestObject + } + + input TestInputObject @oneOf { + a: String + b: Int + } + + type TestObject { + a: String + b: Int + } + """) + + +def execute_query( + query: str, root_value: Any, variable_values: dict[str, Any] | None = None +) -> AwaitableOrValue[ExecutionResult]: + return execute(schema, parse(query), root_value, variable_values=variable_values) + + +def describe_execute_handles_one_of_input_objects(): + def describe_one_of_input_objects(): + root_value = { + "test": lambda _info, input: input, # noqa: A002 + } + + def accepts_a_good_default_value(): + query = """ + query ($input: TestInputObject! = {a: "abc"}) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value) + + assert result == ({"test": {"a": "abc", "b": None}}, None) + + def rejects_a_bad_default_value(): + query = """ + query ($input: TestInputObject! 
= {a: "abc", b: 123}) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value) + + assert result == ( + {"test": None}, + [ + { + # This type of error would be caught at validation-time + # hence the vague error message here. + "message": "Argument 'input' of non-null type" + " 'TestInputObject!' must not be null.", + "locations": [(3, 31)], + "path": ["test"], + } + ], + ) + + def accepts_a_good_variable(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value, {"input": {"a": "abc"}}) + + assert result == ({"test": {"a": "abc", "b": None}}, None) + + def accepts_a_good_variable_with_an_undefined_key(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value, {"input": {"a": "abc"}}) + + assert result == ({"test": {"a": "abc", "b": None}}, None) + + def rejects_a_variable_with_multiple_non_null_keys(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value, {"input": {"a": "abc", "b": 123}}) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " {'a': 'abc', 'b': 123}; Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + "locations": [(2, 24)], + } + ], + ) + + def rejects_a_variable_with_multiple_nullable_keys(): + query = """ + query ($input: TestInputObject!) 
{ + test(input: $input) { + a + b + } + } + """ + result = execute_query( + query, root_value, {"input": {"a": "abc", "b": None}} + ) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " {'a': 'abc', 'b': None}; Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + "locations": [(2, 24)], + } + ], + ) diff --git a/tests/fixtures/schema_kitchen_sink.graphql b/tests/fixtures/schema_kitchen_sink.graphql index 8ec1f2d8..c1d9d06e 100644 --- a/tests/fixtures/schema_kitchen_sink.graphql +++ b/tests/fixtures/schema_kitchen_sink.graphql @@ -26,6 +26,7 @@ type Foo implements Bar & Baz & Two { five(argument: [String] = ["string", "string"]): String six(argument: InputType = {key: "value"}): Type seven(argument: Int = null): Type + eight(argument: OneOfInputType): Type } type AnnotatedObject @onObject(arg: "value") { @@ -115,6 +116,11 @@ input InputType { answer: Int = 42 } +input OneOfInputType @oneOf { + string: String + int: Int +} + input AnnotatedInput @onInputObject { annotatedField: Type @onInputFieldDefinition } diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index 35da0b06..95fcac97 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -57,6 +57,7 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 five(argument: [String] = ["string", "string"]): String six(argument: InputType = { key: "value" }): Type seven(argument: Int = null): Type + eight(argument: OneOfInputType): Type } type AnnotatedObject @onObject(arg: "value") { @@ -139,6 +140,11 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 answer: Int = 42 } + input OneOfInputType @oneOf { + string: String + int: Int + } + input AnnotatedInput @onInputObject { annotatedField: Type @onInputFieldDefinition } diff --git a/tests/type/test_introspection.py b/tests/type/test_introspection.py index 09a21c31..1a52f7a2 
100644 --- a/tests/type/test_introspection.py +++ b/tests/type/test_introspection.py @@ -364,6 +364,17 @@ def executes_an_introspection_query(): "isDeprecated": False, "deprecationReason": None, }, + { + "name": "isOneOf", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": None, + }, + "isDeprecated": False, + "deprecationReason": None, + }, ], "inputFields": None, "interfaces": [], @@ -981,6 +992,12 @@ def executes_an_introspection_query(): } ], }, + { + "name": "oneOf", + "isRepeatable": False, + "locations": ["INPUT_OBJECT"], + "args": [], + }, ], } } @@ -1433,6 +1450,109 @@ def respects_the_include_deprecated_parameter_for_enum_values(): None, ) + def identifies_one_of_for_input_objects(): + schema = build_schema( + """ + input SomeInputObject @oneOf { + a: String + } + + input AnotherInputObject { + a: String + b: String + } + + type Query { + someField(someArg: SomeInputObject): String + anotherField(anotherArg: AnotherInputObject): String + } + """ + ) + + source = """ + { + oneOfInputObject: __type(name: "SomeInputObject") { + isOneOf + } + inputObject: __type(name: "AnotherInputObject") { + isOneOf + } + } + """ + + assert graphql_sync(schema=schema, source=source) == ( + { + "oneOfInputObject": { + "isOneOf": True, + }, + "inputObject": { + "isOneOf": False, + }, + }, + None, + ) + + def returns_null_for_one_of_for_other_types(): + schema = build_schema( + """ + type SomeObject implements SomeInterface { + fieldA: String + } + enum SomeEnum { + SomeObject + } + interface SomeInterface { + fieldA: String + } + union SomeUnion = SomeObject + type Query { + someField(enum: SomeEnum): SomeUnion + anotherField(enum: SomeEnum): SomeInterface + } + """ + ) + + source = """ + { + object: __type(name: "SomeObject") { + isOneOf + } + enum: __type(name: "SomeEnum") { + isOneOf + } + interface: __type(name: "SomeInterface") { + isOneOf + } + scalar: __type(name: "String") { + isOneOf + } + union: __type(name: "SomeUnion") { + isOneOf + } + 
} + """ + + assert graphql_sync(schema=schema, source=source) == ( + { + "object": { + "isOneOf": None, + }, + "enum": { + "isOneOf": None, + }, + "interface": { + "isOneOf": None, + }, + "scalar": { + "isOneOf": None, + }, + "union": { + "isOneOf": None, + }, + }, + None, + ) + def fails_as_expected_on_the_type_root_field_without_an_arg(): schema = build_schema( """ diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index eb4e2ab7..ab364e9f 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -1593,6 +1593,49 @@ def rejects_with_relevant_locations_for_a_non_input_type(): ] +def describe_type_system_one_of_input_object_fields_must_be_nullable(): + def rejects_non_nullable_fields(): + schema = build_schema( + """ + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject @oneOf { + a: String + b: String! + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "OneOf input field SomeInputObject.b must be nullable.", + "locations": [(8, 18)], + } + ] + + def rejects_fields_with_default_values(): + schema = build_schema( + """ + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject @oneOf { + a: String + b: String = "foo" + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "OneOf input field SomeInputObject.b" + " cannot have a default value.", + "locations": [(8, 15)], + } + ] + + def describe_objects_must_adhere_to_interfaces_they_implement(): def accepts_an_object_which_implements_an_interface(): schema = build_schema( diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index a0aefb1a..b236025c 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -22,6 +22,7 @@ GraphQLInputField, GraphQLInt, GraphQLNamedType, + GraphQLOneOfDirective, GraphQLSchema, GraphQLSkipDirective, GraphQLSpecifiedByDirective, @@ -237,14 +238,15 @@ def 
supports_descriptions(): ) assert cycle_sdl(sdl) == sdl - def maintains_include_skip_and_specified_by_url_directives(): + def maintains_include_skip_and_three_other_directives(): schema = build_schema("type Query") - assert len(schema.directives) == 4 + assert len(schema.directives) == 5 assert schema.get_directive("skip") is GraphQLSkipDirective assert schema.get_directive("include") is GraphQLIncludeDirective assert schema.get_directive("deprecated") is GraphQLDeprecatedDirective assert schema.get_directive("specifiedBy") is GraphQLSpecifiedByDirective + assert schema.get_directive("oneOf") is GraphQLOneOfDirective def overriding_directives_excludes_specified(): schema = build_schema( @@ -253,10 +255,11 @@ def overriding_directives_excludes_specified(): directive @include on FIELD directive @deprecated on FIELD_DEFINITION directive @specifiedBy on FIELD_DEFINITION + directive @oneOf on OBJECT """ ) - assert len(schema.directives) == 4 + assert len(schema.directives) == 5 get_directive = schema.get_directive assert get_directive("skip") is not GraphQLSkipDirective assert get_directive("skip") is not None @@ -266,19 +269,22 @@ def overriding_directives_excludes_specified(): assert get_directive("deprecated") is not None assert get_directive("specifiedBy") is not GraphQLSpecifiedByDirective assert get_directive("specifiedBy") is not None + assert get_directive("oneOf") is not GraphQLOneOfDirective + assert get_directive("oneOf") is not None - def adding_directives_maintains_include_skip_and_specified_by_directives(): + def adding_directives_maintains_include_skip_and_three_other_directives(): schema = build_schema( """ directive @foo(arg: Int) on FIELD """ ) - assert len(schema.directives) == 5 + assert len(schema.directives) == 6 assert schema.get_directive("skip") is GraphQLSkipDirective assert schema.get_directive("include") is GraphQLIncludeDirective assert schema.get_directive("deprecated") is GraphQLDeprecatedDirective assert 
schema.get_directive("specifiedBy") is GraphQLSpecifiedByDirective + assert schema.get_directive("oneOf") is GraphQLOneOfDirective assert schema.get_directive("foo") is not None def type_modifiers(): diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index 61b1feab..c18b5098 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -250,6 +250,99 @@ def transforms_values_with_out_type(): result = _coerce_value({"real": 1, "imag": 2}, ComplexInputObject) assert expect_value(result) == 1 + 2j + def describe_for_graphql_input_object_that_is_one_of(): + TestInputObject = GraphQLInputObjectType( + "TestInputObject", + { + "foo": GraphQLInputField(GraphQLInt), + "bar": GraphQLInputField(GraphQLInt), + }, + is_one_of=True, + ) + + def returns_no_error_for_a_valid_input(): + result = _coerce_value({"foo": 123}, TestInputObject) + assert expect_value(result) == {"foo": 123} + + def returns_an_error_if_more_than_one_field_is_specified(): + result = _coerce_value({"foo": 123, "bar": None}, TestInputObject) + assert expect_errors(result) == [ + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"foo": 123, "bar": None}, + ) + ] + + def returns_an_error_if_the_one_field_is_null(): + result = _coerce_value({"bar": None}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'bar' must be non-null.", + ["bar"], + None, + ) + ] + + def returns_an_error_for_an_invalid_field(): + result = _coerce_value({"foo": nan}, TestInputObject) + assert expect_errors(result) == [ + ( + "Int cannot represent non-integer value: nan", + ["foo"], + nan, + ) + ] + + def returns_multiple_errors_for_multiple_invalid_fields(): + result = _coerce_value({"foo": "abc", "bar": "def"}, TestInputObject) + assert expect_errors(result) == [ + ( + "Int cannot represent non-integer value: 'abc'", + ["foo"], + "abc", + ), + ( + "Int cannot represent non-integer 
value: 'def'", + ["bar"], + "def", + ), + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"foo": "abc", "bar": "def"}, + ), + ] + + def returns_an_error_for_an_unknown_field(): + result = _coerce_value({"foo": 123, "unknownField": 123}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'unknownField' is not defined by type 'TestInputObject'.", + [], + {"foo": 123, "unknownField": 123}, + ) + ] + + def returns_an_error_for_a_misspelled_field(): + result = _coerce_value({"bart": 123}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'bart' is not defined by type 'TestInputObject'." + " Did you mean 'bar'?", + [], + {"bart": 123}, + ), + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"bart": 123}, + ), + ] + def describe_for_graphql_input_object_with_default_value(): def _get_test_input_object(default_value): return GraphQLInputObjectType( diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index c9003a6c..24d03704 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -1,6 +1,7 @@ from graphql.type import ( GraphQLDeprecatedDirective, GraphQLIncludeDirective, + GraphQLOneOfDirective, GraphQLSchema, GraphQLSkipDirective, GraphQLSpecifiedByDirective, @@ -817,6 +818,7 @@ def should_detect_if_a_directive_was_implicitly_removed(): GraphQLSkipDirective, GraphQLIncludeDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, ] ) diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 1939ed59..878d0770 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -771,6 +771,9 @@ def prints_introspection_schema(): url: String! 
) on SCALAR + """Indicates an Input Object is a OneOf Input Object.""" + directive @oneOf on INPUT_OBJECT + """ A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations. """ @@ -813,6 +816,7 @@ def prints_introspection_schema(): enumValues(includeDeprecated: Boolean = false): [__EnumValue!] inputFields(includeDeprecated: Boolean = false): [__InputValue!] ofType: __Type + isOneOf: Boolean } """An enum describing what kind of type a given `__Type` is.""" diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py index f21abcc2..6622b4dc 100644 --- a/tests/utilities/test_value_from_ast.py +++ b/tests/utilities/test_value_from_ast.py @@ -174,6 +174,15 @@ def coerces_non_null_lists_of_non_null_values(): }, ) + test_one_of_input_obj = GraphQLInputObjectType( + "TestOneOfInput", + { + "a": GraphQLInputField(GraphQLString), + "b": GraphQLInputField(GraphQLString), + }, + is_one_of=True, + ) + def coerces_input_objects_according_to_input_coercion_rules(): assert _value_from("null", test_input_obj) is None assert _value_from("[]", test_input_obj) is Undefined @@ -193,6 +202,14 @@ def coerces_input_objects_according_to_input_coercion_rules(): ) assert _value_from("{ requiredBool: null }", test_input_obj) is Undefined assert _value_from("{ bool: true }", test_input_obj) is Undefined + assert _value_from('{ a: "abc" }', test_one_of_input_obj) == {"a": "abc"} + assert _value_from('{ b: "def" }', test_one_of_input_obj) == {"b": "def"} + assert _value_from('{ a: "abc", b: None }', test_one_of_input_obj) is Undefined + assert _value_from("{ a: null }", test_one_of_input_obj) is Undefined + assert _value_from("{ a: 1 }", test_one_of_input_obj) is Undefined + assert _value_from('{ a: "abc", b: "def" }', test_one_of_input_obj) is Undefined + assert _value_from("{}", test_one_of_input_obj) is Undefined + assert 
_value_from('{ c: "abc" }', test_one_of_input_obj) is Undefined def accepts_variable_values_assuming_already_coerced(): assert _value_from("$var", GraphQLBoolean, {}) is Undefined diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 1189e922..9a6912f4 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -86,6 +86,11 @@ stringListField: [String] } + input OneOfInput @oneOf { + stringField: String + intField: Int + } + type ComplicatedArgs { # TODO List # TODO Coercion @@ -100,6 +105,7 @@ stringListArgField(stringListArg: [String]): String stringListNonNullArgField(stringListNonNullArg: [String!]): String complexArgField(complexArg: ComplexInput): String + oneOfArgField(oneOfArg: OneOfInput): String multipleReqs(req1: Int!, req2: Int!): String nonNullFieldWithDefault(arg: Int! = 0): String multipleOpts(opt1: Int = 0, opt2: Int = 0): String diff --git a/tests/validation/test_values_of_correct_type.py b/tests/validation/test_values_of_correct_type.py index e19228aa..7cf20648 100644 --- a/tests/validation/test_values_of_correct_type.py +++ b/tests/validation/test_values_of_correct_type.py @@ -931,6 +931,29 @@ def full_object_with_fields_in_different_order(): """ ) + def describe_valid_one_of_input_object_value(): + def exactly_one_field(): + assert_valid( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: "abc" }) + } + } + """ + ) + + def exactly_one_non_nullable_variable(): + assert_valid( + """ + query ($string: String!) 
{ + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: $string }) + } + } + """ + ) + def describe_invalid_input_object_value(): def partial_object_missing_required(): assert_errors( @@ -1097,6 +1120,77 @@ def allows_custom_scalar_to_accept_complex_literals(): schema=schema, ) + def describe_invalid_one_of_input_object_value(): + def invalid_field_type(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: 2 }) + } + } + """, + [ + { + "message": "String cannot represent a non string value: 2", + "locations": [(4, 60)], + }, + ], + ) + + def exactly_one_null_field(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: null }) + } + } + """, + [ + { + "message": "Field 'OneOfInput.stringField' must be non-null.", + "locations": [(4, 45)], + }, + ], + ) + + def exactly_one_nullable_variable(): + assert_errors( + """ + query ($string: String) { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: $string }) + } + } + """, + [ + { + "message": "Variable 'string' must be non-nullable to be used" + " for OneOf Input Object 'OneOfInput'.", + "locations": [(4, 45)], + }, + ], + ) + + def more_than_one_field(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: "abc", intField: 123 }) + } + } + """, + [ + { + "message": "OneOf Input Object 'OneOfInput'" + " must specify exactly one key.", + "locations": [(4, 45)], + }, + ], + ) + def describe_directive_arguments(): def with_directives_of_valid_types(): assert_valid( From 4933704a0f9af3bcce5f4b6178efd68dc33c092c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 7 Sep 2024 19:29:06 +0200 Subject: [PATCH 189/230] Use American English Replicates graphql/graphql-js@82ff6539a5b961b00367ed7d6ac57a7297af2a9a --- src/graphql/type/directives.py | 6 +++--- tests/utilities/test_build_ast_schema.py | 5 +++-- tests/utilities/test_print_schema.py | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) 
diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 46201d38..d4160300 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -248,17 +248,17 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Marks an element of a GraphQL schema as no longer supported.", ) -# Used to provide a URL for specifying the behaviour of custom scalar definitions: +# Used to provide a URL for specifying the behavior of custom scalar definitions: GraphQLSpecifiedByDirective = GraphQLDirective( name="specifiedBy", locations=[DirectiveLocation.SCALAR], args={ "url": GraphQLArgument( GraphQLNonNull(GraphQLString), - description="The URL that specifies the behaviour of this scalar.", + description="The URL that specifies the behavior of this scalar.", ) }, - description="Exposes a URL that specifies the behaviour of this scalar.", + description="Exposes a URL that specifies the behavior of this scalar.", ) # Used to declare an Input Object as a OneOf Input Objects. diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index b236025c..d4c2dff9 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -7,6 +7,7 @@ from typing import Union import pytest + from graphql import graphql_sync from graphql.language import DocumentNode, InterfaceTypeDefinitionNode, parse, print_ast from graphql.type import ( @@ -1139,7 +1140,7 @@ def can_build_invalid_schema(): assert errors def do_not_override_standard_types(): - # Note: not sure it's desired behaviour to just silently ignore override + # Note: not sure it's desired behavior to just silently ignore override # attempts so just documenting it here. 
schema = build_schema( @@ -1252,7 +1253,7 @@ def can_deep_copy_pickled_schema(): # check that printing the copied schema gives the same SDL assert print_schema(copied) == sdl - @pytest.mark.slow() + @pytest.mark.slow def describe_deepcopy_and_pickle_big(): # pragma: no cover @pytest.mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 878d0770..0e96bbbc 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -765,9 +765,9 @@ def prints_introspection_schema(): reason: String = "No longer supported" ) on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE - """Exposes a URL that specifies the behaviour of this scalar.""" + """Exposes a URL that specifies the behavior of this scalar.""" directive @specifiedBy( - """The URL that specifies the behaviour of this scalar.""" + """The URL that specifies the behavior of this scalar.""" url: String! 
) on SCALAR From 448d0455e26441e54405413729e501324c20de4a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 7 Sep 2024 19:39:39 +0200 Subject: [PATCH 190/230] Update ruff and adapt code style --- pyproject.toml | 2 +- src/graphql/execution/execute.py | 2 +- src/graphql/pyutils/suggestion_list.py | 3 +- tests/execution/test_abstract.py | 1 + tests/execution/test_customize.py | 7 +- tests/execution/test_defer.py | 39 +++++---- tests/execution/test_execution_result.py | 1 + tests/execution/test_executor.py | 9 +- tests/execution/test_lists.py | 29 ++++--- tests/execution/test_map_async_iterable.py | 31 +++---- tests/execution/test_middleware.py | 7 +- tests/execution/test_mutations.py | 11 +-- tests/execution/test_nonnull.py | 21 ++--- tests/execution/test_parallel.py | 11 +-- tests/execution/test_stream.py | 87 ++++++++++--------- tests/execution/test_subscribe.py | 55 ++++++------ tests/execution/test_sync.py | 15 ++-- tests/language/test_block_string_fuzz.py | 3 +- tests/language/test_lexer.py | 1 + tests/language/test_parser.py | 1 + tests/language/test_printer.py | 1 + tests/language/test_schema_parser.py | 1 + tests/language/test_schema_printer.py | 1 + tests/language/test_source.py | 1 + tests/language/test_visitor.py | 1 + tests/pyutils/test_async_reduce.py | 9 +- tests/pyutils/test_description.py | 1 + tests/pyutils/test_format_list.py | 1 + tests/pyutils/test_inspect.py | 3 +- tests/pyutils/test_is_awaitable.py | 7 +- tests/pyutils/test_simple_pub_sub.py | 9 +- tests/pyutils/test_undefined.py | 1 + tests/star_wars_schema.py | 1 - tests/test_star_wars_query.py | 37 ++++---- tests/test_user_registry.py | 13 +-- tests/type/test_assert_name.py | 1 + tests/type/test_definition.py | 1 + tests/type/test_directives.py | 1 + tests/type/test_extensions.py | 1 + tests/type/test_predicate.py | 1 + tests/type/test_scalars.py | 1 + tests/type/test_schema.py | 1 + tests/type/test_validation.py | 1 + tests/utilities/test_ast_from_value.py | 1 + 
tests/utilities/test_build_client_schema.py | 1 + tests/utilities/test_coerce_input_value.py | 1 + tests/utilities/test_extend_schema.py | 1 + .../test_introspection_from_schema.py | 3 +- .../test_strip_ignored_characters.py | 1 + .../test_strip_ignored_characters_fuzz.py | 21 ++--- tests/utilities/test_type_from_ast.py | 1 + .../test_assert_equal_awaitables_or_values.py | 6 +- tests/validation/test_validation.py | 1 + tox.ini | 2 +- 54 files changed, 258 insertions(+), 212 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3b9342b1..c3c2367c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.5.7,<0.6" +ruff = ">=0.6.4,<0.7" mypy = [ { version = "^1.11", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index e370bcc1..ae56c9b9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -96,7 +96,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 + async def anext(iterator: AsyncIterator) -> Any: """Return the next item from an async iterator.""" return await iterator.__anext__() diff --git a/src/graphql/pyutils/suggestion_list.py b/src/graphql/pyutils/suggestion_list.py index 6abeefed..35240c77 100644 --- a/src/graphql/pyutils/suggestion_list.py +++ b/src/graphql/pyutils/suggestion_list.py @@ -99,8 +99,7 @@ def measure(self, option: str, threshold: int) -> int | None: double_diagonal_cell = rows[(i - 2) % 3][j - 2] current_cell = min(current_cell, double_diagonal_cell + 1) - if current_cell < smallest_cell: - smallest_cell = current_cell + smallest_cell = min(current_cell, smallest_cell) current_row[j] = current_cell diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index b5ebc45b..d7d12b7a 
100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -3,6 +3,7 @@ from typing import Any, NamedTuple import pytest + from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 23740237..85462147 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,6 +1,7 @@ from inspect import isasyncgen import pytest + from graphql.execution import ExecutionContext, execute, subscribe from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -9,7 +10,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -62,7 +63,7 @@ def execute_field( def describe_customize_subscription(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def uses_a_custom_subscribe_field_resolver(): schema = GraphQLSchema( query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), @@ -91,7 +92,7 @@ async def custom_foo(): await subscription.aclose() - @pytest.mark.asyncio() + @pytest.mark.asyncio async def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): def build_resolve_info(self, *args, **kwargs): diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 6b39f74e..312a2a0b 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -4,6 +4,7 @@ from typing import Any, AsyncGenerator, NamedTuple import pytest + from graphql.error import GraphQLError from graphql.execution import ( ExecutionResult, @@ -333,7 +334,7 @@ def can_print_deferred_fragment_record(): "path=['bar'], label='foo', 
parent_context, data)" ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fragments_containing_scalar_types(): document = parse( """ @@ -358,7 +359,7 @@ async def can_defer_fragments_containing_scalar_types(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_disable_defer_using_if_argument(): document = parse( """ @@ -384,7 +385,7 @@ async def can_disable_defer_using_if_argument(): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_disable_defer_with_null_if_argument(): document = parse( """ @@ -409,7 +410,7 @@ async def does_not_disable_defer_with_null_if_argument(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_an_error_for_defer_directive_with_non_string_label(): document = parse( """ @@ -430,7 +431,7 @@ async def throws_an_error_for_defer_directive_with_non_string_label(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fragments_on_the_top_level_query_field(): document = parse( """ @@ -456,7 +457,7 @@ async def can_defer_fragments_on_the_top_level_query_field(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fragments_with_errors_on_the_top_level_query_field(): document = parse( """ @@ -493,7 +494,7 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_a_fragment_within_an_already_deferred_fragment(): document = parse( """ @@ -540,7 +541,7 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): document = parse( """ @@ -571,7 +572,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first(): document = parse( """ @@ -602,7 +603,7 
@@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_an_inline_fragment(): document = parse( """ @@ -632,7 +633,7 @@ async def can_defer_an_inline_fragment(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -669,7 +670,7 @@ async def handles_errors_thrown_in_deferred_fragments(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_non_nullable_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -709,7 +710,7 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): document = parse( """ @@ -740,7 +741,7 @@ async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -780,7 +781,7 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_payloads_in_correct_order(): document = parse( """ @@ -833,7 +834,7 @@ async def returns_payloads_in_correct_order(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_payloads_from_synchronous_data_in_correct_order(): document = parse( """ @@ -886,7 +887,7 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): document = parse( """ @@ -920,7 +921,7 @@ async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def 
original_execute_function_throws_error_if_deferred_and_all_is_sync(): document = parse( """ @@ -938,7 +939,7 @@ async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): " multiple payloads (due to @defer or @stream directive)" ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync(): document = parse( """ diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index 28ba17af..162bd00d 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.execution import ExecutionResult diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 5ea1f25b..792066f1 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -4,6 +4,7 @@ from typing import Any, Awaitable, cast import pytest + from graphql.error import GraphQLError from graphql.execution import execute, execute_sync from graphql.language import FieldNode, OperationDefinitionNode, parse @@ -41,7 +42,7 @@ def accepts_positional_arguments(): assert result == ({"a": "rootValue"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def executes_arbitrary_code(): # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic class Data: @@ -375,7 +376,7 @@ def resolve(_obj, _info, **args): assert len(resolved_args) == 1 assert resolved_args[0] == {"numArg": 123, "stringArg": "foo"} - @pytest.mark.asyncio() + @pytest.mark.asyncio async def nulls_out_error_subtrees(): document = parse( """ @@ -868,7 +869,7 @@ def resolves_to_an_error_if_schema_does_not_support_operation(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correct_field_ordering_despite_execution_order(): schema = GraphQLSchema( GraphQLObjectType( @@ -984,7 +985,7 @@ def does_not_include_arguments_that_were_not_set(): None, ) - 
@pytest.mark.asyncio() + @pytest.mark.asyncio async def fails_when_is_type_of_check_is_not_met(): class Special: value: str diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 3d2bb8fa..5dc4b5f0 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,6 +1,7 @@ from typing import Any, AsyncGenerator import pytest + from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable @@ -171,7 +172,7 @@ async def _list_field( assert is_awaitable(result) return await result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_an_async_generator_as_a_list_value(): async def list_field(): yield "two" @@ -183,7 +184,7 @@ async def list_field(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_a_custom_async_iterable_as_a_list_value(): class ListField: def __aiter__(self): @@ -202,7 +203,7 @@ async def __anext__(self): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_an_async_generator_that_throws(): async def list_field(): yield "two" @@ -214,7 +215,7 @@ async def list_field(): [{"message": "bad", "locations": [(1, 3)], "path": ["listField"]}], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_an_async_generator_where_intermediate_value_triggers_an_error(): async def list_field(): yield "two" @@ -232,7 +233,7 @@ async def list_field(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_errors_from_complete_value_in_async_iterables(): async def list_field(): yield "two" @@ -249,7 +250,7 @@ async def list_field(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_functions_from_complete_value_in_async_iterables(): async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: return data.index @@ -259,7 +260,7 @@ async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: None, ) - 
@pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_single_async_functions_from_complete_value_in_async_iterables(): async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: return data.index @@ -269,7 +270,7 @@ async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_errors_from_complete_value_in_async_iterables(): async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: index = data.index @@ -288,7 +289,7 @@ async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_nulls_yielded_by_async_generator(): async def list_field(): yield 1 @@ -322,7 +323,7 @@ def execute_query(list_value: Any) -> Any: return result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def contains_values(): list_field = [1, 2] assert await _complete(list_field, "[Int]") == ({"listField": [1, 2]}, None) @@ -330,7 +331,7 @@ async def contains_values(): assert await _complete(list_field, "[Int!]") == ({"listField": [1, 2]}, None) assert await _complete(list_field, "[Int!]!") == ({"listField": [1, 2]}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def contains_null(): list_field = [1, None, 2] errors = [ @@ -351,7 +352,7 @@ async def contains_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, errors) assert await _complete(list_field, "[Int!]!") == (None, errors) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): list_field = None errors = [ @@ -366,7 +367,7 @@ async def returns_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, None) assert await _complete(list_field, "[Int!]!") == (None, errors) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def contains_error(): list_field = [1, RuntimeError("bad"), 2] errors = [ @@ -393,7 +394,7 @@ async def contains_error(): errors, ) - 
@pytest.mark.asyncio() + @pytest.mark.asyncio async def results_in_errors(): list_field = RuntimeError("bad") errors = [ diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index 055a61bc..eb3cddb8 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -1,11 +1,12 @@ import pytest + from graphql.execution import map_async_iterable try: # pragma: no cover anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -21,7 +22,7 @@ async def throw(_x: int) -> int: def describe_map_async_iterable(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def maps_over_async_generator(): async def source(): yield 1 @@ -36,7 +37,7 @@ async def source(): with pytest.raises(StopAsyncIteration): assert await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def maps_over_async_iterable(): items = [1, 2, 3] @@ -57,7 +58,7 @@ async def __anext__(self): assert not items assert values == [2, 4, 6] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def compatible_with_async_for(): async def source(): yield 1 @@ -70,7 +71,7 @@ async def source(): assert values == [2, 4, 6] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_returning_early_from_mapped_async_generator(): async def source(): yield 1 @@ -91,7 +92,7 @@ async def source(): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_returning_early_from_mapped_async_iterable(): items = [1, 2, 3] @@ -119,7 +120,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_throwing_errors_through_async_iterable(): items = [1, 2, 
3] @@ -150,7 +151,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_throwing_errors_with_traceback_through_async_iterables(): class Iterable: def __aiter__(self): @@ -177,7 +178,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(one) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_map_over_thrown_errors(): async def source(): yield 1 @@ -192,7 +193,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_map_over_externally_thrown_errors(): async def source(): yield 1 @@ -206,7 +207,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterable_is_closed_when_mapped_iterable_is_closed(): class Iterable: def __init__(self): @@ -230,7 +231,7 @@ async def aclose(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterable_is_closed_on_callback_error(): class Iterable: def __init__(self): @@ -253,7 +254,7 @@ async def aclose(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterable_exits_on_callback_error(): exited = False @@ -272,7 +273,7 @@ async def iterable(): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mapped_iterable_is_closed_when_iterable_cannot_be_closed(): class Iterable: def __aiter__(self): @@ -287,7 +288,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def ignores_that_iterable_cannot_be_closed_on_callback_error(): class Iterable: def __aiter__(self): diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 
d4abba95..291f218c 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -2,6 +2,7 @@ from typing import Awaitable, cast import pytest + from graphql.execution import Middleware, MiddlewareManager, execute, subscribe from graphql.language.parser import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -90,7 +91,7 @@ def capitalize_middleware(next_, *args, **kwargs): assert result.data == {"first": "Eno", "second": "Owt"} # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def single_async_function(): doc = parse("{ first second }") @@ -200,7 +201,7 @@ def resolve(self, next_, *args, **kwargs): ) assert result.data == {"field": "devloseR"} # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def with_async_function_and_object(): doc = parse("{ field }") @@ -237,7 +238,7 @@ async def resolve(self, next_, *args, **kwargs): result = await awaitable_result assert result.data == {"field": "devloseR"} - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscription_simple(): async def bar_resolve(_obj, _info): yield "bar" diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 20ee1c97..3737bb6a 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -4,6 +4,7 @@ from typing import Any, Awaitable import pytest + from graphql.execution import ( ExperimentalIncrementalExecutionResults, execute, @@ -106,7 +107,7 @@ async def promise_to_get_the_number(holder: NumberHolder, _info) -> int: def describe_execute_handles_mutation_execution_ordering(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def evaluates_mutations_serially(): document = parse( """ @@ -154,7 +155,7 @@ def does_not_include_illegal_mutation_fields_in_output(): result = execute_sync(schema=schema, document=document) assert result == ({}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def 
evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): document = parse( """ @@ -211,7 +212,7 @@ async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mutation_fields_with_defer_do_not_block_next_mutation(): document = parse( """ @@ -256,7 +257,7 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mutation_inside_of_a_fragment(): document = parse( """ @@ -282,7 +283,7 @@ async def mutation_inside_of_a_fragment(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mutation_with_defer_is_not_executed_serially(): document = parse( """ diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index 053009a9..99810ed9 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -3,6 +3,7 @@ from typing import Any, Awaitable, cast import pytest + from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import AwaitableOrValue @@ -125,12 +126,12 @@ def describe_nulls_a_nullable_field(): } """ - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ({"sync": None}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -153,7 +154,7 @@ def describe_nulls_a_returned_object_that_contains_a_non_null_field(): } """ - @pytest.mark.asyncio() + @pytest.mark.asyncio async def that_returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ( @@ -168,7 +169,7 @@ async def that_returns_null(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def that_throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ 
-214,14 +215,14 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) ) assert result == (data, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -348,7 +349,7 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): "anotherPromiseNest": None, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) @@ -411,7 +412,7 @@ async def returns_null(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -477,7 +478,7 @@ def describe_nulls_the_top_level_if_non_nullable_field(): } """ - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 @@ -493,7 +494,7 @@ async def returns_null(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index faacd0c4..f4dc86b1 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -2,6 +2,7 @@ from typing import Awaitable import pytest + from graphql.execution import execute from graphql.language import parse from graphql.type import ( @@ -31,7 +32,7 @@ async def wait(self) -> bool: def describe_parallel_execution(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_single_field(): # make sure that the 
special case of resolving a single field works async def resolve(*_args): @@ -52,7 +53,7 @@ async def resolve(*_args): assert result == ({"foo": True}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_fields_in_parallel(): barrier = Barrier(2) @@ -78,7 +79,7 @@ async def resolve(*_args): assert result == ({"foo": True, "bar": True}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_single_element_list(): # make sure that the special case of resolving a single element list works async def resolve(*_args): @@ -97,7 +98,7 @@ async def resolve(*_args): assert result == ({"foo": [True]}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_list_in_parallel(): barrier = Barrier(2) @@ -127,7 +128,7 @@ async def resolve_list(*args): assert result == ({"foo": [True, True]}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_is_type_of_in_parallel(): FooType = GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 46a53b56..8a1ca605 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -4,6 +4,7 @@ from typing import Any, Awaitable, NamedTuple import pytest + from graphql.error import GraphQLError from graphql.execution import ( ExecutionResult, @@ -28,7 +29,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -217,7 +218,7 @@ def can_compare_incremental_stream_result(): assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) assert result != {**args, "label": "baz"} - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_list_field(): document = parse("{ scalarList @stream(initialCount: 1) }") result = await complete( @@ 
-240,7 +241,7 @@ async def can_stream_a_list_field(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_use_default_value_of_initial_count(): document = parse("{ scalarList @stream }") result = await complete( @@ -267,7 +268,7 @@ async def can_use_default_value_of_initial_count(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def negative_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: -2) }") result = await complete( @@ -286,7 +287,7 @@ async def negative_values_of_initial_count_throw_field_errors(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def non_integer_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: 1.5) }") result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) @@ -303,7 +304,7 @@ async def non_integer_values_of_initial_count_throw_field_errors(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_label_from_stream_directive(): document = parse( '{ scalarList @stream(initialCount: 1, label: "scalar-stream") }' @@ -340,7 +341,7 @@ async def returns_label_from_stream_directive(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_an_error_for_stream_directive_with_non_string_label(): document = parse("{ scalarList @stream(initialCount: 1, label: 42) }") result = await complete(document, {"scalarList": ["some apples"]}) @@ -360,7 +361,7 @@ async def throws_an_error_for_stream_directive_with_non_string_label(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_disable_stream_using_if_argument(): document = parse("{ scalarList @stream(initialCount: 0, if: false) }") result = await complete( @@ -372,7 +373,7 @@ async def can_disable_stream_using_if_argument(): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def 
does_not_disable_stream_with_null_if_argument(): document = parse( @@ -400,7 +401,7 @@ async def does_not_disable_stream_with_null_if_argument(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_multi_dimensional_lists(): document = parse("{ scalarListList @stream(initialCount: 1) }") result = await complete( @@ -440,7 +441,7 @@ async def can_stream_multi_dimensional_lists(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_a_list_of_awaitables(): document = parse( """ @@ -482,7 +483,7 @@ async def await_friend(f): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_in_correct_order_with_list_of_awaitables(): document = parse( """ @@ -537,7 +538,7 @@ async def await_friend(f): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_a_list_with_nested_async_fields(): document = parse( """ @@ -585,7 +586,7 @@ async def get_id(f): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_list_of_awaitables_before_initial_count_reached(): document = parse( """ @@ -635,7 +636,7 @@ async def await_friend(f, i): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_list_of_awaitables_after_initial_count_reached(): document = parse( """ @@ -694,7 +695,7 @@ async def await_friend(f, i): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_an_async_iterable(): document = parse( """ @@ -750,7 +751,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count(): document = parse( """ @@ -793,7 +794,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def negative_initial_count_throw_error_on_field_returning_async_iterable(): document = parse( """ @@ -821,7 +822,7 @@ async def friend_list(_info): "data": 
{"friendList": None}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_handle_concurrent_calls_to_next_without_waiting(): document = parse( """ @@ -869,7 +870,7 @@ async def friend_list(_info): {"done": True, "value": None}, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_async_iterable_before_initial_count_is_reached(): document = parse( """ @@ -900,7 +901,7 @@ async def friend_list(_info): "data": {"friendList": None}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_async_iterable_after_initial_count_is_reached(): document = parse( """ @@ -945,7 +946,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): document = parse( """ @@ -986,7 +987,7 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_null_for_non_null_async_items_after_initial_count_is_reached(): document = parse( """ @@ -1034,7 +1035,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_thrown_in_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1073,7 +1074,7 @@ async def scalar_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1135,7 +1136,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_nested_async_error_in_complete_value_after_initial_count(): document = parse( """ @@ -1196,7 +1197,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_after_initial_count_non_null(): document = parse( """ @@ -1249,7 +1250,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio 
async def handles_nested_async_error_in_complete_value_after_initial_non_null(): document = parse( """ @@ -1301,7 +1302,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_after_initial_from_async_iterable(): document = parse( """ @@ -1367,7 +1368,7 @@ async def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_from_async_iterable_non_null(): document = parse( """ @@ -1421,7 +1422,7 @@ async def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_payloads_that_are_nulled(): document = parse( """ @@ -1472,7 +1473,7 @@ async def friend_list(_info): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): document = parse( """ @@ -1515,7 +1516,7 @@ async def friend_list(_info): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( @@ -1584,7 +1585,7 @@ async def friend_list(_info): {"hasNext": False}, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): document = parse( @@ -1655,7 +1656,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): document = parse( """ @@ -1716,7 +1717,7 @@ async def friend_list(_info): ] @pytest.mark.timeout(1) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered(): finished = False @@ -1795,7 +1796,7 @@ async def iterable(_info): assert not finished # running iterator cannot be canceled - 
@pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1858,7 +1859,7 @@ async def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): resolve_slow_field = Event() @@ -1944,7 +1945,7 @@ async def get_friends(_info): await anext(iterator) @pytest.mark.timeout(1) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fields_that_are_resolved_after_async_iterable_is_complete(): resolve_slow_field = Event() resolve_iterable = Event() @@ -2022,7 +2023,7 @@ async def get_friends(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fields_that_are_resolved_before_async_iterable_is_complete(): resolve_slow_field = Event() resolve_iterable = Event() @@ -2106,7 +2107,7 @@ async def get_friends(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def finishes_async_iterable_when_returned_generator_is_closed(): finished = False @@ -2146,7 +2147,7 @@ async def iterable(_info): await sleep(0) assert finished - @pytest.mark.asyncio() + @pytest.mark.asyncio async def finishes_async_iterable_when_underlying_iterator_has_no_close_method(): class Iterable: def __init__(self): @@ -2197,7 +2198,7 @@ async def __anext__(self): await sleep(0) assert iterable.index == 4 - @pytest.mark.asyncio() + @pytest.mark.asyncio async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): finished = False diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index fcbd13ef..8a6b4c38 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -13,6 +13,7 @@ ) import pytest + from graphql.execution import ( ExecutionResult, create_source_event_stream, @@ -44,7 
+45,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -197,7 +198,7 @@ def subscribe_with_bad_args( # Check all error cases when initializing the subscription. def describe_subscription_initialization_phase(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_positional_arguments(): document = parse( """ @@ -217,7 +218,7 @@ async def empty_async_iterable(_info): await anext(ai) await ai.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_multiple_subscription_fields_defined_in_schema(): schema = GraphQLSchema( query=DummyQueryType, @@ -242,7 +243,7 @@ async def foo_generator(_info): await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_type_definition_with_sync_subscribe_function(): async def foo_generator(_obj, _info): yield {"foo": "FooValue"} @@ -262,7 +263,7 @@ async def foo_generator(_obj, _info): await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_type_definition_with_async_subscribe_function(): async def foo_generator(_obj, _info): await asyncio.sleep(0) @@ -290,7 +291,7 @@ async def subscribe_fn(obj, info): await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_only_resolve_the_first_field_of_invalid_multi_field(): did_resolve = {"foo": False, "bar": False} @@ -325,7 +326,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") @@ -343,7 +344,7 @@ async def 
resolves_to_an_error_if_schema_does_not_support_subscriptions(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_for_unknown_subscription_field(): schema = GraphQLSchema( query=DummyQueryType, @@ -364,7 +365,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_pass_through_unexpected_errors_thrown_in_subscribe(): schema = GraphQLSchema( query=DummyQueryType, @@ -375,7 +376,7 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): with pytest.raises(AttributeError): subscribe_with_bad_args(schema=schema, document={}) # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): expected_result = ( @@ -405,7 +406,7 @@ async def async_fn(obj, info): del result cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_for_subscription_resolver_errors(): expected_result = ( None, @@ -447,7 +448,7 @@ async def reject_with_error(*args): assert is_awaitable(result) assert await result == expected_result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_if_variables_were_wrong_type(): schema = GraphQLSchema( query=DummyQueryType, @@ -492,7 +493,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # Once a subscription returns a valid AsyncIterator, it can still yield errors. 
def describe_subscription_publish_phase(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() @@ -527,7 +528,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): assert await payload1 == (expected_payload, None) assert await payload2 == (expected_payload, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_when_queried_fields_are_async(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"asyncResolver": True}) @@ -564,7 +565,7 @@ async def produces_a_payload_when_queried_fields_are_async(): with pytest.raises(StopAsyncIteration): await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -643,7 +644,7 @@ async def produces_a_payload_per_subscription_event(): with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_function_returns_errors_with_defer(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldDefer": True}) @@ -707,7 +708,7 @@ async def subscribe_function_returns_errors_with_defer(): with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_function_returns_errors_with_stream(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldStream": True}) @@ -788,7 +789,7 @@ async def subscribe_function_returns_errors_with_stream(): with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -844,7 +845,7 @@ async def 
produces_a_payload_when_there_are_multiple_events(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -895,7 +896,7 @@ async def should_not_trigger_when_subscription_is_already_done(): with pytest.raises(StopAsyncIteration): await payload - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -936,7 +937,7 @@ async def should_not_trigger_when_subscription_is_thrown(): with pytest.raises(StopAsyncIteration): await payload - @pytest.mark.asyncio() + @pytest.mark.asyncio async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -992,7 +993,7 @@ async def event_order_is_correct_for_multiple_publishes(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_handle_error_during_execution_of_source_event(): async def generate_messages(_obj, _info): yield "Hello" @@ -1040,7 +1041,7 @@ def resolve_message(message, _info): # Subsequent events are still executed. 
assert await anext(subscription) == ({"newMessage": "Bonjour"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_pass_through_error_thrown_in_source_event_stream(): async def generate_messages(_obj, _info): yield "Hello" @@ -1077,7 +1078,7 @@ def resolve_message(message, _info): with pytest.raises(StopAsyncIteration): await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_work_with_sync_resolve_function(): async def generate_messages(_obj, _info): yield "Hello" @@ -1105,7 +1106,7 @@ def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_work_with_async_resolve_function(): async def generate_messages(_obj, _info): await asyncio.sleep(0) @@ -1135,7 +1136,7 @@ async def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_work_with_custom_async_iterator(): class MessageGenerator: resolved: List[str] = [] @@ -1185,7 +1186,7 @@ async def resolve(cls, message, _info) -> str: await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_close_custom_async_iterator(): class MessageGenerator: closed: bool = False diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index 36f8c9a5..d5e9504f 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -1,4 +1,5 @@ import pytest + from graphql import graphql_sync from graphql.execution import execute, execute_sync from graphql.language import parse @@ -51,7 +52,7 @@ def does_not_return_an_awaitable_if_mutation_fields_are_all_synchronous(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_an_awaitable_if_any_field_is_asynchronous(): doc = "query Example { syncField, asyncField }" result = execute(schema, parse(doc), "rootValue") @@ -80,7 
+81,7 @@ def does_not_throw_if_not_encountering_async_execution_with_check_sync(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_execution_with_check_sync(): doc = "query Example { syncField, asyncField }" @@ -93,7 +94,7 @@ async def throws_if_encountering_async_execution_with_check_sync(): del exc_info cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" @@ -112,7 +113,7 @@ async def throws_if_encountering_async_operation_without_check_sync(): del result cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_iterable_execution_with_check_sync(): doc = """ @@ -132,7 +133,7 @@ async def throws_if_encountering_async_iterable_execution_with_check_sync(): del exc_info cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_iterable_execution_without_check_sync(): doc = """ @@ -188,7 +189,7 @@ def does_not_throw_if_not_encountering_async_operation_with_check_sync(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_with_check_sync(): doc = "query Example { syncField, asyncField }" @@ -199,7 +200,7 @@ async def throws_if_encountering_async_operation_with_check_sync(): del exc_info cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { 
syncField, asyncField }" diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index feb7ca2b..0e17b4d4 100644 --- a/tests/language/test_block_string_fuzz.py +++ b/tests/language/test_block_string_fuzz.py @@ -1,4 +1,5 @@ import pytest + from graphql.language import Lexer, Source, TokenKind from graphql.language.block_string import ( is_printable_as_block_string, @@ -40,7 +41,7 @@ def assert_non_printable_block_string(test_value: str) -> None: def describe_print_block_string(): - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(80) def correctly_print_random_strings(): # Testing with length >7 is taking exponentially more time. However, it is diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 0bc9a398..d2d24931 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -3,6 +3,7 @@ from typing import Optional, Tuple import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind from graphql.language.lexer import is_punctuator_token_kind diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index b671e444..e6d33064 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -3,6 +3,7 @@ from typing import Optional, Tuple, cast import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index 6117c69d..b6ac41e0 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -1,6 +1,7 @@ from copy import deepcopy import pytest + from graphql.language import FieldNode, NameNode, parse, print_ast from ..fixtures import kitchen_sink_query # noqa: F401 diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index a5005a06..df64381a 100644 --- a/tests/language/test_schema_parser.py 
+++ b/tests/language/test_schema_parser.py @@ -6,6 +6,7 @@ from typing import Optional, Tuple import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index 95fcac97..083dcd0f 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -1,6 +1,7 @@ from copy import deepcopy import pytest + from graphql.language import NameNode, ScalarTypeDefinitionNode, parse, print_ast from ..fixtures import kitchen_sink_sdl # noqa: F401 diff --git a/tests/language/test_source.py b/tests/language/test_source.py index 02014445..24008605 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -4,6 +4,7 @@ from typing import cast import pytest + from graphql.language import Source, SourceLocation from ..utils import dedent diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index 1e74c6ff..00283fe1 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -5,6 +5,7 @@ from typing import Any, cast import pytest + from graphql.language import ( BREAK, REMOVE, diff --git a/tests/pyutils/test_async_reduce.py b/tests/pyutils/test_async_reduce.py index cbcef554..0ac606c8 100644 --- a/tests/pyutils/test_async_reduce.py +++ b/tests/pyutils/test_async_reduce.py @@ -1,6 +1,7 @@ from functools import reduce import pytest + from graphql.pyutils import async_reduce, is_awaitable @@ -16,7 +17,7 @@ def callback(accumulator, current_value): assert result == 42 assert result == reduce(callback, values, initial_value) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_sync_values_and_sync_initial_value(): def callback(accumulator, current_value): return accumulator + "-" + current_value @@ -26,7 +27,7 @@ def callback(accumulator, current_value): assert not is_awaitable(result) assert result == "foo-bar-baz" - @pytest.mark.asyncio() + 
@pytest.mark.asyncio async def works_with_async_initial_value(): async def async_initial_value(): return "foo" @@ -39,7 +40,7 @@ def callback(accumulator, current_value): assert is_awaitable(result) assert await result == "foo-bar-baz" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_async_callback(): async def async_callback(accumulator, current_value): return accumulator + "-" + current_value @@ -49,7 +50,7 @@ async def async_callback(accumulator, current_value): assert is_awaitable(result) assert await result == "foo-bar-baz" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_async_callback_and_async_initial_value(): async def async_initial_value(): return 1 / 8 diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index 8a19396d..3148520b 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -2,6 +2,7 @@ from typing import cast import pytest + from graphql import graphql_sync from graphql.pyutils import ( Description, diff --git a/tests/pyutils/test_format_list.py b/tests/pyutils/test_format_list.py index ee425eca..09567645 100644 --- a/tests/pyutils/test_format_list.py +++ b/tests/pyutils/test_format_list.py @@ -1,4 +1,5 @@ import pytest + from graphql.pyutils import and_list, or_list diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index 3721d018..94c62b48 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -6,6 +6,7 @@ from typing import Any import pytest + from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLDirective, @@ -138,7 +139,7 @@ def test_generator(): assert inspect(test_generator) == "" assert inspect(test_generator()) == "" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def inspect_coroutine(): async def test_coroutine(): pass diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index dcee07d9..b05f01af 100644 --- 
a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -3,6 +3,7 @@ from sys import version_info as python_version import pytest + from graphql.pyutils import is_awaitable @@ -66,7 +67,7 @@ async def some_async_function(): assert not isawaitable(some_async_function) assert not is_awaitable(some_async_function) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def recognizes_a_coroutine_object(): async def some_async_function(): return True @@ -92,7 +93,7 @@ def some_function(): assert is_awaitable(some_old_style_coroutine) assert is_awaitable(some_old_style_coroutine) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def recognizes_a_future_object(): async def some_async_function(): return True @@ -105,7 +106,7 @@ async def some_async_function(): assert await some_future is True - @pytest.mark.asyncio() + @pytest.mark.asyncio async def declines_an_async_generator(): async def some_async_generator_function(): yield True diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 2f30a8e2..f0a88dcb 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -1,11 +1,12 @@ from asyncio import sleep import pytest + from graphql.pyutils import SimplePubSub, is_awaitable def describe_simple_pub_sub(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_async_iterator_mock(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() @@ -49,7 +50,7 @@ async def subscribe_async_iterator_mock(): with pytest.raises(StopAsyncIteration): await iterator.__anext__() - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterator_aclose_empties_push_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -67,7 +68,7 @@ async def iterator_aclose_empties_push_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterator_aclose_empties_pull_queue(): pubsub = 
SimplePubSub() assert not pubsub.subscribers @@ -84,7 +85,7 @@ async def iterator_aclose_empties_pull_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterator_aclose_is_idempotent(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() diff --git a/tests/pyutils/test_undefined.py b/tests/pyutils/test_undefined.py index b6f62eea..b34611e3 100644 --- a/tests/pyutils/test_undefined.py +++ b/tests/pyutils/test_undefined.py @@ -1,6 +1,7 @@ import pickle import pytest + from graphql.pyutils import Undefined, UndefinedType diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py index 3f8713ab..575bf482 100644 --- a/tests/star_wars_schema.py +++ b/tests/star_wars_schema.py @@ -54,7 +54,6 @@ GraphQLSchema, GraphQLString, ) - from tests.star_wars_data import ( get_droid, get_friends, diff --git a/tests/test_star_wars_query.py b/tests/test_star_wars_query.py index 6e5bbf59..bb1008b8 100644 --- a/tests/test_star_wars_query.py +++ b/tests/test_star_wars_query.py @@ -1,4 +1,5 @@ import pytest + from graphql import graphql, graphql_sync from .star_wars_schema import star_wars_schema as schema @@ -6,7 +7,7 @@ def describe_star_wars_query_tests(): def describe_basic_queries(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): source = """ query HeroNameQuery { @@ -18,7 +19,7 @@ async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"name": "R2-D2"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_positional_arguments_to_graphql(): source = """ query HeroNameQuery { @@ -33,7 +34,7 @@ async def accepts_positional_arguments_to_graphql(): sync_result = graphql_sync(schema, source) assert sync_result == result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def 
allows_us_to_query_for_the_id_and_friends_of_r2_d2(): source = """ query HeroNameAndFriendsQuery { @@ -63,7 +64,7 @@ async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): ) def describe_nested_queries(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): source = """ query NestedQuery { @@ -121,7 +122,7 @@ async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): ) def describe_using_ids_and_query_parameters_to_refetch_objects(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_r2_d2_directly_using_his_id(): source = """ query { @@ -133,7 +134,7 @@ async def allows_us_to_query_for_r2_d2_directly_using_his_id(): result = await graphql(schema=schema, source=source) assert result == ({"droid": {"name": "R2-D2"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_characters_directly_using_their_id(): source = """ query FetchLukeAndC3POQuery { @@ -151,7 +152,7 @@ async def allows_us_to_query_characters_directly_using_their_id(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -166,7 +167,7 @@ async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): ) assert result == ({"human": {"name": "Luke Skywalker"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -181,7 +182,7 @@ async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): ) assert result == ({"human": {"name": "Han Solo"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def generic_query_that_gets_null_back_when_passed_invalid_id(): source = """ query humanQuery($id: String!) 
{ @@ -197,7 +198,7 @@ async def generic_query_that_gets_null_back_when_passed_invalid_id(): assert result == ({"human": None}, None) def describe_using_aliases_to_change_the_key_in_the_response(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): source = """ query FetchLukeAliased { @@ -209,7 +210,7 @@ async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): result = await graphql(schema=schema, source=source) assert result == ({"luke": {"name": "Luke Skywalker"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): source = """ query FetchLukeAndLeiaAliased { @@ -228,7 +229,7 @@ async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): ) def describe_uses_fragments_to_express_more_complex_queries(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_using_duplicated_content(): source = """ query DuplicateFields { @@ -251,7 +252,7 @@ async def allows_us_to_query_using_duplicated_content(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): source = """ query UseFragment { @@ -277,7 +278,7 @@ async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): ) def describe_using_typename_to_find_the_type_of_an_object(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_verify_that_r2_d2_is_a_droid(): source = """ query CheckTypeOfR2 { @@ -290,7 +291,7 @@ async def allows_us_to_verify_that_r2_d2_is_a_droid(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"__typename": "Droid", "name": "R2-D2"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_verify_that_luke_is_a_human(): source = """ query CheckTypeOfLuke { @@ -307,7 +308,7 @@ async def allows_us_to_verify_that_luke_is_a_human(): ) def 
describe_reporting_errors_raised_in_resolvers(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_secret_backstory(): source = """ query HeroNameQuery { @@ -329,7 +330,7 @@ async def correctly_reports_error_on_accessing_secret_backstory(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_backstory_in_a_list(): source = """ query HeroNameQuery { @@ -373,7 +374,7 @@ async def correctly_reports_error_on_accessing_backstory_in_a_list(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_through_an_alias(): source = """ query HeroNameQuery { diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 7d134a52..147e01bd 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -12,6 +12,7 @@ from typing import Any, AsyncIterable, NamedTuple import pytest + from graphql import ( GraphQLArgument, GraphQLBoolean, @@ -212,13 +213,13 @@ async def resolve_subscription_user(event, info, id): # noqa: ARG001, A002 ) -@pytest.fixture() +@pytest.fixture def context(): return {"registry": UserRegistry()} def describe_query(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def query_user(context): user = await context["registry"].create( firstName="John", lastName="Doe", tweets=42, verified=True @@ -250,7 +251,7 @@ async def query_user(context): def describe_mutation(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def create_user(context): received = {} @@ -302,7 +303,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.CREATED.value}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def update_user(context): received = {} @@ -358,7 +359,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def delete_user(context): received = {} @@ -400,7 +401,7 @@ def receive(msg): def 
describe_subscription(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_to_user_mutations(context): query = """ subscription ($userId: ID!) { diff --git a/tests/type/test_assert_name.py b/tests/type/test_assert_name.py index 55ef75c7..24ffc55d 100644 --- a/tests/type/test_assert_name.py +++ b/tests/type/test_assert_name.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.type import assert_enum_value_name, assert_name diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 88ce94f7..a8b7c24b 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -12,6 +12,7 @@ from typing_extensions import TypedDict import pytest + from graphql.error import GraphQLError from graphql.language import ( EnumTypeDefinitionNode, diff --git a/tests/type/test_directives.py b/tests/type/test_directives.py index 3f29a947..4257d81f 100644 --- a/tests/type/test_directives.py +++ b/tests/type/test_directives.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.language import DirectiveDefinitionNode, DirectiveLocation from graphql.type import GraphQLArgument, GraphQLDirective, GraphQLInt, GraphQLString diff --git a/tests/type/test_extensions.py b/tests/type/test_extensions.py index 5aa087e2..d28b9482 100644 --- a/tests/type/test_extensions.py +++ b/tests/type/test_extensions.py @@ -1,4 +1,5 @@ import pytest + from graphql.type import ( GraphQLArgument, GraphQLDirective, diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index bd006e74..c741eca3 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -1,6 +1,7 @@ from typing import Any import pytest + from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py index 27255388..0ef5e548 100644 --- a/tests/type/test_scalars.py +++ b/tests/type/test_scalars.py @@ 
-3,6 +3,7 @@ from typing import Any import pytest + from graphql.error import GraphQLError from graphql.language import parse_value as parse_value_to_ast from graphql.pyutils import Undefined diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index f589302b..e678de35 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -1,6 +1,7 @@ from copy import deepcopy import pytest + from graphql.language import ( DirectiveLocation, SchemaDefinitionNode, diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index ab364e9f..087832ba 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -3,6 +3,7 @@ from operator import attrgetter import pytest + from graphql.language import DirectiveLocation, parse from graphql.pyutils import inspect from graphql.type import ( diff --git a/tests/utilities/test_ast_from_value.py b/tests/utilities/test_ast_from_value.py index 1432d7a4..947f2b18 100644 --- a/tests/utilities/test_ast_from_value.py +++ b/tests/utilities/test_ast_from_value.py @@ -1,6 +1,7 @@ from math import inf, nan import pytest + from graphql.error import GraphQLError from graphql.language import ( BooleanValueNode, diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 518fb5bf..4b861003 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -1,6 +1,7 @@ from typing import cast import pytest + from graphql import graphql_sync from graphql.type import ( GraphQLArgument, diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index c18b5098..90af6cb9 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -4,6 +4,7 @@ from typing import Any, NamedTuple import pytest + from graphql.error import GraphQLError from graphql.pyutils import Undefined from graphql.type import ( diff --git 
a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 75c70efd..28ac0be4 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -3,6 +3,7 @@ from typing import Union import pytest + from graphql import graphql_sync from graphql.language import parse, print_ast from graphql.type import ( diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index 895ade9a..1c9dbd52 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -3,6 +3,7 @@ from copy import deepcopy import pytest + from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import ( IntrospectionQuery, @@ -105,7 +106,7 @@ def can_deep_copy_pickled_schema(): # check that introspecting the copied schema gives the same result assert introspection_from_schema(copied) == introspected_schema - @pytest.mark.slow() + @pytest.mark.slow def describe_deepcopy_and_pickle_big(): # pragma: no cover @pytest.mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index d708bfdb..cdc6062d 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -1,6 +1,7 @@ from __future__ import annotations import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind, parse from graphql.utilities import strip_ignored_characters diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 85c43aec..4c276e07 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -3,6 +3,7 @@ from json import dumps import 
pytest + from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind from graphql.utilities import strip_ignored_characters @@ -74,7 +75,7 @@ def lex_value(s: str) -> str | None: def describe_strip_ignored_characters(): - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_documents_with_random_combination_of_ignored_characters(): for ignored in ignored_tokens: @@ -85,7 +86,7 @@ def strips_documents_with_random_combination_of_ignored_characters(): ExpectStripped("".join(ignored_tokens)).to_equal("") - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_leading_and_trailing_ignored_tokens(): for token in punctuator_tokens + non_punctuator_tokens: @@ -100,7 +101,7 @@ def strips_random_leading_and_trailing_ignored_tokens(): ExpectStripped("".join(ignored_tokens) + token).to_equal(token) ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_punctuator_tokens(): for left in punctuator_tokens: @@ -117,7 +118,7 @@ def strips_random_ignored_tokens_between_punctuator_tokens(): left + right ) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): for non_punctuator in non_punctuator_tokens: @@ -136,7 +137,7 @@ def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): punctuator + "".join(ignored_tokens) + non_punctuator ).to_equal(punctuator + non_punctuator) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): for non_punctuator in non_punctuator_tokens: @@ -159,7 +160,7 @@ def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): non_punctuator + "".join(ignored_tokens) + punctuator ).to_equal(non_punctuator + punctuator) - 
@pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space(): for non_punctuator in non_punctuator_tokens: @@ -177,7 +178,7 @@ def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space() non_punctuator + " ..." ) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): for left in non_punctuator_tokens: @@ -194,7 +195,7 @@ def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): left + " " + right ) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def does_not_strip_random_ignored_tokens_embedded_in_the_string(): for ignored in ignored_tokens: @@ -205,7 +206,7 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_string(): ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): ignored_tokens_without_formatting = [ @@ -226,7 +227,7 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' ).to_stay_the_same() - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(80) def strips_ignored_characters_inside_random_block_strings(): # Testing with length >7 is taking exponentially more time. 
However it is diff --git a/tests/utilities/test_type_from_ast.py b/tests/utilities/test_type_from_ast.py index 282c8f50..fa75a9f9 100644 --- a/tests/utilities/test_type_from_ast.py +++ b/tests/utilities/test_type_from_ast.py @@ -1,4 +1,5 @@ import pytest + from graphql.language import TypeNode, parse_type from graphql.type import GraphQLList, GraphQLNonNull, GraphQLObjectType from graphql.utilities import type_from_ast diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py index 214acfea..3e60fbcb 100644 --- a/tests/utils/test_assert_equal_awaitables_or_values.py +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -15,7 +15,7 @@ def does_not_throw_when_given_equal_values(): == test_value ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_throw_when_given_equal_awaitables(): async def test_value(): return {"test": "test"} @@ -27,7 +27,7 @@ async def test_value(): == await test_value() ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_when_given_unequal_awaitables(): async def test_value(value): return value @@ -37,7 +37,7 @@ async def test_value(value): test_value({}), test_value({}), test_value({"test": "test"}) ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_when_given_mixture_of_equal_values_and_awaitables(): async def test_value(): return {"test": "test"} diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 37d57e9b..e8f08fe1 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.language import parse from graphql.utilities import TypeInfo, build_schema diff --git a/tox.ini b/tox.ini index c261c70e..e5953a48 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.5.7,<0.6 +deps = ruff>=0.6.4,<0.7 commands = ruff 
check src tests ruff format --check src tests From eb9edd583c7ccb2865a4dcde83298748cb7bbefe Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 14 Sep 2024 21:05:40 +0200 Subject: [PATCH 191/230] incremental: subsequent result records should not store parent references Replicates graphql/graphql-js@fae5da500bad94c39a7ecd77a4c4361b58d6d2da --- docs/conf.py | 3 + src/graphql/execution/execute.py | 92 ++++++------ .../execution/incremental_publisher.py | 134 ++++++++---------- tests/execution/test_defer.py | 12 +- tests/execution/test_stream.py | 14 +- 5 files changed, 122 insertions(+), 133 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index bd53efa0..43766c1b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -161,7 +161,9 @@ GraphQLTypeResolver GroupedFieldSet IncrementalDataRecord +InitialResultRecord Middleware +SubsequentDataRecord asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches graphql.execution.map_async_iterable.map_async_iterable @@ -169,6 +171,7 @@ graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments graphql.execution.incremental_publisher.IncrementalPublisher +graphql.execution.incremental_publisher.InitialResultRecord graphql.execution.incremental_publisher.StreamItemsRecord graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.language.lexer.EscapeSequence diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ae56c9b9..d61909a9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -86,7 +86,9 @@ IncrementalDataRecord, IncrementalPublisher, IncrementalResult, + InitialResultRecord, StreamItemsRecord, + SubsequentDataRecord, SubsequentIncrementalExecutionResult, ) from .middleware import MiddlewareManager @@ -352,7 +354,6 @@ class ExecutionContext: field_resolver: GraphQLFieldResolver type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver - 
errors: list[GraphQLError] incremental_publisher: IncrementalPublisher middleware_manager: MiddlewareManager | None @@ -371,7 +372,6 @@ def __init__( field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, - errors: list[GraphQLError], incremental_publisher: IncrementalPublisher, middleware_manager: MiddlewareManager | None, is_awaitable: Callable[[Any], bool] | None, @@ -385,7 +385,6 @@ def __init__( self.field_resolver = field_resolver self.type_resolver = type_resolver self.subscribe_field_resolver = subscribe_field_resolver - self.errors = errors self.incremental_publisher = incremental_publisher self.middleware_manager = middleware_manager if is_awaitable: @@ -478,7 +477,6 @@ def build( field_resolver or default_field_resolver, type_resolver or default_type_resolver, subscribe_field_resolver or default_field_resolver, - [], IncrementalPublisher(), middleware_manager, is_awaitable, @@ -514,15 +512,14 @@ def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: self.field_resolver, self.type_resolver, self.subscribe_field_resolver, - [], - # no need to update incrementalPublisher, - # incremental delivery is not supported for subscriptions self.incremental_publisher, self.middleware_manager, self.is_awaitable, ) - def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: + def execute_operation( + self, initial_result_record: InitialResultRecord + ) -> AwaitableOrValue[dict[str, Any]]: """Execute an operation. Implements the "Executing operations" section of the spec. 
@@ -551,12 +548,17 @@ def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, None, grouped_field_set) # type: ignore + )(root_type, root_value, None, grouped_field_set, initial_result_record) for patch in patches: label, patch_grouped_filed_set = patch self.execute_deferred_fragment( - root_type, root_value, patch_grouped_filed_set, label, None + root_type, + root_value, + patch_grouped_filed_set, + initial_result_record, + label, + None, ) return result @@ -567,6 +569,7 @@ def execute_fields_serially( source_value: Any, path: Path | None, grouped_field_set: GroupedFieldSet, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. @@ -581,7 +584,11 @@ def reducer( response_name, field_group = field_item field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_group, field_path + parent_type, + source_value, + field_group, + field_path, + incremental_data_record, ) if result is Undefined: return results @@ -607,7 +614,7 @@ def execute_fields( source_value: Any, path: Path | None, grouped_field_set: GroupedFieldSet, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -662,7 +669,7 @@ def execute_field( source: Any, field_group: FieldGroup, path: Path, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. 
@@ -774,7 +781,7 @@ def handle_field_error( return_type: GraphQLOutputType, field_group: FieldGroup, path: Path, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> None: """Handle error properly according to the field type.""" error = located_error(raw_error, field_group, path.as_list()) @@ -784,13 +791,9 @@ def handle_field_error( if is_non_null_type(return_type): raise error - errors = ( - incremental_data_record.errors if incremental_data_record else self.errors - ) - # Otherwise, error protection is applied, logging the error and resolving a # null value for this field if one is encountered. - errors.append(error) + self.incremental_publisher.add_field_error(incremental_data_record, error) def complete_value( self, @@ -799,7 +802,7 @@ def complete_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[Any]: """Complete a value. @@ -888,7 +891,7 @@ async def complete_awaitable_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> Any: """Complete an awaitable value.""" try: @@ -955,7 +958,7 @@ async def complete_async_iterator_value( info: GraphQLResolveInfo, path: Path, async_iterator: AsyncIterator[Any], - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> list[Any]: """Complete an async iterator. 
@@ -984,8 +987,8 @@ async def complete_async_iterator_value( info, item_type, path, - stream.label, incremental_data_record, + stream.label, ) ), timeout=ASYNC_DELAY, @@ -1039,7 +1042,7 @@ def complete_list_value( info: GraphQLResolveInfo, path: Path, result: AsyncIterable[Any] | Iterable[Any], - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[list[Any]]: """Complete a list value. @@ -1093,8 +1096,8 @@ def complete_list_value( field_group, info, item_type, - stream.label, previous_incremental_data_record, + stream.label, ) continue @@ -1138,7 +1141,7 @@ def complete_list_item_value( field_group: FieldGroup, info: GraphQLResolveInfo, item_path: Path, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> bool: """Complete a list item value by adding it to the completed results. @@ -1229,7 +1232,7 @@ def complete_abstract_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[Any]: """Complete an abstract value. 
@@ -1344,7 +1347,7 @@ def complete_object_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current @@ -1379,7 +1382,7 @@ def collect_and_execute_subfields( field_group: FieldGroup, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" sub_grouped_field_set, sub_patches = self.collect_subfields( @@ -1396,9 +1399,9 @@ def collect_and_execute_subfields( return_type, result, sub_patch_grouped_field_set, + incremental_data_record, label, path, - incremental_data_record, ) return sub_fields @@ -1474,9 +1477,9 @@ def execute_deferred_fragment( parent_type: GraphQLObjectType, source_value: Any, fields: GroupedFieldSet, + parent_context: IncrementalDataRecord, label: str | None = None, path: Path | None = None, - parent_context: IncrementalDataRecord | None = None, ) -> None: """Execute deferred fragment.""" incremental_publisher = self.incremental_publisher @@ -1529,9 +1532,9 @@ def execute_stream_field( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, + parent_context: IncrementalDataRecord, label: str | None = None, - parent_context: IncrementalDataRecord | None = None, - ) -> IncrementalDataRecord: + ) -> SubsequentDataRecord: """Execute stream field.""" is_awaitable = self.is_awaitable incremental_publisher = self.incremental_publisher @@ -1678,8 +1681,8 @@ async def execute_stream_async_iterator( info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, + parent_context: IncrementalDataRecord, label: str | None = None, - parent_context: IncrementalDataRecord | None = 
None, ) -> None: """Execute stream iterator.""" incremental_publisher = self.incremental_publisher @@ -1877,21 +1880,24 @@ def execute_impl( # Errors from sub-fields of a NonNull type may propagate to the top level, # at which point we still log the error and null the parent field, which # in this case is the entire response. - errors = context.errors incremental_publisher = context.incremental_publisher + initial_result_record = incremental_publisher.prepare_initial_result_record() build_response = context.build_response try: - result = context.execute_operation() + result = context.execute_operation(initial_result_record) if context.is_awaitable(result): # noinspection PyShadowingNames async def await_result() -> Any: try: + errors = incremental_publisher.get_initial_errors( + initial_result_record + ) initial_result = build_response( await result, # type: ignore errors, ) - incremental_publisher.publish_initial() + incremental_publisher.publish_initial(initial_result_record) if incremental_publisher.has_next(): return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( @@ -1902,14 +1908,17 @@ async def await_result() -> Any: subsequent_results=incremental_publisher.subscribe(), ) except GraphQLError as error: - errors.append(error) + incremental_publisher.add_field_error(initial_result_record, error) + errors = incremental_publisher.get_initial_errors( + initial_result_record + ) return build_response(None, errors) return initial_result return await_result() - initial_result = build_response(result, errors) # type: ignore - incremental_publisher.publish_initial() + initial_result = build_response(result, initial_result_record.errors) # type: ignore + incremental_publisher.publish_initial(initial_result_record) if incremental_publisher.has_next(): return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( @@ -1920,7 +1929,8 @@ async def await_result() -> Any: 
subsequent_results=incremental_publisher.subscribe(), ) except GraphQLError as error: - errors.append(error) + incremental_publisher.add_field_error(initial_result_record, error) + errors = incremental_publisher.get_initial_errors(initial_result_record) return build_response(None, errors) return initial_result diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index fb660e85..bf145da3 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -33,6 +33,7 @@ "FormattedIncrementalResult", "FormattedIncrementalStreamResult", "FormattedSubsequentIncrementalExecutionResult", + "InitialResultRecord", "IncrementalDataRecord", "IncrementalDeferResult", "IncrementalPublisher", @@ -340,34 +341,23 @@ class IncrementalPublisher: The internal publishing state is managed as follows: - ``_released``: the set of Incremental Data records that are ready to be sent to the + ``_released``: the set of Subsequent Data records that are ready to be sent to the client, i.e. their parents have completed and they have also completed. - ``_pending``: the set of Incremental Data records that are definitely pending, i.e. + ``_pending``: the set of Subsequent Data records that are definitely pending, i.e. their parents have completed so that they can no longer be filtered. This includes - all Incremental Data records in `released`, as well as Incremental Data records that + all Subsequent Data records in `released`, as well as Subsequent Data records that have not yet completed. - ``_initial_result``: a record containing the state of the initial result, - as follows: - ``is_completed``: indicates whether the initial result has completed. - ``children``: the set of Incremental Data records that can be be published when the - initial result is completed. - - Each Incremental Data record also contains similar metadata, i.e. 
these records also - contain similar ``is_completed`` and ``children`` properties. - Note: Instead of sets we use dicts (with values set to None) which preserve order and thereby achieve more deterministic results. """ - _initial_result: InitialResult - _released: dict[IncrementalDataRecord, None] - _pending: dict[IncrementalDataRecord, None] + _released: dict[SubsequentDataRecord, None] + _pending: dict[SubsequentDataRecord, None] _resolve: Event | None def __init__(self) -> None: - self._initial_result = InitialResult({}, False) self._released = {} self._pending = {} self._resolve = None # lazy initialization @@ -420,33 +410,33 @@ async def subscribe( close_async_iterators.append(close_async_iterator) await gather(*close_async_iterators) + def prepare_initial_result_record(self) -> InitialResultRecord: + """Prepare a new initial result record.""" + return InitialResultRecord(errors=[], children={}) + def prepare_new_deferred_fragment_record( self, label: str | None, path: Path | None, - parent_context: IncrementalDataRecord | None, + parent_context: IncrementalDataRecord, ) -> DeferredFragmentRecord: """Prepare a new deferred fragment record.""" - deferred_fragment_record = DeferredFragmentRecord(label, path, parent_context) + deferred_fragment_record = DeferredFragmentRecord(label, path) - context = parent_context or self._initial_result - context.children[deferred_fragment_record] = None + parent_context.children[deferred_fragment_record] = None return deferred_fragment_record def prepare_new_stream_items_record( self, label: str | None, path: Path | None, - parent_context: IncrementalDataRecord | None, + parent_context: IncrementalDataRecord, async_iterator: AsyncIterator[Any] | None = None, ) -> StreamItemsRecord: """Prepare a new stream items record.""" - stream_items_record = StreamItemsRecord( - label, path, parent_context, async_iterator - ) + stream_items_record = StreamItemsRecord(label, path, async_iterator) - context = parent_context or 
self._initial_result - context.children[stream_items_record] = None + parent_context.children[stream_items_record] = None return stream_items_record def complete_deferred_fragment_record( @@ -481,29 +471,34 @@ def add_field_error( """Add a field error to the given incremental data record.""" incremental_data_record.errors.append(error) - def publish_initial(self) -> None: + def publish_initial(self, initial_result: InitialResultRecord) -> None: """Publish the initial result.""" - for child in self._initial_result.children: + for child in initial_result.children: + if child.filtered: + continue self._publish(child) + def get_initial_errors( + self, initial_result: InitialResultRecord + ) -> list[GraphQLError]: + """Get the errors from the given initial result.""" + return initial_result.errors + def filter( self, null_path: Path, - erroring_incremental_data_record: IncrementalDataRecord | None, + erroring_incremental_data_record: IncrementalDataRecord, ) -> None: """Filter out the given erroring incremental data record.""" null_path_list = null_path.as_list() - children = (erroring_incremental_data_record or self._initial_result).children + descendants = self._get_descendants(erroring_incremental_data_record.children) - for child in self._get_descendants(children): + for child in descendants: if not self._matches_path(child.path, null_path_list): continue - self._delete(child) - parent = child.parent_context or self._initial_result - with suppress_key_error: - del parent.children[child] + child.filtered = True if isinstance(child, StreamItemsRecord): async_iterator = child.async_iterator @@ -522,32 +517,24 @@ def _trigger(self) -> None: resolve.set() self._resolve = Event() - def _introduce(self, item: IncrementalDataRecord) -> None: + def _introduce(self, item: SubsequentDataRecord) -> None: """Introduce a new IncrementalDataRecord.""" self._pending[item] = None - def _release(self, item: IncrementalDataRecord) -> None: + def _release(self, item: 
SubsequentDataRecord) -> None: """Release the given IncrementalDataRecord.""" if item in self._pending: self._released[item] = None self._trigger() - def _push(self, item: IncrementalDataRecord) -> None: + def _push(self, item: SubsequentDataRecord) -> None: """Push the given IncrementalDataRecord.""" self._released[item] = None self._pending[item] = None self._trigger() - def _delete(self, item: IncrementalDataRecord) -> None: - """Delete the given IncrementalDataRecord.""" - with suppress_key_error: - del self._released[item] - with suppress_key_error: - del self._pending[item] - self._trigger() - def _get_incremental_result( - self, completed_records: Collection[IncrementalDataRecord] + self, completed_records: Collection[SubsequentDataRecord] ) -> SubsequentIncrementalExecutionResult | None: """Get the incremental result with the completed records.""" incremental_results: list[IncrementalResult] = [] @@ -556,6 +543,8 @@ def _get_incremental_result( for incremental_data_record in completed_records: incremental_result: IncrementalResult for child in incremental_data_record.children: + if child.filtered: + continue self._publish(child) if isinstance(incremental_data_record, StreamItemsRecord): items = incremental_data_record.items @@ -591,18 +580,18 @@ def _get_incremental_result( return SubsequentIncrementalExecutionResult(has_next=False) return None - def _publish(self, incremental_data_record: IncrementalDataRecord) -> None: + def _publish(self, subsequent_result_record: SubsequentDataRecord) -> None: """Publish the given incremental data record.""" - if incremental_data_record.is_completed: - self._push(incremental_data_record) + if subsequent_result_record.is_completed: + self._push(subsequent_result_record) else: - self._introduce(incremental_data_record) + self._introduce(subsequent_result_record) def _get_descendants( self, - children: dict[IncrementalDataRecord, None], - descendants: dict[IncrementalDataRecord, None] | None = None, - ) -> 
dict[IncrementalDataRecord, None]: + children: dict[SubsequentDataRecord, None], + descendants: dict[SubsequentDataRecord, None] | None = None, + ) -> dict[SubsequentDataRecord, None]: """Get the descendants of the given children.""" if descendants is None: descendants = {} @@ -625,6 +614,13 @@ def _add_task(self, awaitable: Awaitable[Any]) -> None: task.add_done_callback(tasks.discard) +class InitialResultRecord(NamedTuple): + """Formatted subsequent incremental execution result""" + + errors: list[GraphQLError] + children: dict[SubsequentDataRecord, None] + + class DeferredFragmentRecord: """A record collecting data marked with the defer directive""" @@ -632,22 +628,16 @@ class DeferredFragmentRecord: label: str | None path: list[str | int] data: dict[str, Any] | None - parent_context: IncrementalDataRecord | None - children: dict[IncrementalDataRecord, None] + children: dict[SubsequentDataRecord, None] is_completed: bool + filtered: bool - def __init__( - self, - label: str | None, - path: Path | None, - parent_context: IncrementalDataRecord | None, - ) -> None: + def __init__(self, label: str | None, path: Path | None) -> None: self.label = label self.path = path.as_list() if path else [] - self.parent_context = parent_context self.errors = [] self.children = {} - self.is_completed = False + self.is_completed = self.filtered = False self.data = None def __repr__(self) -> str: @@ -655,8 +645,6 @@ def __repr__(self) -> str: args: list[str] = [f"path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") - if self.parent_context: - args.append("parent_context") if self.data is not None: args.append("data") return f"{name}({', '.join(args)})" @@ -669,26 +657,24 @@ class StreamItemsRecord: label: str | None path: list[str | int] items: list[str] | None - parent_context: IncrementalDataRecord | None - children: dict[IncrementalDataRecord, None] + children: dict[SubsequentDataRecord, None] async_iterator: AsyncIterator[Any] | None 
is_completed_async_iterator: bool is_completed: bool + filtered: bool def __init__( self, label: str | None, path: Path | None, - parent_context: IncrementalDataRecord | None, async_iterator: AsyncIterator[Any] | None = None, ) -> None: self.label = label self.path = path.as_list() if path else [] - self.parent_context = parent_context self.async_iterator = async_iterator self.errors = [] self.children = {} - self.is_completed_async_iterator = self.is_completed = False + self.is_completed_async_iterator = self.is_completed = self.filtered = False self.items = None def __repr__(self) -> str: @@ -696,11 +682,11 @@ def __repr__(self) -> str: args: list[str] = [f"path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") - if self.parent_context: - args.append("parent_context") if self.items is not None: args.append("items") return f"{name}({', '.join(args)})" -IncrementalDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] +SubsequentDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] + +IncrementalDataRecord = Union[InitialResultRecord, SubsequentDataRecord] diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 312a2a0b..41161248 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -321,17 +321,13 @@ def can_compare_subsequent_incremental_execution_result(): } def can_print_deferred_fragment_record(): - record = DeferredFragmentRecord(None, None, None) + record = DeferredFragmentRecord(None, None) assert str(record) == "DeferredFragmentRecord(path=[])" - record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar"), record) - assert ( - str(record) == "DeferredFragmentRecord(" - "path=['bar'], label='foo', parent_context)" - ) + record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar")) + assert str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo')" record.data = {"hello": "world"} assert ( - str(record) == "DeferredFragmentRecord(" - "path=['bar'], 
label='foo', parent_context, data)" + str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo', data)" ) @pytest.mark.asyncio diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 8a1ca605..42188517 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -173,18 +173,12 @@ def can_format_and_print_incremental_stream_result(): ) def can_print_stream_record(): - record = StreamItemsRecord(None, None, None, None) + record = StreamItemsRecord(None, None, None) assert str(record) == "StreamItemsRecord(path=[])" - record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), record, None) - assert ( - str(record) == "StreamItemsRecord(" - "path=['bar'], label='foo', parent_context)" - ) + record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), None) + assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo')" record.items = ["hello", "world"] - assert ( - str(record) == "StreamItemsRecord(" - "path=['bar'], label='foo', parent_context, items)" - ) + assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo', items)" # noinspection PyTypeChecker def can_compare_incremental_stream_result(): From 7134b68ebc2eff0dfc7ab2988851854c9aba964b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 14 Sep 2024 21:26:04 +0200 Subject: [PATCH 192/230] Speedup sorting and building/extending schema Replicates graphql/graphql-js@361078603d0d67fee2dce8214f7213fa14b393f0 --- src/graphql/utilities/extend_schema.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index fc6cee77..72283269 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -230,8 +230,12 @@ def extend_schema_args( return schema_kwargs self = cls(type_extensions) - for existing_type in schema_kwargs["types"] or (): - self.type_map[existing_type.name] = 
self.extend_named_type(existing_type) + + self.type_map = { + type_.name: self.extend_named_type(type_) + for type_ in schema_kwargs["types"] or () + } + for type_node in type_defs: name = type_node.name.value self.type_map[name] = std_type_map.get(name) or self.build_type(type_node) From 2a3799fec77d5e71d7ebe61d0e257a13cc3b2f4b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 14 Sep 2024 21:32:28 +0200 Subject: [PATCH 193/230] Add support for fourfold nested lists in introspection Replicates graphql/graphql-js@826ae7f952dcccd8bb8a7ade3d9f9c7540edcc06 --- src/graphql/utilities/get_introspection_query.py | 8 ++++++++ tests/utilities/test_build_client_schema.py | 14 +++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index cffaa12d..4babfaec 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -149,6 +149,14 @@ def input_deprecation(string: str) -> str | None: ofType {{ kind name + ofType {{ + kind + name + ofType {{ + kind + name + }} + }} }} }} }} diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 4b861003..8a4cecba 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -991,11 +991,11 @@ def throws_when_missing_directive_args(): build_client_schema(introspection) def describe_very_deep_decorators_are_not_supported(): - def fails_on_very_deep_lists_more_than_7_levels(): + def fails_on_very_deep_lists_more_than_8_levels(): schema = build_schema( """ type Query { - foo: [[[[[[[[String]]]]]]]] + foo: [[[[[[[[[[String]]]]]]]]]] } """ ) @@ -1010,11 +1010,11 @@ def fails_on_very_deep_lists_more_than_7_levels(): " Decorated type deeper than introspection query." 
) - def fails_on_a_very_deep_non_null_more_than_7_levels(): + def fails_on_a_very_deep_more_than_8_levels_non_null(): schema = build_schema( """ type Query { - foo: [[[[String!]!]!]!] + foo: [[[[[String!]!]!]!]!] } """ ) @@ -1029,12 +1029,12 @@ def fails_on_a_very_deep_non_null_more_than_7_levels(): " Decorated type deeper than introspection query." ) - def succeeds_on_deep_types_less_or_equal_7_levels(): - # e.g., fully non-null 3D matrix + def succeeds_on_deep_less_or_equal_8_levels_types(): + # e.g., fully non-null 4D matrix sdl = dedent( """ type Query { - foo: [[[String!]!]!]! + foo: [[[[String!]!]!]!]! } """ ) From 69a30bbc76c2e163a205e27c377b0c863440721e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 15 Sep 2024 17:58:39 +0200 Subject: [PATCH 194/230] Configure default test path --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c3c2367c..28c7707f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -308,12 +308,14 @@ minversion = "7.4" addopts = "--benchmark-disable" # Deactivate default name pattern for test classes (we use pytest_describe). python_classes = "PyTest*" -# Handle all async fixtures and tests automatically by asyncio +# Handle all async fixtures and tests automatically by asyncio, asyncio_mode = "auto" # Set a timeout in seconds for aborting tests that run too long. timeout = "100" # Ignore config options not (yet) available in older Python versions. filterwarnings = "ignore::pytest.PytestConfigWarning" +# All tests can be found in the tests directory. 
+testpaths = ["tests"] [build-system] requires = ["poetry_core>=1.6.1,<2"] From 26701397d84338a42c7acbce78368ae8f9d97271 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 15 Sep 2024 18:15:50 +0200 Subject: [PATCH 195/230] incremental publisher should handle all response building Replicates graphql/graphql-js@1f30b54edc3f7b8443f4aedc48fc56c0d2be9705 --- docs/conf.py | 9 +- src/graphql/execution/__init__.py | 10 +- src/graphql/execution/execute.py | 278 +-------------- .../execution/incremental_publisher.py | 331 +++++++++++++++--- 4 files changed, 301 insertions(+), 327 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 43766c1b..4655434b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -150,6 +150,7 @@ EnterLeaveVisitor ExperimentalIncrementalExecutionResults FieldGroup +FormattedIncrementalResult FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType @@ -161,19 +162,19 @@ GraphQLTypeResolver GroupedFieldSet IncrementalDataRecord +IncrementalResult InitialResultRecord Middleware SubsequentDataRecord asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches -graphql.execution.map_async_iterable.map_async_iterable -graphql.execution.Middleware -graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments +graphql.execution.map_async_iterable.map_async_iterable +graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.InitialResultRecord graphql.execution.incremental_publisher.StreamItemsRecord -graphql.execution.incremental_publisher.DeferredFragmentRecord +graphql.execution.Middleware graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor graphql.type.definition.GT_co diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index aec85be1..2d5225be 100644 --- a/src/graphql/execution/__init__.py +++ 
b/src/graphql/execution/__init__.py @@ -14,21 +14,21 @@ default_type_resolver, subscribe, ExecutionContext, - ExecutionResult, - ExperimentalIncrementalExecutionResults, - InitialIncrementalExecutionResult, - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, Middleware, ) from .incremental_publisher import ( + ExecutionResult, + ExperimentalIncrementalExecutionResults, FormattedSubsequentIncrementalExecutionResult, FormattedIncrementalDeferResult, FormattedIncrementalResult, FormattedIncrementalStreamResult, + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, IncrementalDeferResult, IncrementalResult, IncrementalStreamResult, + InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, ) from .async_iterables import map_async_iterable diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index d61909a9..ca4df8ff 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -13,20 +13,14 @@ Awaitable, Callable, Iterable, - Iterator, List, NamedTuple, Optional, - Sequence, Tuple, Union, cast, ) -try: - from typing import TypedDict -except ImportError: # Python < 3.8 - from typing_extensions import TypedDict try: from typing import TypeAlias, TypeGuard except ImportError: # Python < 3.10 @@ -37,7 +31,7 @@ except ImportError: # Python < 3.7 from concurrent.futures import TimeoutError -from ..error import GraphQLError, GraphQLFormattedError, located_error +from ..error import GraphQLError, located_error from ..language import ( DocumentNode, FragmentDefinitionNode, @@ -82,14 +76,13 @@ ) from .incremental_publisher import ( ASYNC_DELAY, - FormattedIncrementalResult, + ExecutionResult, + ExperimentalIncrementalExecutionResults, IncrementalDataRecord, IncrementalPublisher, - IncrementalResult, InitialResultRecord, StreamItemsRecord, SubsequentDataRecord, - SubsequentIncrementalExecutionResult, ) from .middleware import MiddlewareManager from .values import 
get_argument_values, get_directive_values, get_variable_values @@ -112,12 +105,7 @@ async def anext(iterator: AsyncIterator) -> Any: "execute_sync", "experimental_execute_incrementally", "subscribe", - "ExecutionResult", "ExecutionContext", - "ExperimentalIncrementalExecutionResults", - "FormattedExecutionResult", - "FormattedInitialIncrementalExecutionResult", - "InitialIncrementalExecutionResult", "Middleware", ] @@ -144,181 +132,7 @@ async def anext(iterator: AsyncIterator) -> Any: # 3) inline fragment "spreads" e.g. "...on Type { a }" -class FormattedExecutionResult(TypedDict, total=False): - """Formatted execution result""" - - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] - extensions: dict[str, Any] - - -class ExecutionResult: - """The result of GraphQL execution. - - - ``data`` is the result of a successful execution of the query. - - ``errors`` is included when any errors occurred as a non-empty list. - - ``extensions`` is reserved for adding non-standard properties. 
- """ - - __slots__ = "data", "errors", "extensions" - - data: dict[str, Any] | None - errors: list[GraphQLError] | None - extensions: dict[str, Any] | None - - def __init__( - self, - data: dict[str, Any] | None = None, - errors: list[GraphQLError] | None = None, - extensions: dict[str, Any] | None = None, - ) -> None: - self.data = data - self.errors = errors - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - ext = "" if self.extensions is None else f", extensions={self.extensions}" - return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" - - def __iter__(self) -> Iterator[Any]: - return iter((self.data, self.errors)) - - @property - def formatted(self) -> FormattedExecutionResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedExecutionResult = {"data": self.data} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> bool: - if isinstance(other, dict): - if "extensions" not in other: - return other == {"data": self.data, "errors": self.errors} - return other == { - "data": self.data, - "errors": self.errors, - "extensions": self.extensions, - } - if isinstance(other, tuple): - if len(other) == 2: - return other == (self.data, self.errors) - return other == (self.data, self.errors, self.extensions) - return ( - isinstance(other, self.__class__) - and other.data == self.data - and other.errors == self.errors - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other - - -class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): - """Formatted initial incremental execution result""" - - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] - hasNext: bool - incremental: list[FormattedIncrementalResult] 
- extensions: dict[str, Any] - - -class InitialIncrementalExecutionResult: - """Initial incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. - """ - - data: dict[str, Any] | None - errors: list[GraphQLError] | None - incremental: Sequence[IncrementalResult] | None - has_next: bool - extensions: dict[str, Any] | None - - __slots__ = "data", "errors", "has_next", "incremental", "extensions" - - def __init__( - self, - data: dict[str, Any] | None = None, - errors: list[GraphQLError] | None = None, - incremental: Sequence[IncrementalResult] | None = None, - has_next: bool = False, - extensions: dict[str, Any] | None = None, - ) -> None: - self.data = data - self.errors = errors - self.incremental = incremental - self.has_next = has_next - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") - if self.has_next: - args.append("has_next") - if self.extensions: - args.append(f"extensions={self.extensions}") - return f"{name}({', '.join(args)})" - - @property - def formatted(self) -> FormattedInitialIncrementalExecutionResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.incremental: - formatted["incremental"] = [result.formatted for result in self.incremental] - formatted["hasNext"] = self.has_next - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> bool: - if isinstance(other, dict): - return ( - other.get("data") == self.data - and other.get("errors") == self.errors - and ( - "incremental" 
not in other - or other["incremental"] == self.incremental - ) - and ("hasNext" not in other or other["hasNext"] == self.has_next) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) - ) - if isinstance(other, tuple): - size = len(other) - return ( - 1 < size < 6 - and ( - self.data, - self.errors, - self.incremental, - self.has_next, - self.extensions, - )[:size] - == other - ) - return ( - isinstance(other, self.__class__) - and other.data == self.data - and other.errors == self.errors - and other.incremental == self.incremental - and other.has_next == self.has_next - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other +Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] class StreamArguments(NamedTuple): @@ -328,16 +142,6 @@ class StreamArguments(NamedTuple): label: str | None -class ExperimentalIncrementalExecutionResults(NamedTuple): - """Execution results when retrieved incrementally.""" - - initial_result: InitialIncrementalExecutionResult - subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] - - -Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] - - class ExecutionContext: """Data that must be available at all points during query execution. @@ -482,24 +286,6 @@ def build( is_awaitable, ) - @staticmethod - def build_response( - data: dict[str, Any] | None, errors: list[GraphQLError] - ) -> ExecutionResult: - """Build response. - - Given a completed execution context and data, build the (data, errors) response - defined by the "Response" section of the GraphQL spec. - """ - if not errors: - return ExecutionResult(data, None) - # Sort the error list in order to make it deterministic, since we might have - # been using parallel execution. 
- errors.sort( - key=lambda error: (error.locations or [], error.path or [], error.message) - ) - return ExecutionResult(data, errors) - def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: """Create a copy of the execution context for usage with subscribe events.""" return self.__class__( @@ -1882,57 +1668,29 @@ def execute_impl( # in this case is the entire response. incremental_publisher = context.incremental_publisher initial_result_record = incremental_publisher.prepare_initial_result_record() - build_response = context.build_response try: - result = context.execute_operation(initial_result_record) + data = context.execute_operation(initial_result_record) + if context.is_awaitable(data): - if context.is_awaitable(result): - # noinspection PyShadowingNames - async def await_result() -> Any: + async def await_response() -> ( + ExecutionResult | ExperimentalIncrementalExecutionResults + ): try: - errors = incremental_publisher.get_initial_errors( - initial_result_record - ) - initial_result = build_response( - await result, # type: ignore - errors, + return incremental_publisher.build_data_response( + initial_result_record, + await data, # type: ignore ) - incremental_publisher.publish_initial(initial_result_record) - if incremental_publisher.has_next(): - return ExperimentalIncrementalExecutionResults( - initial_result=InitialIncrementalExecutionResult( - initial_result.data, - initial_result.errors, - has_next=True, - ), - subsequent_results=incremental_publisher.subscribe(), - ) except GraphQLError as error: - incremental_publisher.add_field_error(initial_result_record, error) - errors = incremental_publisher.get_initial_errors( - initial_result_record + return incremental_publisher.build_error_response( + initial_result_record, error ) - return build_response(None, errors) - return initial_result - return await_result() + return await_response() + + return incremental_publisher.build_data_response(initial_result_record, data) # type: 
ignore - initial_result = build_response(result, initial_result_record.errors) # type: ignore - incremental_publisher.publish_initial(initial_result_record) - if incremental_publisher.has_next(): - return ExperimentalIncrementalExecutionResults( - initial_result=InitialIncrementalExecutionResult( - initial_result.data, - initial_result.errors, - has_next=True, - ), - subsequent_results=incremental_publisher.subscribe(), - ) except GraphQLError as error: - incremental_publisher.add_field_error(initial_result_record, error) - errors = incremental_publisher.get_initial_errors(initial_result_record) - return build_response(None, errors) - return initial_result + return incremental_publisher.build_error_response(initial_result_record, error) def assume_not_awaitable(_value: Any) -> bool: diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index bf145da3..fdc35fff 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -11,6 +11,7 @@ AsyncIterator, Awaitable, Collection, + Iterator, NamedTuple, Sequence, Union, @@ -21,7 +22,6 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict - if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError from ..pyutils import Path @@ -29,10 +29,15 @@ __all__ = [ "ASYNC_DELAY", "DeferredFragmentRecord", + "ExecutionResult", + "ExperimentalIncrementalExecutionResults", + "FormattedExecutionResult", "FormattedIncrementalDeferResult", "FormattedIncrementalResult", "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", "FormattedSubsequentIncrementalExecutionResult", + "InitialIncrementalExecutionResult", "InitialResultRecord", "IncrementalDataRecord", "IncrementalDeferResult", @@ -49,6 +54,190 @@ suppress_key_error = suppress(KeyError) +class FormattedExecutionResult(TypedDict, total=False): + """Formatted execution result""" + + data: dict[str, Any] | None + 
errors: list[GraphQLFormattedError] + extensions: dict[str, Any] + + +class ExecutionResult: + """The result of GraphQL execution. + + - ``data`` is the result of a successful execution of the query. + - ``errors`` is included when any errors occurred as a non-empty list. + - ``extensions`` is reserved for adding non-standard properties. + """ + + __slots__ = "data", "errors", "extensions" + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + extensions: dict[str, Any] | None + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + ext = "" if self.extensions is None else f", extensions={self.extensions}" + return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" + + def __iter__(self) -> Iterator[Any]: + return iter((self.data, self.errors)) + + @property + def formatted(self) -> FormattedExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + if "extensions" not in other: + return other == {"data": self.data, "errors": self.errors} + return other == { + "data": self.data, + "errors": self.errors, + "extensions": self.extensions, + } + if isinstance(other, tuple): + if len(other) == 2: + return other == (self.data, self.errors) + return other == (self.data, self.errors, self.extensions) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.extensions == self.extensions + ) 
+ + def __ne__(self, other: object) -> bool: + return not self == other + + +class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): + """Formatted initial incremental execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + hasNext: bool + incremental: list[FormattedIncrementalResult] + extensions: dict[str, Any] + + +class InitialIncrementalExecutionResult: + """Initial incremental execution result. + + - ``has_next`` is True if a future payload is expected. + - ``incremental`` is a list of the results from defer/stream directives. + """ + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + incremental: Sequence[IncrementalResult] | None + has_next: bool + extensions: dict[str, Any] | None + + __slots__ = "data", "errors", "has_next", "incremental", "extensions" + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + incremental: Sequence[IncrementalResult] | None = None, + has_next: bool = False, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.incremental = incremental + self.has_next = has_next + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.has_next: + args.append("has_next") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedInitialIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.incremental: + formatted["incremental"] = [result.formatted for result 
in self.incremental] + formatted["hasNext"] = self.has_next + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and other.get("errors") == self.errors + and ( + "incremental" not in other + or other["incremental"] == self.incremental + ) + and ("hasNext" not in other or other["hasNext"] == self.has_next) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and ( + self.data, + self.errors, + self.incremental, + self.has_next, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.incremental == self.incremental + and other.has_next == self.has_next + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class ExperimentalIncrementalExecutionResults(NamedTuple): + """Execution results when retrieved incrementally.""" + + initial_result: InitialIncrementalExecutionResult + subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] + + class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" @@ -363,53 +552,6 @@ def __init__(self) -> None: self._resolve = None # lazy initialization self._tasks: set[Awaitable] = set() - def has_next(self) -> bool: - """Check whether there is a next incremental result.""" - return bool(self._pending) - - async def subscribe( - self, - ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: - """Subscribe to the incremental results.""" - is_done = False - pending = self._pending - - try: - while not is_done: - released = self._released - for item in released: - with suppress_key_error: - del 
pending[item] - self._released = {} - - result = self._get_incremental_result(released) - - if not self.has_next(): - is_done = True - - if result is not None: - yield result - else: - resolve = self._resolve - if resolve is None: - self._resolve = resolve = Event() - await resolve.wait() - finally: - close_async_iterators = [] - for incremental_data_record in pending: - if isinstance( - incremental_data_record, StreamItemsRecord - ): # pragma: no cover - async_iterator = incremental_data_record.async_iterator - if async_iterator: - try: - close_async_iterator = async_iterator.aclose() # type: ignore - except AttributeError: - pass - else: - close_async_iterators.append(close_async_iterator) - await gather(*close_async_iterators) - def prepare_initial_result_record(self) -> InitialResultRecord: """Prepare a new initial result record.""" return InitialResultRecord(errors=[], children={}) @@ -471,18 +613,47 @@ def add_field_error( """Add a field error to the given incremental data record.""" incremental_data_record.errors.append(error) - def publish_initial(self, initial_result: InitialResultRecord) -> None: - """Publish the initial result.""" - for child in initial_result.children: + def build_data_response( + self, initial_result_record: InitialResultRecord, data: dict[str, Any] | None + ) -> ExecutionResult | ExperimentalIncrementalExecutionResults: + """Build response for the given data.""" + for child in initial_result_record.children: if child.filtered: continue self._publish(child) - def get_initial_errors( - self, initial_result: InitialResultRecord - ) -> list[GraphQLError]: - """Get the errors from the given initial result.""" - return initial_result.errors + errors = initial_result_record.errors or None + if errors: + errors.sort( + key=lambda error: ( + error.locations or [], + error.path or [], + error.message, + ) + ) + if self._pending: + return ExperimentalIncrementalExecutionResults( + initial_result=InitialIncrementalExecutionResult( + data, + 
errors, + has_next=True, + ), + subsequent_results=self._subscribe(), + ) + return ExecutionResult(data, errors) + + def build_error_response( + self, initial_result_record: InitialResultRecord, error: GraphQLError + ) -> ExecutionResult: + """Build response for the given error.""" + errors = initial_result_record.errors + errors.append(error) + # Sort the error list in order to make it deterministic, since we might have + # been using parallel execution. + errors.sort( + key=lambda error: (error.locations or [], error.path or [], error.message) + ) + return ExecutionResult(None, errors) def filter( self, @@ -510,6 +681,49 @@ def filter( else: self._add_task(close_async_iterator) + async def _subscribe( + self, + ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + """Subscribe to the incremental results.""" + is_done = False + pending = self._pending + + try: + while not is_done: + released = self._released + for item in released: + with suppress_key_error: + del pending[item] + self._released = {} + + result = self._get_incremental_result(released) + + if not self._pending: + is_done = True + + if result is not None: + yield result + else: + resolve = self._resolve + if resolve is None: + self._resolve = resolve = Event() + await resolve.wait() + finally: + close_async_iterators = [] + for incremental_data_record in pending: + if isinstance( + incremental_data_record, StreamItemsRecord + ): # pragma: no cover + async_iterator = incremental_data_record.async_iterator + if async_iterator: + try: + close_async_iterator = async_iterator.aclose() # type: ignore + except AttributeError: + pass + else: + close_async_iterators.append(close_async_iterator) + await gather(*close_async_iterators) + def _trigger(self) -> None: """Trigger the resolve event.""" resolve = self._resolve @@ -572,11 +786,12 @@ def _get_incremental_result( ) append_result(incremental_result) + has_next = bool(self._pending) if incremental_results: return 
SubsequentIncrementalExecutionResult( - incremental=incremental_results, has_next=self.has_next() + incremental=incremental_results, has_next=has_next ) - if encountered_completed_async_iterator and not self.has_next(): + if encountered_completed_async_iterator and not has_next: return SubsequentIncrementalExecutionResult(has_next=False) return None From 841c3d2ff52661d71270a8911c769683b7142de4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 19 Sep 2024 22:27:25 +0200 Subject: [PATCH 196/230] add tests with regard to duplication Replicates graphql/graphql-js@75d419d7c6935745f99f7b14ff4b3901d813e6e9 --- tests/execution/test_defer.py | 1166 +++++++++++++++++++++++++++++++- tests/execution/test_stream.py | 175 ++++- 2 files changed, 1329 insertions(+), 12 deletions(-) diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 41161248..83201377 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -37,6 +37,79 @@ }, ) + +class Friend(NamedTuple): + id: int + name: str + + +friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] + +deeper_object = GraphQLObjectType( + "DeeperObject", + { + "foo": GraphQLField(GraphQLString), + "bar": GraphQLField(GraphQLString), + "baz": GraphQLField(GraphQLString), + "bak": GraphQLField(GraphQLString), + }, +) + +nested_object = GraphQLObjectType( + "NestedObject", + {"deeperObject": GraphQLField(deeper_object), "name": GraphQLField(GraphQLString)}, +) + +another_nested_object = GraphQLObjectType( + "AnotherNestedObject", {"deeperObject": GraphQLField(deeper_object)} +) + +hero = { + "name": "Luke", + "id": 1, + "friends": friends, + "nestedObject": nested_object, + "AnotherNestedObject": another_nested_object, +} + +c = GraphQLObjectType( + "c", + { + "d": GraphQLField(GraphQLString), + "nonNullErrorField": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + +e = GraphQLObjectType( + "e", + { + "f": GraphQLField(GraphQLString), + }, +) + +b = 
GraphQLObjectType( + "b", + { + "c": GraphQLField(c), + "e": GraphQLField(e), + }, +) + +a = GraphQLObjectType( + "a", + { + "b": GraphQLField(b), + "someField": GraphQLField(GraphQLString), + }, +) + +g = GraphQLObjectType( + "g", + { + "h": GraphQLField(GraphQLString), + }, +) + hero_type = GraphQLObjectType( "Hero", { @@ -44,24 +117,19 @@ "name": GraphQLField(GraphQLString), "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), "friends": GraphQLField(GraphQLList(friend_type)), + "nestedObject": GraphQLField(nested_object), + "anotherNestedObject": GraphQLField(another_nested_object), }, ) -query = GraphQLObjectType("Query", {"hero": GraphQLField(hero_type)}) +query = GraphQLObjectType( + "Query", + {"hero": GraphQLField(hero_type), "a": GraphQLField(a), "g": GraphQLField(g)}, +) schema = GraphQLSchema(query) -class Friend(NamedTuple): - id: int - name: str - - -friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] - -hero = {"id": 1, "name": "Luke", "friends": friends} - - class Resolvers: """Various resolver functions for testing.""" @@ -629,6 +697,1082 @@ async def can_defer_an_inline_fragment(): }, ] + @pytest.mark.asyncio + async def emits_empty_defer_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer { + name @skip(if: true) + } + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {}}, "hasNext": True}, + { + "incremental": [ + { + "data": {}, + "path": ["hero"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_separately_emit_defer_fragments_different_labels_varying_fields(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + ... 
@defer(label: "DeferName") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"id": "1"}, + "path": ["hero"], + "label": "DeferID", + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + "label": "DeferName", + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_multiple_defers_on_the_same_object(): + document = parse( + """ + query { + hero { + friends { + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + } + } + } + } + } + } + } + + fragment FriendFrag on Friend { + id + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"friends": [{}, {}, {}]}}, "hasNext": True}, + { + "incremental": [ + {"data": {}, "path": ["hero", "friends", 0]}, + {"data": {}, "path": ["hero", "friends", 0]}, + {"data": {}, "path": ["hero", "friends", 0]}, + { + "data": {"id": "2", "name": "Han"}, + "path": ["hero", "friends", 0], + }, + {"data": {}, "path": ["hero", "friends", 1]}, + {"data": {}, "path": ["hero", "friends", 1]}, + {"data": {}, "path": ["hero", "friends", 1]}, + { + "data": {"id": "3", "name": "Leia"}, + "path": ["hero", "friends", 1], + }, + {"data": {}, "path": ["hero", "friends", 2]}, + {"data": {}, "path": ["hero", "friends", 2]}, + {"data": {}, "path": ["hero", "friends", 2]}, + { + "data": {"id": "4", "name": "C-3PO"}, + "path": ["hero", "friends", 2], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_fields_present_in_the_initial_payload(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... 
@defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "hero": { + "nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}, + "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, + } + }, + ) + + assert result == [ + { + "data": { + "hero": { + "nestedObject": {"deeperObject": {"foo": "foo"}}, + "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, + } + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "nestedObject": { + "deeperObject": { + "bar": "bar", + }, + }, + "anotherNestedObject": { + "deeperObject": { + "foo": "foo", + }, + }, + }, + "path": ["hero"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_fields_present_in_a_parent_defer_payload(): + document = parse( + """ + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... @defer { + foo + bar + } + } + } + } + } + } + """ + ) + result = await complete( + document, + {"hero": {"nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}}}, + ) + + assert result == [ + { + "data": {"hero": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + }, + } + }, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + "bar": "bar", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_fields_with_deferred_fragments_at_multiple_levels(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + foo + bar + } + ... @defer { + deeperObject { + foo + bar + baz + ... 
@defer { + foo + bar + baz + bak + } + } + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "hero": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "bak": "bak", + } + } + } + }, + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"deeperObject": {"foo": "foo"}}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + }, + } + }, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + } + }, + "path": ["hero", "nestedObject"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "bak": "bak", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + ... @defer { + foo + } + } + } + ... @defer { + nestedObject { + deeperObject { + ... 
@defer { + foo + bar + } + } + } + } + } + } + """ + ) + result = await complete( + document, + {"hero": {"nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}}}, + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + { + "data": {"nestedObject": {"deeperObject": {}}}, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + "bar": "bar", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_combine_fields_from_deferred_fragments_branches_multi_levels(): + document = parse( + """ + query { + a { + b { + c { + d + } + ... @defer { + e { + f + } + } + } + } + ... @defer { + a { + b { + e { + f + } + } + } + g { + h + } + } + } + """ + ) + result = await complete( + document, + {"a": {"b": {"c": {"d": "d"}, "e": {"f": "f"}}}, "g": {"h": "h"}}, + ) + + assert result == [ + { + "data": {"a": {"b": {"c": {"d": "d"}}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"e": {"f": "f"}}, + "path": ["a", "b"], + }, + { + "data": {"a": {"b": {"e": {"f": "f"}}}, "g": {"h": "h"}}, + "path": [], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def preserves_error_boundaries_null_first(): + document = parse( + """ + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... 
@defer { + b { + c { + d + } + } + } + } + } + """ + ) + result = await complete( + document, + {"a": {"b": {"c": {"d": "d"}}, "someField": "someField"}}, + ) + + assert result == [ + { + "data": {"a": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"b": {"c": {"d": "d"}}}, + "path": ["a"], + }, + { + "data": {"a": {"b": {"c": None}, "someField": "someField"}}, + "path": [], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 8, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + async def preserves_error_boundaries_value_first(): + document = parse( + """ + query { + ... @defer { + a { + b { + c { + d + } + } + } + } + a { + ... @defer { + someField + b { + c { + nonNullErrorField + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "a": { + "b": {"c": {"d": "d"}, "nonNullErrorFIeld": None}, + "someField": "someField", + } + }, + ) + + assert result == [ + { + "data": {"a": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"b": {"c": None}, "someField": "someField"}, + "path": ["a"], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 17, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + { + "data": {"a": {"b": {"c": {"d": "d"}}}}, + "path": [], + }, + ], + "hasNext": False, + }, + ] + + async def correctly_handle_a_slow_null(): + document = parse( + """ + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... 
@defer { + b { + c { + d + } + } + } + } + } + """ + ) + + async def slow_null(_info) -> None: + await sleep(0) + + result = await complete( + document, + { + "a": { + "b": {"c": {"d": "d", "nonNullErrorField": slow_null}}, + "someField": "someField", + } + }, + ) + + assert result == [ + { + "data": {"a": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"b": {"c": {"d": "d"}}}, + "path": ["a"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"a": {"b": {"c": None}, "someField": "someField"}}, + "path": [], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 8, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): + document = parse( + """ + query { + hero { + nonNullName + } + ... @defer { + hero { + name + } + } + } + """ + ) + result = await complete( + document, + { + "hero": {**hero, "nonNullName": lambda _info: None}, + }, + ) + + assert result == [ + { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullName"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": {"name": "Luke"}}, + "path": [], + }, + ], + "hasNext": False, + }, + ] + + async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): + document = parse( + """ + query { + ... 
@defer { + hero { + nonNullName + name + } + } + } + """ + ) + result = await complete( + document, + { + "hero": {**hero, "nonNullName": lambda _info: None}, + }, + ) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": None}, + "path": [], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 5, "column": 19}], + "path": ["hero", "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + result = await complete(document) + + assert result == [ + { + "data": { + "hero": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + } + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + }, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_async_iterable_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + async def resolve_friends(_info): + await sleep(0) + yield friends[0] + + result = await complete( + document, + { + "hero": {**hero, "friends": resolve_friends}, + }, + ) + + assert result == [ + { + "data": {"hero": {"friends": [{"name": "Han"}]}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": [{"name": "Han"}]}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_empty_async_iterable_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... 
@defer { + friends { + name + } + } + } + } + """ + ) + + async def resolve_friends(_info): + await sleep(0) + for friend in []: # type: ignore + yield friend # pragma: no cover + + result = await complete( + document, + { + "hero": {**hero, "friends": resolve_friends}, + }, + ) + + assert result == [ + { + "data": {"hero": {"friends": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": []}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + id + } + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": { + "hero": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + } + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": [{"id": "2"}, {"id": "3"}, {"id": "4"}]}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_list_fields_that_return_empty_lists(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "friends": lambda _info: []}} + ) + + assert result == [ + { + "data": {"hero": {"friends": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": []}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_null_object_fields(): + document = parse( + """ + query { + hero { + nestedObject { + name + } + ... 
@defer { + nestedObject { + name + } + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nestedObject": lambda _info: None}} + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": None}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"nestedObject": None}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_async_object_fields(): + document = parse( + """ + query { + hero { + nestedObject { + name + } + ... @defer { + nestedObject { + name + } + } + } + } + """ + ) + + async def resolve_nested_object(_info): + return {"name": "foo"} + + result = await complete( + document, {"hero": {"nestedObject": resolve_nested_object}} + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"name": "foo"}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"nestedObject": {"name": "foo"}}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + @pytest.mark.asyncio async def handles_errors_thrown_in_deferred_fragments(): document = parse( diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 42188517..d611f7a9 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1363,7 +1363,7 @@ async def get_friends(_info): ] @pytest.mark.asyncio - async def handles_async_error_in_complete_value_from_async_iterable_non_null(): + async def handles_async_error_in_complete_value_from_async_generator_non_null(): document = parse( """ query { @@ -1853,6 +1853,179 @@ async def get_friends(_info): }, ] + @pytest.mark.asyncio + async def handles_overlapping_deferred_and_non_deferred_streams(): + document = parse( + """ + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + id + } + } + nestedObject { + ... 
@defer { + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + } + } + """ + ) + + async def get_nested_friend_list(_info): + for i in range(2): + await sleep(0) + yield friends[i] + + result = await complete( + document, + { + "nestedObject": { + "nestedFriendList": get_nested_friend_list, + } + }, + ) + + assert result in ( + # exact order of results depends on timing and Python version + [ + { + "data": {"nestedObject": {"nestedFriendList": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + ], + "hasNext": True, + }, + { + "hasNext": False, + }, + ], + [ + { + "data": {"nestedObject": {"nestedFriendList": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + ], + "hasNext": True, + }, + { + "hasNext": False, + }, + ], + [ + {"data": {"nestedObject": {"nestedFriendList": []}}, "hasNext": True}, + { + "incremental": [ + { + 
"items": [{"id": "1"}], + "path": ["nestedObject", "nestedFriendList", 0], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2"}], + "path": ["nestedObject", "nestedFriendList", 1], + } + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"nestedFriendList": []}, "path": ["nestedObject"]} + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + } + ], + "hasNext": True, + }, + {"hasNext": False}, + ], + ) + @pytest.mark.asyncio async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): resolve_slow_field = Event() From 4e83d4201a2be88832f24c5733c0a1b8568c3708 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Mon, 23 Sep 2024 05:58:04 +0800 Subject: [PATCH 197/230] Export ValidationAbortedError (#227) --- src/graphql/validation/validate.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 1439f7e4..08c83780 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -14,7 +14,13 @@ if TYPE_CHECKING: from .rules import ASTValidationRule -__all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] +__all__ = [ + "assert_valid_sdl", + "assert_valid_sdl_extension", + "validate", + "validate_sdl", + "ValidationAbortedError", +] class ValidationAbortedError(GraphQLError): From a25b40b9de260f59a3d1b8c93722a2e80e32a3c7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 19 Oct 2024 17:01:52 +0200 Subject: [PATCH 198/230] Support Python 3.13 and update dependencies --- .github/workflows/test.yml | 2 +- poetry.lock | 463 ++++++++++++++++------------ pyproject.toml | 5 +- src/graphql/language/parser.py | 5 +- 
src/graphql/language/visitor.py | 2 +- src/graphql/pyutils/async_reduce.py | 6 +- tests/execution/test_abstract.py | 2 +- tox.ini | 9 +- 8 files changed, 279 insertions(+), 215 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 01668f57..8959d0de 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', 'pypy3.9', 'pypy3.10'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] steps: - uses: actions/checkout@v4 diff --git a/poetry.lock b/poetry.lock index 1d4f8e60..c402516a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,24 +58,24 @@ files = [ [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -91,101 +91,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - 
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file 
= "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -363,13 +378,13 @@ toml = ["tomli"] [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = 
"sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] @@ -425,31 +440,34 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false 
-python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "imagesize" version = "1.4.1" @@ -483,22 +501,26 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-metadata" -version = "8.2.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs 
(>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -646,38 +668,43 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.11.1" +version = "1.12.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, - {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, - {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, - {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, - {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, - {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, - {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, - {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, - {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, - {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, - {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, - {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, - {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, - {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, + {file = "mypy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed"}, + {file = "mypy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469"}, + {file = "mypy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e"}, + {file = "mypy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a"}, + {file = "mypy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, + {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, + {file = "mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, + {file = 
"mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, + {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, + {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, + {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, + {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, + {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, + {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, + {file = "mypy-1.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f"}, + {file = "mypy-1.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e"}, + {file = 
"mypy-1.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7"}, + {file = "mypy-1.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b"}, + {file = "mypy-1.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0"}, + {file = "mypy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8"}, + {file = "mypy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0"}, + {file = "mypy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1"}, + {file = "mypy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e"}, + {file = "mypy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa"}, + {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, + {file = "mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, ] [package.dependencies] @@ -744,19 +771,19 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -844,13 +871,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyproject-api" -version = "1.7.1" +version = "1.8.0" description = "API to interact with the python pyproject.toml based projects" optional = false python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, - {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, + {file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"}, + {file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"}, ] [package.dependencies] @@ -858,8 +885,8 @@ packaging = ">=24.1" tomli = {version = ">=2.0.1", 
markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] -testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] +docs = ["furo (>=2024.8.6)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=75.1)"] [[package]] name = "pytest" @@ -886,13 +913,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -1029,13 +1056,13 @@ pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -1082,29 
+1109,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.7" +version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, - {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, - {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, - {file = 
"ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, - {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, - {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, - {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, - {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, + {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"}, + {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"}, + {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"}, + {file = 
"ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"}, + {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"}, + {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"}, + {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"}, + {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] [[package]] @@ -1362,6 +1389,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tomli" +version = "2.0.2" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = 
"tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + [[package]] name = "tox" version = "3.28.0" @@ -1390,30 +1428,27 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.17.1" +version = "4.23.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.17.1-py3-none-any.whl", hash = "sha256:2974597c0353577126ab014f52d1a399fb761049e165ff34427f84e8cfe6c990"}, - {file = "tox-4.17.1.tar.gz", hash = "sha256:2c41565a571e34480bd401d668a4899806169a4633e972ac296c54406d2ded8a"}, + {file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, + {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, ] [package.dependencies] -cachetools = ">=5.4" +cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.15.4" +filelock = ">=3.16.1" packaging = ">=24.1" -platformdirs = ">=4.2.2" +platformdirs = ">=4.3.6" pluggy = ">=1.5" -pyproject-api = ">=1.7.1" +pyproject-api = ">=1.8" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.3" - -[package.extras] -docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.3)", "time-machine 
(>=2.14.2)", "wheel (>=0.43)"] +typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} +virtualenv = ">=20.26.6" [[package]] name = "typed-ast" @@ -1506,13 +1541,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1523,13 +1558,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.6" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, ] [package.dependencies] @@ -1542,6 +1577,26 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", 
"coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "virtualenv" +version = "20.27.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +files = [ + {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, + {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "zipp" version = "3.15.0" @@ -1559,20 +1614,24 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = 
"sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "de9ad44d919a23237212508ca6da20b929c8c6cc8aa0da01406ef2f731debe10" +content-hash = "450b262692d7c4cd0e88d628604e32375eef04d37d6f352cf9a93a34ed189506" diff --git a/pyproject.toml b/pyproject.toml index 28c7707f..668dd7ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" ] packages = [ { include = "graphql", from = "src" }, @@ -75,9 +76,9 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.6.4,<0.7" +ruff = ">=0.7,<0.8" mypy = [ - { version = "^1.11", python = ">=3.8" }, + { version = "^1.12", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1.0,<2" diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 78d308d0..95c69ccb 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -471,8 +471,9 
@@ def parse_nullability_assertion(self) -> NullabilityAssertionNode | None: def parse_arguments(self, is_const: bool) -> list[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument - item = cast(Callable[[], ArgumentNode], item) - return self.optional_many(TokenKind.PAREN_L, item, TokenKind.PAREN_R) + return self.optional_many( + TokenKind.PAREN_L, cast(Callable[[], ArgumentNode], item), TokenKind.PAREN_R + ) def parse_argument(self, is_const: bool = False) -> ArgumentNode: """Argument[Const]: Name : Value[?Const]""" diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index be410466..450996d8 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -289,7 +289,7 @@ def visit( else: stack = Stack(in_array, idx, keys, edits, stack) in_array = isinstance(node, tuple) - keys = node if in_array else visitor_keys.get(node.kind, ()) + keys = node if in_array else visitor_keys.get(node.kind, ()) # type: ignore idx = -1 edits = [] if parent: diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py index 33d97f9c..02fbf648 100644 --- a/src/graphql/pyutils/async_reduce.py +++ b/src/graphql/pyutils/async_reduce.py @@ -36,8 +36,10 @@ def async_reduce( async def async_callback( current_accumulator: Awaitable[U], current_value: T ) -> U: - result = callback(await current_accumulator, current_value) - return await cast(Awaitable, result) if is_awaitable(result) else result + result: AwaitableOrValue[U] = callback( + await current_accumulator, current_value + ) + return await result if is_awaitable(result) else result # type: ignore accumulator = async_callback(cast(Awaitable[U], accumulator), value) else: diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index d7d12b7a..75a1e875 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -42,7 +42,7 @@ async def 
execute_query( assert isinstance(schema, GraphQLSchema) assert isinstance(query, str) document = parse(query) - result = (execute_sync if sync else execute)(schema, document, root_value) # type: ignore + result = (execute_sync if sync else execute)(schema, document, root_value) if not sync and is_awaitable(result): result = await result assert isinstance(result, ExecutionResult) diff --git a/tox.ini b/tox.ini index e5953a48..fcd4f015 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{7,8,9,10,11,12}, pypy3{9,10}, ruff, mypy, docs +envlist = py3{7,8,9,10,11,12,13}, pypy3{9,10}, ruff, mypy, docs isolated_build = true [gh-actions] @@ -11,13 +11,14 @@ python = 3.10: py310 3.11: py311 3.12: py312 + 3.13: py313 pypy3: pypy39 pypy3.9: pypy39 pypy3.10: pypy310 [testenv:ruff] basepython = python3.12 -deps = ruff>=0.6.4,<0.7 +deps = ruff>=0.7,<0.8 commands = ruff check src tests ruff format --check src tests @@ -25,7 +26,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.11,<2 + mypy>=1.12,<2 pytest>=8.3,<9 commands = mypy src tests @@ -50,5 +51,5 @@ deps = commands = # to also run the time-consuming tests: tox -e py311 -- --run-slow # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable - py3{7,8,9,10,11}, pypy3{9,10}: pytest tests {posargs} + py3{7,8,9,10,11,13}, pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 0d573dab0cf5d2c3bf3e133275d1fe70bdd2731f Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 21 Oct 2024 20:20:33 +0200 Subject: [PATCH 199/230] feat: add codspeed for continuous benchmarking (#230) --- .github/workflows/benchmarks.yml | 31 +++++ poetry.lock | 199 ++++++++++++++++++++++++++++++- pyproject.toml | 1 + tests/benchmarks/test_visit.py | 2 +- 4 files changed, 231 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/benchmarks.yml diff --git a/.github/workflows/benchmarks.yml 
b/.github/workflows/benchmarks.yml new file mode 100644 index 00000000..35867e43 --- /dev/null +++ b/.github/workflows/benchmarks.yml @@ -0,0 +1,31 @@ +name: CodSpeed + +on: + push: + branches: + - "main" + pull_request: + workflow_dispatch: + +jobs: + benchmarks: + name: 📈 Benchmarks + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + id: setup-python + with: + python-version: "3.12" + architecture: x64 + + - run: pipx install poetry + + - run: poetry env use 3.12 + - run: poetry install --with test + + - name: Run benchmarks + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: poetry run pytest tests --benchmark-enable --codspeed diff --git a/poetry.lock b/poetry.lock index c402516a..dfb6a622 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -15,6 +16,7 @@ files = [ name = "babel" version = "2.14.0" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -32,6 +34,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "babel" version = "2.16.0" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -49,6 +52,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -60,6 +64,7 @@ files = [ name = "cachetools" version = "5.5.0" description = "Extensible memoizing collections and decorators" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -71,6 +76,7 @@ files = [ name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -78,10 +84,88 @@ files = [ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "chardet" version = "5.2.0" description = "Universal encoding detector for Python 3" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -93,6 +177,7 @@ files = [ name = "charset-normalizer" version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -207,6 +292,7 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
+category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -218,6 +304,7 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -293,6 +380,7 @@ toml = ["tomli"] name = "coverage" version = "7.6.1" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -380,6 +468,7 @@ toml = ["tomli"] name = "distlib" version = "0.3.9" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -391,6 +480,7 @@ files = [ name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -402,6 +492,7 @@ files = [ name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -413,6 +504,7 @@ files = [ name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -427,6 +519,7 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.2" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -442,6 +535,7 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "filelock" version = "3.16.1" description = "A platform independent file lock." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -458,6 +552,7 @@ typing = ["typing-extensions (>=4.12.2)"] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -472,6 +567,7 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -483,6 +579,7 @@ files = [ name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -503,6 +600,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -526,6 +624,7 @@ type = ["pytest-mypy"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -537,6 +636,7 @@ files = [ name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -554,6 +654,7 @@ i18n = ["Babel (>=2.7)"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -623,6 +724,7 @@ files = [ name = "mypy" version = "1.4.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -670,6 +772,7 @@ reports = ["lxml"] name = "mypy" version = "1.12.0" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -722,6 +825,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -733,6 +837,7 @@ files = [ name = "packaging" version = "24.0" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -744,6 +849,7 @@ files = [ name = "packaging" version = "24.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -755,6 +861,7 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -773,6 +880,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -789,6 +897,7 @@ type = ["mypy (>=1.11.2)"] name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -807,6 +916,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -822,6 +932,7 @@ testing = ["pytest", "pytest-benchmark"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -833,6 +944,7 @@ files = [ name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -840,10 +952,23 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + [[package]] name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -859,6 +984,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -873,6 +999,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyproject-api" version = "1.8.0" description = "API to interact with the python pyproject.toml based projects" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -892,6 +1019,7 @@ testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytes name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -915,6 +1043,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest" version = "8.3.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -937,6 +1066,7 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments name = "pytest-asyncio" version = "0.21.2" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -956,6 +1086,7 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-asyncio" version = "0.23.8" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -974,6 +1105,7 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -990,10 +1122,34 @@ aspect = ["aspectlib"] elasticsearch = ["elasticsearch"] histogram = ["pygal", "pygaljs"] +[[package]] +name = "pytest-codspeed" +version = "2.2.1" +description = "Pytest plugin to create CodSpeed benchmarks" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest_codspeed-2.2.1-py3-none-any.whl", hash = "sha256:aad08033015f3e6c8c14c8bf0eca475921a9b088e92c98b626bf8af8f516471e"}, + {file = "pytest_codspeed-2.2.1.tar.gz", hash = "sha256:0adc24baf01c64a6ca0a0b83b3cd704351708997e09ec086b7776c32227d4e0a"}, +] + +[package.dependencies] +cffi = ">=1.15.1" +filelock = ">=3.12.2" +pytest = ">=3.8" +setuptools = {version = "*", markers = "python_full_version >= \"3.12.0\""} + +[package.extras] +compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"] +lint = ["mypy (>=1.3.0,<1.4.0)", "ruff (>=0.3.3,<0.4.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] + [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1012,6 +1168,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-cov" version = "5.0.0" description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1030,6 +1187,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] name = "pytest-describe" version = "2.2.0" description = "Describe-style plugin for pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1044,6 +1202,7 @@ pytest = ">=4.6,<9" name = "pytest-timeout" version = "2.3.1" description = "pytest plugin to abort hanging tests" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1058,6 +1217,7 @@ pytest = ">=7.0.0" name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" +category = "dev" optional = false python-versions = "*" files = [ @@ -1069,6 +1229,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1090,6 +1251,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1111,6 +1273,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "ruff" version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1134,10 +1297,20 @@ files = [ {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.11.0,<1.12.0)", "pytest-mypy"] + [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1149,6 +1322,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1160,6 +1334,7 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1195,6 +1370,7 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx" version = "7.1.2" description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1230,6 +1406,7 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] name = "sphinx-rtd-theme" version = "2.0.0" description = "Read the Docs theme for Sphinx" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1249,6 +1426,7 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1264,6 +1442,7 @@ test = ["pytest"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1279,6 +1458,7 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1294,6 +1474,7 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1309,6 +1490,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1324,6 +1506,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" +category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1338,6 +1521,7 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1352,6 +1536,7 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1367,6 +1552,7 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1382,6 +1568,7 @@ test = ["pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1404,6 +1591,7 @@ files = [ name = "tox" version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1430,6 +1618,7 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu name = "tox" version = "4.23.0" description = "tox is a generic virtualenv management and test command line tool" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1454,6 +1643,7 @@ virtualenv = ">=20.26.6" name = "typed-ast" version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1504,6 +1694,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1515,6 +1706,7 @@ files = [ name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1526,6 +1718,7 @@ files = [ name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1543,6 +1736,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1560,6 +1754,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "virtualenv" version = "20.26.6" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1601,6 +1796,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1616,6 +1812,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.8" files = [ diff --git a/pyproject.toml b/pyproject.toml index 668dd7ff..70f39c61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,7 @@ tox = [ { version = "^4.16", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] +pytest-codspeed = "^2.2.1" [tool.poetry.group.lint] optional = true diff --git a/tests/benchmarks/test_visit.py b/tests/benchmarks/test_visit.py index 53bfc98e..583075bf 100644 --- a/tests/benchmarks/test_visit.py +++ b/tests/benchmarks/test_visit.py @@ -23,5 +23,5 @@ def test_visit_all_ast_nodes(benchmark, big_schema_sdl): # noqa: F811 def test_visit_all_ast_nodes_in_parallel(benchmark, big_schema_sdl): # noqa: F811 document_ast = parse(big_schema_sdl) visitor = DummyVisitor() - parallel_visitor = ParallelVisitor([visitor] * 50) + parallel_visitor = ParallelVisitor([visitor] * 20) benchmark(lambda: visit(document_ast, parallel_visitor)) From 46244446c66ca3033f5295f6c796108e4c1f7365 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 21 Oct 2024 21:10:06 +0200 Subject: [PATCH 200/230] fix: recreate poetry lock (#231) --- poetry.lock | 125 +++++++++++++--------------------------------------- 1 file 
changed, 30 insertions(+), 95 deletions(-) diff --git a/poetry.lock b/poetry.lock index dfb6a622..2d534289 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "babel" version = "2.14.0" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -34,7 +32,6 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "babel" version = "2.16.0" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -52,7 +49,6 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -64,7 +60,6 @@ files = [ name = "cachetools" version = "5.5.0" description = "Extensible memoizing collections and decorators" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -76,7 +71,6 @@ files = [ name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -88,7 +82,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -165,7 +158,6 @@ pycparser = "*" name = "chardet" version = "5.2.0" description = "Universal encoding detector for Python 3" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -177,7 +169,6 @@ files = [ name = "charset-normalizer" version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -292,7 +283,6 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -304,7 +294,6 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -380,7 +369,6 @@ toml = ["tomli"] name = "coverage" version = "7.6.1" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -468,7 +456,6 @@ toml = ["tomli"] name = "distlib" version = "0.3.9" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -480,7 +467,6 @@ files = [ name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -492,7 +478,6 @@ files = [ name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -504,7 +489,6 @@ files = [ name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -519,7 +503,6 @@ test = ["pytest (>=6)"] name = "filelock" 
version = "3.12.2" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -533,26 +516,24 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.16.1" +version = "3.16.0" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -567,7 +548,6 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif 
file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -579,7 +559,6 @@ files = [ name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -600,7 +579,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -624,7 +602,6 @@ type = ["pytest-mypy"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -636,7 +613,6 @@ files = [ name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -654,7 +630,6 @@ i18n = ["Babel (>=2.7)"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -724,7 +699,6 @@ files = [ name = "mypy" version = "1.4.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -772,7 +746,6 @@ reports = ["lxml"] name = "mypy" version = "1.12.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -825,7 +798,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -837,7 +809,6 @@ files = [ name = "packaging" version = "24.0" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -849,7 +820,6 @@ files = [ name = "packaging" version = "24.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -861,7 +831,6 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -880,7 +849,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -897,7 +865,6 @@ type = ["mypy (>=1.11.2)"] name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -916,7 +883,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -932,7 +898,6 @@ testing = ["pytest", "pytest-benchmark"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -944,7 +909,6 @@ files = [ name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -956,7 +920,6 @@ files = [ name = "pycparser" 
version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -968,7 +931,6 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -984,7 +946,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -999,7 +960,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyproject-api" version = "1.8.0" description = "API to interact with the python pyproject.toml based projects" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1019,7 +979,6 @@ testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytes name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1043,7 +1002,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest" version = "8.3.3" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1066,7 +1024,6 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments name = "pytest-asyncio" version = "0.21.2" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1086,7 +1043,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-asyncio" version = "0.23.8" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1105,7 +1061,6 @@ testing = ["coverage (>=6.2)", "hypothesis 
(>=5.7.1)"] name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1126,7 +1081,6 @@ histogram = ["pygal", "pygaljs"] name = "pytest-codspeed" version = "2.2.1" description = "Pytest plugin to create CodSpeed benchmarks" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1149,7 +1103,6 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1168,7 +1121,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-cov" version = "5.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1187,7 +1139,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] name = "pytest-describe" version = "2.2.0" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1202,7 +1153,6 @@ pytest = ">=4.6,<9" name = "pytest-timeout" version = "2.3.1" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1217,7 +1167,6 @@ pytest = ">=7.0.0" name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" files = [ @@ -1229,7 +1178,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1251,7 +1199,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1273,7 +1220,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "ruff" version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1297,6 +1243,17 @@ files = [ {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] +[[package]] +name = "setuptools" +version = "75.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, +] + [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] @@ -1304,13 +1261,12 @@ cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", 
"ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.11.0,<1.12.0)", "pytest-mypy"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1322,7 +1278,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -1334,7 +1289,6 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1370,7 +1324,6 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx" version = "7.1.2" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1406,7 +1359,6 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] name = "sphinx-rtd-theme" version = "2.0.0" description = "Read the Docs theme for Sphinx" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1426,7 +1378,6 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.5" 
files = [ @@ -1442,7 +1393,6 @@ test = ["pytest"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1458,7 +1408,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1474,7 +1423,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1490,7 +1438,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1506,7 +1453,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1521,7 +1467,6 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1536,7 +1481,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1552,7 +1496,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1568,7 +1511,6 @@ test = ["pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1591,7 +1533,6 @@ files = [ name = "tox" version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1616,34 +1557,35 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.23.0" +version = "4.20.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, - {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, + {file = "tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34"}, + {file = "tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5"}, ] [package.dependencies] cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.16.1" +filelock = ">=3.15.4" packaging = ">=24.1" -platformdirs = ">=4.3.6" +platformdirs = ">=4.2.2" pluggy = ">=1.5" -pyproject-api = ">=1.8" +pyproject-api = ">=1.7.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.12.2", markers = 
"python_version < \"3.11\""} -virtualenv = ">=20.26.6" +virtualenv = ">=20.26.3" + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] +testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"] [[package]] name = "typed-ast" version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1694,7 +1636,6 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1706,7 +1647,6 @@ files = [ name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1718,7 +1658,6 @@ files = [ name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1736,7 +1675,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1754,7 +1692,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "virtualenv" version = "20.26.6" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1796,7 +1733,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1812,7 +1748,6 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1831,4 +1766,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "450b262692d7c4cd0e88d628604e32375eef04d37d6f352cf9a93a34ed189506" +content-hash = "5d8998e59f0991b7dea9d5302fadc9c06be82722d75d9f00271efa1cd81555dd" From fc4ce56515a196a67c4d26a32107c1a9d3bca342 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 17 Dec 2024 20:36:14 +0100 Subject: [PATCH 201/230] Unify action titles --- .github/workflows/benchmarks.yml | 18 +++++++++++------- .github/workflows/lint.yml | 1 + .github/workflows/publish.yml | 1 + .github/workflows/test.yml | 1 + tests/benchmarks/test_visit.py | 2 +- 5 files changed, 15 insertions(+), 8 deletions(-) diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index 35867e43..fce1037f 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -1,4 +1,4 @@ -name: CodSpeed +name: Performance on: push: @@ -11,20 +11,24 @@ jobs: benchmarks: name: 📈 Benchmarks runs-on: ubuntu-latest + steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + + - name: Set up Python 3.12 + uses: 
actions/setup-python@v5 id: setup-python with: python-version: "3.12" architecture: x64 - - run: pipx install poetry - - - run: poetry env use 3.12 - - run: poetry install --with test + - name: Install with poetry + run: | + pipx install poetry + poetry env use 3.12 + poetry install --with test - - name: Run benchmarks + - name: Run benchmarks with CodSpeed uses: CodSpeedHQ/action@v3 with: token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 74f14604..703a56aa 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -4,6 +4,7 @@ on: [push, pull_request] jobs: lint: + name: 🧹 Lint runs-on: ubuntu-latest steps: diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 561b3028..8bd8c296 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -7,6 +7,7 @@ on: jobs: build: + name: 🏗️ Build runs-on: ubuntu-latest steps: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8959d0de..298d3dd0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,6 +4,7 @@ on: [push, pull_request] jobs: tests: + name: 🧪 Tests runs-on: ubuntu-latest strategy: diff --git a/tests/benchmarks/test_visit.py b/tests/benchmarks/test_visit.py index 583075bf..4e7a85a2 100644 --- a/tests/benchmarks/test_visit.py +++ b/tests/benchmarks/test_visit.py @@ -23,5 +23,5 @@ def test_visit_all_ast_nodes(benchmark, big_schema_sdl): # noqa: F811 def test_visit_all_ast_nodes_in_parallel(benchmark, big_schema_sdl): # noqa: F811 document_ast = parse(big_schema_sdl) visitor = DummyVisitor() - parallel_visitor = ParallelVisitor([visitor] * 20) + parallel_visitor = ParallelVisitor([visitor] * 25) benchmark(lambda: visit(document_ast, parallel_visitor)) From e4a15d6abfe38344f3dc46f49ea78a079c2e93de Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 17 Dec 2024 21:43:56 +0100 Subject: [PATCH 202/230] Fix dependencies --- 
.../{benchmarks.yml => benchmark.yml} | 0 poetry.lock | 420 +++++++++++++----- pyproject.toml | 7 +- tests/execution/test_oneof.py | 2 +- tox.ini | 2 +- 5 files changed, 319 insertions(+), 112 deletions(-) rename .github/workflows/{benchmarks.yml => benchmark.yml} (100%) diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmark.yml similarity index 100% rename from .github/workflows/benchmarks.yml rename to .github/workflows/benchmark.yml diff --git a/poetry.lock b/poetry.lock index 2d534289..abd0077f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "alabaster" @@ -69,13 +69,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -154,6 +154,85 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file 
= "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "chardet" version = "5.2.0" @@ -516,18 +595,18 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.16.0" +version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions 
(>=4.12.2)"] [[package]] @@ -626,6 +705,30 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -695,6 +798,17 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mypy" version = "1.4.1" @@ -744,43 +858,43 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.12.0" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = 
">=3.8" files = [ - {file = "mypy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed"}, - {file = "mypy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469"}, - {file = "mypy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e"}, - {file = "mypy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a"}, - {file = "mypy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff"}, - {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, - {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, - {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, - {file = "mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, - {file = "mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, - {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, - {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, - {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, 
- {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, - {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, - {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, - {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, - {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, - {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, - {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, - {file = "mypy-1.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f"}, - {file = "mypy-1.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e"}, - {file = "mypy-1.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7"}, - {file = "mypy-1.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b"}, - {file = "mypy-1.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0"}, - {file = "mypy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8"}, - {file = "mypy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0"}, - {file = "mypy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1"}, - {file = "mypy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e"}, - {file = "mypy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa"}, - {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, - {file = "mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] @@ -790,6 +904,7 @@ typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -818,13 +933,13 @@ files = [ [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core 
utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -927,6 +1042,17 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pygments" version = "2.17.2" @@ -1000,13 +1126,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] @@ -1092,13 +1218,43 @@ 
files = [ cffi = ">=1.15.1" filelock = ">=3.12.2" pytest = ">=3.8" -setuptools = {version = "*", markers = "python_full_version >= \"3.12.0\""} [package.extras] compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"] lint = ["mypy (>=1.3.0,<1.4.0)", "ruff (>=0.3.3,<0.4.0)"] test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] +[[package]] +name = "pytest-codspeed" +version = "3.1.0" +description = "Pytest plugin to create CodSpeed benchmarks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb7c16e5a64cb30bad30f5204c7690f3cbc9ae5b9839ce187ef1727aa5d2d9c"}, + {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23910893c22ceef6efbdf85d80e803b7fb4a231c9e7676ab08f5ddfc228438"}, + {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb1495a633a33e15268a1f97d91a4809c868de06319db50cf97b4e9fa426372c"}, + {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd8a54b99207bd25a4c3f64d9a83ac0f3def91cdd87204ca70a49f822ba919c"}, + {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4d1ac896ebaea5b365e69b41319b4d09b57dab85ec6234f6ff26116b3795f03"}, + {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f0c1857a0a6cce6a23c49f98c588c2eef66db353c76ecbb2fb65c1a2b33a8d5"}, + {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4731a7cf1d8d38f58140d51faa69b7c1401234c59d9759a2507df570c805b11"}, + {file = 
"pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f2e4b63260f65493b8d42c8167f831b8ed90788f81eb4eb95a103ee6aa4294"}, + {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db44099b3f1ec1c9c41f0267c4d57d94e31667f4cb3fb4b71901561e8ab8bc98"}, + {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a533c1ad3cc60f07be432864c83d1769ce2877753ac778e1bfc5a9821f5c6ddf"}, + {file = "pytest_codspeed-3.1.0.tar.gz", hash = "sha256:f29641d27b4ded133b1058a4c859e510a2612ad4217ef9a839ba61750abd2f8a"}, +] + +[package.dependencies] +cffi = ">=1.17.1" +importlib-metadata = {version = ">=8.5.0", markers = "python_version < \"3.10\""} +pytest = ">=3.8" +rich = ">=13.8.1" + +[package.extras] +compat = ["pytest-benchmark (>=5.0.0,<5.1.0)", "pytest-xdist (>=3.6.1,<3.7.0)"] +lint = ["mypy (>=1.11.2,<1.12.0)", "ruff (>=0.6.5,<0.7.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] + [[package]] name = "pytest-cov" version = "4.1.0" @@ -1216,62 +1372,61 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "ruff" 
-version = "0.7.0" +version = "0.8.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"}, - {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"}, - {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"}, - {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"}, - {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"}, - {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"}, - {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, -] - -[[package]] -name = "setuptools" -version = "75.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, - {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, + {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"}, + {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"}, + {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"}, + {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"}, + {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"}, + {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"}, + {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"}, ] -[package.extras] -check = ["pytest-checkdocs 
(>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] - [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] 
[[package]] @@ -1520,13 +1675,43 @@ files = [ [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = 
"tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -1557,30 +1742,30 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.20.0" +version = "4.23.2" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34"}, - {file = "tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5"}, + {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, + {file = 
"tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, ] [package.dependencies] cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.15.4" +filelock = ">=3.16.1" packaging = ">=24.1" -platformdirs = ">=4.2.2" +platformdirs = ">=4.3.6" pluggy = ">=1.5" -pyproject-api = ">=1.7.1" +pyproject-api = ">=1.8" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.3" +typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} +virtualenv = ">=20.26.6" [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] -testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"] +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] [[package]] name = "typed-ast" @@ -1711,13 +1896,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.27.0" +version = "20.28.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, - {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, + {file = "virtualenv-20.28.0-py3-none-any.whl", hash = 
"sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, + {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] @@ -1763,7 +1948,26 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "5d8998e59f0991b7dea9d5302fadc9c06be82722d75d9f00271efa1cd81555dd" +content-hash = "2f41e2d562a00d6905a8b02cd7ccf5dbcc2fb0218476addd64faff18ee8b46bf" diff --git a/pyproject.toml b/pyproject.toml index 70f39c61..30026bc5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,17 +67,20 @@ pytest-cov = [ ] pytest-describe = "^2.2" pytest-timeout = "^2.3" +pytest-codspeed = [ + { version = "^3.1.0", python = ">=3.9" }, + { version = "^2.2.1", python = "<3.8" } +] tox = [ { version = "^4.16", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] -pytest-codspeed = 
"^2.2.1" [tool.poetry.group.lint] optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.7,<0.8" +ruff = ">=0.8,<0.9" mypy = [ { version = "^1.12", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py index 2df1000d..81f3d224 100644 --- a/tests/execution/test_oneof.py +++ b/tests/execution/test_oneof.py @@ -35,7 +35,7 @@ def execute_query( def describe_execute_handles_one_of_input_objects(): def describe_one_of_input_objects(): root_value = { - "test": lambda _info, input: input, # noqa: A002 + "test": lambda _info, input: input, } def accepts_a_good_default_value(): diff --git a/tox.ini b/tox.ini index fcd4f015..c998afd8 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.7,<0.8 +deps = ruff>=0.8,<0.9 commands = ruff check src tests ruff format --check src tests From 59d478af061af9721a9a04ebc6def17c6fc30c55 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 17 Dec 2024 22:10:22 +0100 Subject: [PATCH 203/230] Fix sorting of exported names --- pyproject.toml | 1 - src/graphql/__init__.py | 576 +++++++++--------- src/graphql/error/graphql_error.py | 10 +- src/graphql/execution/__init__.py | 32 +- src/graphql/execution/collect_fields.py | 4 +- src/graphql/execution/execute.py | 8 +- .../execution/incremental_publisher.py | 12 +- src/graphql/execution/middleware.py | 2 +- src/graphql/language/__init__.py | 152 ++--- src/graphql/language/ast.py | 140 ++--- src/graphql/language/block_string.py | 3 +- src/graphql/language/character_classes.py | 2 +- src/graphql/language/location.py | 2 +- src/graphql/language/parser.py | 2 +- src/graphql/language/predicates.py | 8 +- src/graphql/language/source.py | 2 +- src/graphql/language/visitor.py | 10 +- src/graphql/pyutils/__init__.py | 32 +- src/graphql/pyutils/format_list.py | 2 +- src/graphql/pyutils/is_awaitable.py | 6 +- src/graphql/type/__init__.py | 216 
+++---- src/graphql/type/assert_name.py | 2 +- src/graphql/type/definition.py | 96 +-- src/graphql/type/directives.py | 16 +- src/graphql/type/scalars.py | 14 +- src/graphql/type/schema.py | 2 +- src/graphql/type/validate.py | 2 +- src/graphql/utilities/__init__.py | 6 +- src/graphql/utilities/extend_schema.py | 2 +- .../utilities/find_breaking_changes.py | 7 +- .../utilities/get_introspection_query.py | 2 +- src/graphql/utilities/print_schema.py | 4 +- src/graphql/utilities/type_comparators.py | 2 +- src/graphql/validation/__init__.py | 32 +- ...ream_directive_on_valid_operations_rule.py | 3 +- .../validation/rules/known_argument_names.py | 2 +- .../rules/provided_required_arguments.py | 2 +- src/graphql/validation/validate.py | 2 +- src/graphql/version.py | 2 +- tests/execution/test_schema.py | 2 +- tests/fixtures/__init__.py | 4 +- tests/utils/__init__.py | 2 +- tests/validation/harness.py | 4 +- 43 files changed, 715 insertions(+), 717 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 30026bc5..7cdedaa9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,7 +149,6 @@ select = [ "YTT", # flake8-2020 ] ignore = [ - "ANN101", "ANN102", # no type annotation for self and cls needed "ANN401", # allow explicit Any "COM812", # allow trailing commas for auto-formatting "D105", "D107", # no docstring needed for magic methods diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index f70e77b0..6938435a 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -474,345 +474,345 @@ __all__ = [ - "version", - "version_info", - "version_js", - "version_info_js", - "graphql", - "graphql_sync", - "GraphQLSchema", - "GraphQLDirective", - "GraphQLScalarType", - "GraphQLObjectType", - "GraphQLInterfaceType", - "GraphQLUnionType", - "GraphQLEnumType", - "GraphQLInputObjectType", - "GraphQLList", - "GraphQLNonNull", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", - "GraphQLString", - "GraphQLBoolean", - "GraphQLID", + "BREAK", + 
"DEFAULT_DEPRECATION_REASON", "GRAPHQL_MAX_INT", "GRAPHQL_MIN_INT", - "specified_directives", - "GraphQLIncludeDirective", - "GraphQLSkipDirective", - "GraphQLDeferDirective", - "GraphQLStreamDirective", - "GraphQLDeprecatedDirective", - "GraphQLSpecifiedByDirective", - "GraphQLOneOfDirective", - "TypeKind", - "DEFAULT_DEPRECATION_REASON", - "introspection_types", - "SchemaMetaFieldDef", - "TypeMetaFieldDef", - "TypeNameMetaFieldDef", - "is_schema", - "is_directive", - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "is_specified_scalar_type", - "is_introspection_type", - "is_specified_directive", - "assert_schema", - "assert_directive", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", - "validate_schema", - "assert_valid_schema", - "assert_name", - "assert_enum_value_name", - "GraphQLType", - "GraphQLInputType", - "GraphQLOutputType", - "GraphQLLeafType", - "GraphQLCompositeType", + "IDLE", + "REMOVE", + "SKIP", + "ASTValidationRule", + "ArgumentNode", + "BooleanValueNode", + "BreakingChange", + "BreakingChangeType", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", + "DangerousChange", + "DangerousChangeType", 
+ "DefinitionNode", + "DirectiveDefinitionNode", + "DirectiveLocation", + "DirectiveNode", + "DocumentNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", + "ExecutableDefinitionNode", + "ExecutableDefinitionsRule", + "ExecutionContext", + "ExecutionResult", + "ExperimentalIncrementalExecutionResults", + "FieldDefinitionNode", + "FieldNode", + "FieldsOnCorrectTypeRule", + "FloatValueNode", + "FormattedExecutionResult", + "FormattedIncrementalDeferResult", + "FormattedIncrementalResult", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "FragmentDefinitionNode", + "FragmentSpreadNode", + "FragmentsOnCompositeTypesRule", "GraphQLAbstractType", - "GraphQLWrappingType", - "GraphQLNullableType", - "GraphQLNullableInputType", - "GraphQLNullableOutputType", - "GraphQLNamedType", - "GraphQLNamedInputType", - "GraphQLNamedOutputType", - "Thunk", - "ThunkCollection", - "ThunkMapping", "GraphQLArgument", + "GraphQLArgumentKwargs", "GraphQLArgumentMap", + "GraphQLBoolean", + "GraphQLCompositeType", + "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", + "GraphQLDirective", + "GraphQLDirectiveKwargs", + "GraphQLEnumType", + "GraphQLEnumTypeKwargs", "GraphQLEnumValue", + "GraphQLEnumValueKwargs", "GraphQLEnumValueMap", + "GraphQLError", + "GraphQLErrorExtensions", "GraphQLField", + "GraphQLFieldKwargs", "GraphQLFieldMap", "GraphQLFieldResolver", + "GraphQLFloat", + "GraphQLFormattedError", + "GraphQLID", + "GraphQLIncludeDirective", "GraphQLInputField", + "GraphQLInputFieldKwargs", "GraphQLInputFieldMap", "GraphQLInputFieldOutType", - "GraphQLScalarSerializer", - "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLIsTypeOfFn", - "GraphQLResolveInfo", - "ResponsePath", - "GraphQLTypeResolver", - "GraphQLArgumentKwargs", - "GraphQLDirectiveKwargs", - "GraphQLEnumTypeKwargs", - 
"GraphQLEnumValueKwargs", - "GraphQLFieldKwargs", - "GraphQLInputFieldKwargs", + "GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", + "GraphQLInputType", + "GraphQLInt", + "GraphQLInterfaceType", "GraphQLInterfaceTypeKwargs", + "GraphQLIsTypeOfFn", + "GraphQLLeafType", + "GraphQLList", + "GraphQLNamedInputType", + "GraphQLNamedOutputType", + "GraphQLNamedType", "GraphQLNamedTypeKwargs", + "GraphQLNonNull", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", + "GraphQLNullableType", + "GraphQLObjectType", "GraphQLObjectTypeKwargs", + "GraphQLOneOfDirective", + "GraphQLOutputType", + "GraphQLResolveInfo", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", + "GraphQLScalarType", "GraphQLScalarTypeKwargs", + "GraphQLScalarValueParser", + "GraphQLSchema", "GraphQLSchemaKwargs", - "GraphQLUnionTypeKwargs", - "Source", - "get_location", - "print_location", - "print_source_location", - "Lexer", - "TokenKind", - "parse", - "parse_value", - "parse_const_value", - "parse_type", - "print_ast", - "visit", - "ParallelVisitor", - "TypeInfoVisitor", - "Visitor", - "VisitorAction", - "VisitorKeyMap", - "BREAK", - "SKIP", - "REMOVE", - "IDLE", - "DirectiveLocation", - "is_definition_node", - "is_executable_definition_node", - "is_nullability_assertion_node", - "is_selection_node", - "is_value_node", - "is_const_value_node", - "is_type_node", - "is_type_system_definition_node", - "is_type_definition_node", - "is_type_system_extension_node", - "is_type_extension_node", - "SourceLocation", - "Location", - "Token", - "Node", - "NameNode", - "DocumentNode", - "DefinitionNode", - "ExecutableDefinitionNode", - "OperationDefinitionNode", - "OperationType", - "VariableDefinitionNode", - "VariableNode", - "SelectionSetNode", - "SelectionNode", - "FieldNode", - "ArgumentNode", - "NullabilityAssertionNode", - "NonNullAssertionNode", - "ErrorBoundaryNode", - "ListNullabilityOperatorNode", - "ConstArgumentNode", - "FragmentSpreadNode", - "InlineFragmentNode", - 
"FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", - "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", - "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", - "NamedTypeNode", - "ListTypeNode", - "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", - "OperationTypeDefinitionNode", - "TypeDefinitionNode", - "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", - "TypeSystemExtensionNode", - "SchemaExtensionNode", - "TypeExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", - "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", - "execute", - "execute_sync", - "default_field_resolver", - "default_type_resolver", - "get_argument_values", - "get_directive_values", - "get_variable_values", - "ExecutionContext", - "ExecutionResult", - "ExperimentalIncrementalExecutionResults", - "InitialIncrementalExecutionResult", - "SubsequentIncrementalExecutionResult", + "GraphQLSkipDirective", + "GraphQLSpecifiedByDirective", + "GraphQLStreamDirective", + "GraphQLString", + "GraphQLSyntaxError", + "GraphQLType", + "GraphQLTypeResolver", + "GraphQLUnionType", + "GraphQLUnionTypeKwargs", + "GraphQLWrappingType", "IncrementalDeferResult", - "IncrementalStreamResult", "IncrementalResult", - "FormattedExecutionResult", - "FormattedInitialIncrementalExecutionResult", - "FormattedSubsequentIncrementalExecutionResult", - "FormattedIncrementalDeferResult", - "FormattedIncrementalStreamResult", - "FormattedIncrementalResult", - 
"Middleware", - "MiddlewareManager", - "subscribe", - "create_source_event_stream", - "map_async_iterable", - "validate", - "ValidationContext", - "ValidationRule", - "ASTValidationRule", - "SDLValidationRule", - "specified_rules", - "ExecutableDefinitionsRule", - "FieldsOnCorrectTypeRule", - "FragmentsOnCompositeTypesRule", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", + "InlineFragmentNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", + "IntValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "IntrospectionQuery", "KnownArgumentNamesRule", "KnownDirectivesRule", "KnownFragmentNamesRule", "KnownTypeNamesRule", + "Lexer", + "ListNullabilityOperatorNode", + "ListTypeNode", + "ListValueNode", + "Location", "LoneAnonymousOperationRule", + "LoneSchemaDefinitionRule", + "Middleware", + "MiddlewareManager", + "NameNode", + "NamedTypeNode", + "NoDeprecatedCustomRule", "NoFragmentCyclesRule", + "NoSchemaIntrospectionCustomRule", "NoUndefinedVariablesRule", "NoUnusedFragmentsRule", "NoUnusedVariablesRule", + "Node", + "NonNullAssertionNode", + "NonNullTypeNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", + "OperationType", + "OperationTypeDefinitionNode", "OverlappingFieldsCanBeMergedRule", + "ParallelVisitor", "PossibleFragmentSpreadsRule", + "PossibleTypeExtensionsRule", "ProvidedRequiredArgumentsRule", + "ResponsePath", + "SDLValidationRule", "ScalarLeafsRule", + "ScalarTypeDefinitionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", + "SchemaExtensionNode", + "SchemaMetaFieldDef", + "SelectionNode", + "SelectionSetNode", "SingleFieldSubscriptionsRule", + "Source", + "SourceLocation", + "StringValueNode", + "SubsequentIncrementalExecutionResult", + "Thunk", + "ThunkCollection", + "ThunkMapping", + "Token", + "TokenKind", + 
"TypeDefinitionNode", + "TypeExtensionNode", + "TypeInfo", + "TypeInfoVisitor", + "TypeKind", + "TypeMetaFieldDef", + "TypeNameMetaFieldDef", + "TypeNode", + "TypeSystemDefinitionNode", + "TypeSystemExtensionNode", + "Undefined", + "UndefinedType", + "UnionTypeDefinitionNode", + "UnionTypeExtensionNode", + "UniqueArgumentDefinitionNamesRule", "UniqueArgumentNamesRule", + "UniqueDirectiveNamesRule", "UniqueDirectivesPerLocationRule", + "UniqueEnumValueNamesRule", + "UniqueFieldDefinitionNamesRule", "UniqueFragmentNamesRule", "UniqueInputFieldNamesRule", "UniqueOperationNamesRule", + "UniqueOperationTypesRule", + "UniqueTypeNamesRule", "UniqueVariableNamesRule", + "ValidationContext", + "ValidationRule", + "ValueNode", "ValuesOfCorrectTypeRule", + "VariableDefinitionNode", + "VariableNode", "VariablesAreInputTypesRule", "VariablesInAllowedPositionRule", - "LoneSchemaDefinitionRule", - "UniqueOperationTypesRule", - "UniqueTypeNamesRule", - "UniqueEnumValueNamesRule", - "UniqueFieldDefinitionNamesRule", - "UniqueArgumentDefinitionNamesRule", - "UniqueDirectiveNamesRule", - "PossibleTypeExtensionsRule", - "NoDeprecatedCustomRule", - "NoSchemaIntrospectionCustomRule", - "GraphQLError", - "GraphQLErrorExtensions", - "GraphQLFormattedError", - "GraphQLSyntaxError", - "located_error", - "get_introspection_query", - "IntrospectionQuery", - "get_operation_ast", - "introspection_from_schema", - "build_client_schema", + "Visitor", + "VisitorAction", + "VisitorKeyMap", + "assert_abstract_type", + "assert_composite_type", + "assert_directive", + "assert_enum_type", + "assert_enum_value_name", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_name", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_schema", + "assert_type", + "assert_union_type", + "assert_valid_schema", + 
"assert_wrapping_type", + "ast_from_value", + "ast_to_dict", "build_ast_schema", + "build_client_schema", "build_schema", + "coerce_input_value", + "concat_ast", + "create_source_event_stream", + "default_field_resolver", + "default_type_resolver", + "do_types_overlap", + "execute", + "execute_sync", "extend_schema", + "find_breaking_changes", + "find_dangerous_changes", + "get_argument_values", + "get_directive_values", + "get_introspection_query", + "get_location", + "get_named_type", + "get_nullable_type", + "get_operation_ast", + "get_variable_values", + "graphql", + "graphql_sync", + "introspection_from_schema", + "introspection_types", + "is_abstract_type", + "is_composite_type", + "is_const_value_node", + "is_definition_node", + "is_directive", + "is_enum_type", + "is_equal_type", + "is_executable_definition_node", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_introspection_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullability_assertion_node", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_schema", + "is_selection_node", + "is_specified_directive", + "is_specified_scalar_type", + "is_type", + "is_type_definition_node", + "is_type_extension_node", + "is_type_node", + "is_type_sub_type_of", + "is_type_system_definition_node", + "is_type_system_extension_node", + "is_union_type", + "is_value_node", + "is_wrapping_type", "lexicographic_sort_schema", - "print_schema", - "print_type", + "located_error", + "map_async_iterable", + "parse", + "parse_const_value", + "parse_type", + "parse_value", + "print_ast", "print_directive", "print_introspection_schema", + "print_location", + "print_schema", + "print_source_location", + "print_type", + "resolve_thunk", + "separate_operations", + "specified_directives", + "specified_rules", + "specified_scalar_types", + "strip_ignored_characters", + "subscribe", 
"type_from_ast", + "validate", + "validate_schema", "value_from_ast", "value_from_ast_untyped", - "ast_from_value", - "ast_to_dict", - "TypeInfo", - "coerce_input_value", - "concat_ast", - "separate_operations", - "strip_ignored_characters", - "is_equal_type", - "is_type_sub_type_of", - "do_types_overlap", - "find_breaking_changes", - "find_dangerous_changes", - "BreakingChange", - "BreakingChangeType", - "DangerousChange", - "DangerousChangeType", - "Undefined", - "UndefinedType", + "version", + "version_info", + "version_info_js", + "version_js", + "visit", ] diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index ff128748..8123a713 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -108,14 +108,14 @@ class GraphQLError(Exception): """Extension fields to add to the formatted error""" __slots__ = ( + "extensions", + "locations", "message", "nodes", - "source", - "positions", - "locations", - "path", "original_error", - "extensions", + "path", + "positions", + "source", ) __hash__ = Exception.__hash__ diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 2d5225be..375ec400 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -37,31 +37,31 @@ __all__ = [ "ASYNC_DELAY", - "create_source_event_stream", - "execute", - "experimental_execute_incrementally", - "execute_sync", - "default_field_resolver", - "default_type_resolver", - "subscribe", "ExecutionContext", "ExecutionResult", "ExperimentalIncrementalExecutionResults", - "InitialIncrementalExecutionResult", - "SubsequentIncrementalExecutionResult", - "IncrementalDeferResult", - "IncrementalStreamResult", - "IncrementalResult", "FormattedExecutionResult", - "FormattedInitialIncrementalExecutionResult", - "FormattedSubsequentIncrementalExecutionResult", "FormattedIncrementalDeferResult", - "FormattedIncrementalStreamResult", "FormattedIncrementalResult", - 
"map_async_iterable", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "IncrementalDeferResult", + "IncrementalResult", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", "Middleware", "MiddlewareManager", + "SubsequentIncrementalExecutionResult", + "create_source_event_stream", + "default_field_resolver", + "default_type_resolver", + "execute", + "execute_sync", + "experimental_execute_incrementally", "get_argument_values", "get_directive_values", "get_variable_values", + "map_async_iterable", + "subscribe", ] diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 5cb5a723..4f581252 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -33,11 +33,11 @@ __all__ = [ - "collect_fields", - "collect_subfields", "FieldGroup", "FieldsAndPatches", "GroupedFieldSet", + "collect_fields", + "collect_subfields", ] if sys.version_info < (3, 9): diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ca4df8ff..30a6234d 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -27,9 +27,9 @@ from typing_extensions import TypeAlias, TypeGuard try: # only needed for Python < 3.11 # noinspection PyCompatibility - from asyncio.exceptions import TimeoutError + from asyncio.exceptions import TimeoutError # noqa: A004 except ImportError: # Python < 3.7 - from concurrent.futures import TimeoutError + from concurrent.futures import TimeoutError # noqa: A004 from ..error import GraphQLError, located_error from ..language import ( @@ -98,6 +98,8 @@ async def anext(iterator: AsyncIterator) -> Any: __all__ = [ "ASYNC_DELAY", + "ExecutionContext", + "Middleware", "create_source_event_stream", "default_field_resolver", "default_type_resolver", @@ -105,8 +107,6 @@ async def anext(iterator: AsyncIterator) -> Any: "execute_sync", 
"experimental_execute_incrementally", "subscribe", - "ExecutionContext", - "Middleware", ] suppress_exceptions = suppress(Exception) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index fdc35fff..a1b8c507 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -37,13 +37,13 @@ "FormattedIncrementalStreamResult", "FormattedInitialIncrementalExecutionResult", "FormattedSubsequentIncrementalExecutionResult", - "InitialIncrementalExecutionResult", - "InitialResultRecord", "IncrementalDataRecord", "IncrementalDeferResult", "IncrementalPublisher", "IncrementalResult", "IncrementalStreamResult", + "InitialIncrementalExecutionResult", + "InitialResultRecord", "StreamItemsRecord", "SubsequentIncrementalExecutionResult", ] @@ -151,7 +151,7 @@ class InitialIncrementalExecutionResult: has_next: bool extensions: dict[str, Any] | None - __slots__ = "data", "errors", "has_next", "incremental", "extensions" + __slots__ = "data", "errors", "extensions", "has_next", "incremental" def __init__( self, @@ -257,7 +257,7 @@ class IncrementalDeferResult: label: str | None extensions: dict[str, Any] | None - __slots__ = "data", "errors", "path", "label", "extensions" + __slots__ = "data", "errors", "extensions", "label", "path" def __init__( self, @@ -350,7 +350,7 @@ class IncrementalStreamResult: label: str | None extensions: dict[str, Any] | None - __slots__ = "items", "errors", "path", "label", "extensions" + __slots__ = "errors", "extensions", "items", "label", "path" def __init__( self, @@ -446,7 +446,7 @@ class SubsequentIncrementalExecutionResult: - ``incremental`` is a list of the results from defer/stream directives. 
""" - __slots__ = "has_next", "incremental", "extensions" + __slots__ = "extensions", "has_next", "incremental" incremental: Sequence[IncrementalResult] | None has_next: bool diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index de99e12b..6d999171 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -30,7 +30,7 @@ class MiddlewareManager: """ # allow custom attributes (not used internally) - __slots__ = "__dict__", "middlewares", "_middleware_resolvers", "_cached_resolvers" + __slots__ = "__dict__", "_cached_resolvers", "_middleware_resolvers", "middlewares" _cached_resolvers: dict[GraphQLFieldResolver, GraphQLFieldResolver] _middleware_resolvers: list[Callable] | None diff --git a/src/graphql/language/__init__.py b/src/graphql/language/__init__.py index 2f105a98..bd5e7be1 100644 --- a/src/graphql/language/__init__.py +++ b/src/graphql/language/__init__.py @@ -115,104 +115,104 @@ from .directive_locations import DirectiveLocation __all__ = [ - "get_location", - "SourceLocation", - "FormattedSourceLocation", - "print_location", - "print_source_location", - "TokenKind", - "Lexer", - "parse", - "parse_value", - "parse_const_value", - "parse_type", - "print_ast", - "Source", - "visit", - "Visitor", - "ParallelVisitor", - "VisitorAction", - "VisitorKeyMap", "BREAK", - "SKIP", - "REMOVE", "IDLE", - "Location", - "Token", + "REMOVE", + "SKIP", + "ArgumentNode", + "BooleanValueNode", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", + "DefinitionNode", + "DirectiveDefinitionNode", "DirectiveLocation", - "Node", - "NameNode", + "DirectiveNode", "DocumentNode", - "DefinitionNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", "ExecutableDefinitionNode", - "OperationDefinitionNode", - "OperationType", - 
"VariableDefinitionNode", - "VariableNode", - "SelectionSetNode", - "SelectionNode", + "FieldDefinitionNode", "FieldNode", - "NullabilityAssertionNode", - "NonNullAssertionNode", - "ErrorBoundaryNode", - "ListNullabilityOperatorNode", - "ArgumentNode", - "ConstArgumentNode", + "FloatValueNode", + "FormattedSourceLocation", + "FragmentDefinitionNode", "FragmentSpreadNode", "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "Lexer", + "ListNullabilityOperatorNode", + "ListTypeNode", "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", + "Location", + "NameNode", "NamedTypeNode", - "ListTypeNode", + "Node", + "NonNullAssertionNode", "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", + "OperationType", "OperationTypeDefinitionNode", - "TypeDefinitionNode", + "ParallelVisitor", "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", - "TypeSystemExtensionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", "SchemaExtensionNode", + "SelectionNode", + "SelectionSetNode", + "Source", + "SourceLocation", + "StringValueNode", + "Token", + "TokenKind", + "TypeDefinitionNode", 
"TypeExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", + "TypeNode", + "TypeSystemDefinitionNode", + "TypeSystemExtensionNode", + "UnionTypeDefinitionNode", "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", + "ValueNode", + "VariableDefinitionNode", + "VariableNode", + "Visitor", + "VisitorAction", + "VisitorKeyMap", + "get_location", + "is_const_value_node", "is_definition_node", "is_executable_definition_node", "is_nullability_assertion_node", "is_selection_node", - "is_value_node", - "is_const_value_node", + "is_type_definition_node", + "is_type_extension_node", "is_type_node", "is_type_system_definition_node", - "is_type_definition_node", "is_type_system_extension_node", - "is_type_extension_node", + "is_value_node", + "parse", + "parse_const_value", + "parse_type", + "parse_value", + "print_ast", + "print_location", + "print_source_location", + "visit", ] diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index 5b61767d..a67ee1ea 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -19,73 +19,73 @@ __all__ = [ - "Location", - "Token", - "Node", - "NameNode", - "DocumentNode", + "QUERY_DOCUMENT_KEYS", + "ArgumentNode", + "BooleanValueNode", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", "DefinitionNode", + "DirectiveDefinitionNode", + "DirectiveNode", + "DocumentNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", "ExecutableDefinitionNode", - "OperationDefinitionNode", - "VariableDefinitionNode", - "SelectionSetNode", - "SelectionNode", + "FieldDefinitionNode", "FieldNode", - "NullabilityAssertionNode", - "NonNullAssertionNode", - "ErrorBoundaryNode", - "ListNullabilityOperatorNode", - "ArgumentNode", - "ConstArgumentNode", + "FloatValueNode", + 
"FragmentDefinitionNode", "FragmentSpreadNode", "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", - "VariableNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "ListNullabilityOperatorNode", + "ListTypeNode", "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", + "Location", + "NameNode", "NamedTypeNode", - "ListTypeNode", + "Node", + "NonNullAssertionNode", "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", "OperationType", "OperationTypeDefinitionNode", - "TypeDefinitionNode", "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", "SchemaExtensionNode", + "SelectionNode", + "SelectionSetNode", + "StringValueNode", + "Token", + "TypeDefinitionNode", "TypeExtensionNode", + "TypeNode", + "TypeSystemDefinitionNode", "TypeSystemExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", + "UnionTypeDefinitionNode", "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", - "QUERY_DOCUMENT_KEYS", + "ValueNode", + "VariableDefinitionNode", + "VariableNode", ] @@ -95,7 +95,7 @@ class Token: Represents 
a range of characters represented by a lexical token within a Source. """ - __slots__ = "kind", "start", "end", "line", "column", "prev", "next", "value" + __slots__ = "column", "end", "kind", "line", "next", "prev", "start", "value" kind: TokenKind # the kind of token start: int # the character offset at which this Node begins @@ -202,11 +202,11 @@ class Location: """ __slots__ = ( - "start", "end", - "start_token", "end_token", "source", + "start", + "start_token", ) start: int # character offset at which this Node begins @@ -345,7 +345,7 @@ class Node: """AST nodes""" # allow custom attributes and weak references (not used internally) - __slots__ = "__dict__", "__weakref__", "loc", "_hash" + __slots__ = "__dict__", "__weakref__", "_hash", "loc" loc: Location | None @@ -457,7 +457,7 @@ class DefinitionNode(Node): class ExecutableDefinitionNode(DefinitionNode): - __slots__ = "name", "directives", "variable_definitions", "selection_set" + __slots__ = "directives", "name", "selection_set", "variable_definitions" name: NameNode | None directives: tuple[DirectiveNode, ...] 
@@ -472,7 +472,7 @@ class OperationDefinitionNode(ExecutableDefinitionNode): class VariableDefinitionNode(Node): - __slots__ = "variable", "type", "default_value", "directives" + __slots__ = "default_value", "directives", "type", "variable" variable: VariableNode type: TypeNode @@ -493,7 +493,7 @@ class SelectionNode(Node): class FieldNode(SelectionNode): - __slots__ = "alias", "name", "arguments", "nullability_assertion", "selection_set" + __slots__ = "alias", "arguments", "name", "nullability_assertion", "selection_set" alias: NameNode | None name: NameNode @@ -542,7 +542,7 @@ class FragmentSpreadNode(SelectionNode): class InlineFragmentNode(SelectionNode): - __slots__ = "type_condition", "selection_set" + __slots__ = "selection_set", "type_condition" type_condition: NamedTypeNode selection_set: SelectionSetNode @@ -581,7 +581,7 @@ class FloatValueNode(ValueNode): class StringValueNode(ValueNode): - __slots__ = "value", "block" + __slots__ = "block", "value" value: str block: bool | None @@ -650,7 +650,7 @@ class ConstObjectFieldNode(ObjectFieldNode): class DirectiveNode(Node): - __slots__ = "name", "arguments" + __slots__ = "arguments", "name" name: NameNode arguments: tuple[ArgumentNode, ...] @@ -711,7 +711,7 @@ class OperationTypeDefinitionNode(Node): class TypeDefinitionNode(TypeSystemDefinitionNode): - __slots__ = "description", "name", "directives" + __slots__ = "description", "directives", "name" description: StringValueNode | None name: NameNode @@ -725,7 +725,7 @@ class ScalarTypeDefinitionNode(TypeDefinitionNode): class ObjectTypeDefinitionNode(TypeDefinitionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" interfaces: tuple[NamedTypeNode, ...] directives: tuple[ConstDirectiveNode, ...] 
@@ -733,7 +733,7 @@ class ObjectTypeDefinitionNode(TypeDefinitionNode): class FieldDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives", "arguments", "type" + __slots__ = "arguments", "description", "directives", "name", "type" description: StringValueNode | None name: NameNode @@ -743,7 +743,7 @@ class FieldDefinitionNode(DefinitionNode): class InputValueDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives", "type", "default_value" + __slots__ = "default_value", "description", "directives", "name", "type" description: StringValueNode | None name: NameNode @@ -775,7 +775,7 @@ class EnumTypeDefinitionNode(TypeDefinitionNode): class EnumValueDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives" + __slots__ = "description", "directives", "name" description: StringValueNode | None name: NameNode @@ -793,7 +793,7 @@ class InputObjectTypeDefinitionNode(TypeDefinitionNode): class DirectiveDefinitionNode(TypeSystemDefinitionNode): - __slots__ = "description", "name", "arguments", "repeatable", "locations" + __slots__ = "arguments", "description", "locations", "name", "repeatable" description: StringValueNode | None name: NameNode @@ -816,7 +816,7 @@ class SchemaExtensionNode(Node): class TypeExtensionNode(TypeSystemDefinitionNode): - __slots__ = "name", "directives" + __slots__ = "directives", "name" name: NameNode directives: tuple[ConstDirectiveNode, ...] @@ -830,14 +830,14 @@ class ScalarTypeExtensionNode(TypeExtensionNode): class ObjectTypeExtensionNode(TypeExtensionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" interfaces: tuple[NamedTypeNode, ...] fields: tuple[FieldDefinitionNode, ...] class InterfaceTypeExtensionNode(TypeExtensionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" interfaces: tuple[NamedTypeNode, ...] fields: tuple[FieldDefinitionNode, ...] 
diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index d784c236..248927b4 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -149,8 +149,7 @@ def print_block_string(value: str, minimize: bool = False) -> str: skip_leading_new_line = is_single_line and value and value[0] in " \t" before = ( "\n" - if print_as_multiple_lines - and not skip_leading_new_line + if (print_as_multiple_lines and not skip_leading_new_line) or force_leading_new_line else "" ) diff --git a/src/graphql/language/character_classes.py b/src/graphql/language/character_classes.py index 628bd60f..5d870576 100644 --- a/src/graphql/language/character_classes.py +++ b/src/graphql/language/character_classes.py @@ -1,6 +1,6 @@ """Character classes""" -__all__ = ["is_digit", "is_letter", "is_name_start", "is_name_continue"] +__all__ = ["is_digit", "is_letter", "is_name_continue", "is_name_start"] def is_digit(char: str) -> bool: diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 8b1ee38d..7af55082 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from .source import Source -__all__ = ["get_location", "SourceLocation", "FormattedSourceLocation"] +__all__ = ["FormattedSourceLocation", "SourceLocation", "get_location"] class FormattedSourceLocation(TypedDict): diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 95c69ccb..55c249ba 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -77,7 +77,7 @@ from typing_extensions import TypeAlias -__all__ = ["parse", "parse_type", "parse_value", "parse_const_value"] +__all__ = ["parse", "parse_const_value", "parse_type", "parse_value"] T = TypeVar("T") diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index b65b1982..280662f8 100644 --- a/src/graphql/language/predicates.py +++ 
b/src/graphql/language/predicates.py @@ -26,17 +26,17 @@ __all__ = [ + "is_const_value_node", "is_definition_node", "is_executable_definition_node", "is_nullability_assertion_node", "is_selection_node", - "is_value_node", - "is_const_value_node", + "is_type_definition_node", + "is_type_extension_node", "is_type_node", "is_type_system_definition_node", - "is_type_definition_node", "is_type_system_extension_node", - "is_type_extension_node", + "is_value_node", ] diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index 01bb013f..d54bf969 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -21,7 +21,7 @@ class Source: """A representation of source input to GraphQL.""" # allow custom attributes and weak references (not used internally) - __slots__ = "__weakref__", "__dict__", "body", "name", "location_offset" + __slots__ = "__dict__", "__weakref__", "body", "location_offset", "name" def __init__( self, diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 450996d8..c9901230 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -25,15 +25,15 @@ __all__ = [ - "Visitor", + "BREAK", + "IDLE", + "REMOVE", + "SKIP", "ParallelVisitor", + "Visitor", "VisitorAction", "VisitorKeyMap", "visit", - "BREAK", - "SKIP", - "REMOVE", - "IDLE", ] diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index e1aefd6a..10faca9e 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -35,32 +35,32 @@ from .undefined import Undefined, UndefinedType __all__ = [ + "AwaitableOrValue", + "Description", + "FrozenError", + "Path", + "SimplePubSub", + "SimplePubSubIterator", + "Undefined", + "UndefinedType", + "and_list", "async_reduce", - "camel_to_snake", - "snake_to_camel", "cached_property", + "camel_to_snake", "did_you_mean", - "or_list", - "and_list", - "Description", "group_by", - "is_description", - 
"register_description", - "unregister_description", "identity_func", "inspect", "is_awaitable", "is_collection", + "is_description", "is_iterable", "merge_kwargs", "natural_comparison_key", - "AwaitableOrValue", - "suggestion_list", - "FrozenError", - "Path", + "or_list", "print_path_list", - "SimplePubSub", - "SimplePubSubIterator", - "Undefined", - "UndefinedType", + "register_description", + "snake_to_camel", + "suggestion_list", + "unregister_description", ] diff --git a/src/graphql/pyutils/format_list.py b/src/graphql/pyutils/format_list.py index 87184728..368e7ae0 100644 --- a/src/graphql/pyutils/format_list.py +++ b/src/graphql/pyutils/format_list.py @@ -4,7 +4,7 @@ from typing import Sequence -__all__ = ["or_list", "and_list"] +__all__ = ["and_list", "or_list"] def or_list(items: Sequence[str]) -> str: diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index ce8c93c0..158bcd40 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -27,8 +27,10 @@ def is_awaitable(value: Any) -> TypeGuard[Awaitable]: # check for coroutine objects isinstance(value, CoroutineType) # check for old-style generator based coroutine objects - or isinstance(value, GeneratorType) # for Python < 3.11 - and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) + or ( + isinstance(value, GeneratorType) # for Python < 3.11 + and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) + ) # check for other awaitables (e.g. 
futures) or hasattr(value, "__await__") ) diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index b95e0e55..8c41bd28 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -177,134 +177,134 @@ from .validate import validate_schema, assert_valid_schema __all__ = [ - "is_schema", - "assert_schema", - "assert_name", - "assert_enum_value_name", - "GraphQLSchema", - "GraphQLSchemaKwargs", - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", - "GraphQLScalarType", - "GraphQLObjectType", - "GraphQLInterfaceType", - "GraphQLUnionType", - "GraphQLEnumType", - "GraphQLInputObjectType", - "GraphQLInputType", - "GraphQLArgument", - "GraphQLList", - "GraphQLNonNull", - "GraphQLType", - "GraphQLInputType", - "GraphQLOutputType", - "GraphQLLeafType", - "GraphQLCompositeType", + "DEFAULT_DEPRECATION_REASON", + "GRAPHQL_MAX_INT", + "GRAPHQL_MIN_INT", "GraphQLAbstractType", - "GraphQLWrappingType", - "GraphQLNullableType", - "GraphQLNullableInputType", - "GraphQLNullableOutputType", - "GraphQLNamedType", - "GraphQLNamedInputType", - "GraphQLNamedOutputType", - "Thunk", - "ThunkCollection", - "ThunkMapping", "GraphQLArgument", + 
"GraphQLArgument", + "GraphQLArgumentKwargs", "GraphQLArgumentMap", + "GraphQLBoolean", + "GraphQLCompositeType", + "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", + "GraphQLDirective", + "GraphQLDirectiveKwargs", + "GraphQLEnumType", + "GraphQLEnumTypeKwargs", "GraphQLEnumValue", + "GraphQLEnumValueKwargs", "GraphQLEnumValueMap", "GraphQLField", + "GraphQLFieldKwargs", "GraphQLFieldMap", + "GraphQLFieldResolver", + "GraphQLFloat", + "GraphQLID", + "GraphQLIncludeDirective", "GraphQLInputField", + "GraphQLInputFieldKwargs", "GraphQLInputFieldMap", "GraphQLInputFieldOutType", - "GraphQLScalarSerializer", - "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLArgumentKwargs", - "GraphQLEnumTypeKwargs", - "GraphQLEnumValueKwargs", - "GraphQLFieldKwargs", - "GraphQLInputFieldKwargs", + "GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", + "GraphQLInputType", + "GraphQLInputType", + "GraphQLInt", + "GraphQLInterfaceType", "GraphQLInterfaceTypeKwargs", + "GraphQLIsTypeOfFn", + "GraphQLLeafType", + "GraphQLList", + "GraphQLNamedInputType", + "GraphQLNamedOutputType", + "GraphQLNamedType", "GraphQLNamedTypeKwargs", + "GraphQLNonNull", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", + "GraphQLNullableType", + "GraphQLObjectType", "GraphQLObjectTypeKwargs", - "GraphQLScalarTypeKwargs", - "GraphQLUnionTypeKwargs", - "GraphQLFieldResolver", - "GraphQLTypeResolver", - "GraphQLIsTypeOfFn", + "GraphQLOneOfDirective", + "GraphQLOutputType", "GraphQLResolveInfo", - "ResponsePath", - "is_directive", - "assert_directive", - "is_specified_directive", - "specified_directives", - "GraphQLDirective", - "GraphQLIncludeDirective", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", + "GraphQLScalarType", + "GraphQLScalarTypeKwargs", + "GraphQLScalarValueParser", + "GraphQLSchema", + "GraphQLSchemaKwargs", "GraphQLSkipDirective", - "GraphQLDeferDirective", - "GraphQLStreamDirective", - "GraphQLDeprecatedDirective", 
"GraphQLSpecifiedByDirective", - "GraphQLOneOfDirective", - "GraphQLDirectiveKwargs", - "DEFAULT_DEPRECATION_REASON", - "is_specified_scalar_type", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", + "GraphQLStreamDirective", "GraphQLString", - "GraphQLBoolean", - "GraphQLID", - "GRAPHQL_MAX_INT", - "GRAPHQL_MIN_INT", - "is_introspection_type", - "introspection_types", - "TypeKind", + "GraphQLType", + "GraphQLTypeResolver", + "GraphQLUnionType", + "GraphQLUnionTypeKwargs", + "GraphQLWrappingType", + "ResponsePath", "SchemaMetaFieldDef", + "Thunk", + "ThunkCollection", + "ThunkMapping", + "TypeKind", "TypeMetaFieldDef", "TypeNameMetaFieldDef", - "validate_schema", + "assert_abstract_type", + "assert_composite_type", + "assert_directive", + "assert_enum_type", + "assert_enum_value_name", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_name", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_schema", + "assert_type", + "assert_union_type", "assert_valid_schema", + "assert_wrapping_type", + "get_named_type", + "get_nullable_type", + "introspection_types", + "is_abstract_type", + "is_composite_type", + "is_directive", + "is_enum_type", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_introspection_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_schema", + "is_specified_directive", + "is_specified_scalar_type", + "is_type", + "is_union_type", + "is_wrapping_type", + "resolve_thunk", + "specified_directives", + "specified_scalar_types", + "validate_schema", ] diff --git a/src/graphql/type/assert_name.py b/src/graphql/type/assert_name.py index b7e94e2d..1a8f7689 100644 
--- a/src/graphql/type/assert_name.py +++ b/src/graphql/type/assert_name.py @@ -3,7 +3,7 @@ from ..error import GraphQLError from ..language.character_classes import is_name_continue, is_name_start -__all__ = ["assert_name", "assert_enum_value_name"] +__all__ = ["assert_enum_value_name", "assert_name"] def assert_name(name: str) -> str: diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 312a41b2..f49691e7 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -70,45 +70,6 @@ from .schema import GraphQLSchema __all__ = [ - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", "GraphQLAbstractType", "GraphQLArgument", "GraphQLArgumentKwargs", @@ -135,23 +96,23 @@ "GraphQLIsTypeOfFn", "GraphQLLeafType", "GraphQLList", - "GraphQLNamedType", - "GraphQLNamedTypeKwargs", "GraphQLNamedInputType", "GraphQLNamedOutputType", - "GraphQLNullableType", + "GraphQLNamedType", + "GraphQLNamedTypeKwargs", + "GraphQLNonNull", "GraphQLNullableInputType", "GraphQLNullableOutputType", - "GraphQLNonNull", + "GraphQLNullableType", + "GraphQLObjectType", + "GraphQLObjectTypeKwargs", + "GraphQLOutputType", "GraphQLResolveInfo", + 
"GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", "GraphQLScalarType", "GraphQLScalarTypeKwargs", - "GraphQLScalarSerializer", "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLObjectType", - "GraphQLObjectTypeKwargs", - "GraphQLOutputType", "GraphQLType", "GraphQLTypeResolver", "GraphQLUnionType", @@ -160,6 +121,45 @@ "Thunk", "ThunkCollection", "ThunkMapping", + "assert_abstract_type", + "assert_composite_type", + "assert_enum_type", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_type", + "assert_union_type", + "assert_wrapping_type", + "get_named_type", + "get_nullable_type", + "is_abstract_type", + "is_composite_type", + "is_enum_type", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_type", + "is_union_type", + "is_wrapping_type", + "resolve_thunk", ] diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index d4160300..5fe48b94 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -20,20 +20,20 @@ from typing_extensions import TypeGuard __all__ = [ - "is_directive", - "assert_directive", - "is_specified_directive", - "specified_directives", + "DEFAULT_DEPRECATION_REASON", + "DirectiveLocation", "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", "GraphQLDirective", "GraphQLDirectiveKwargs", "GraphQLIncludeDirective", "GraphQLSkipDirective", - "GraphQLStreamDirective", - "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", - "DirectiveLocation", - "DEFAULT_DEPRECATION_REASON", + 
"GraphQLStreamDirective", + "assert_directive", + "is_directive", + "is_specified_directive", + "specified_directives", ] diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 22669c80..1bc98c21 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -23,15 +23,15 @@ from typing_extensions import TypeGuard __all__ = [ - "is_specified_scalar_type", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", - "GraphQLString", - "GraphQLBoolean", - "GraphQLID", "GRAPHQL_MAX_INT", "GRAPHQL_MIN_INT", + "GraphQLBoolean", + "GraphQLFloat", + "GraphQLID", + "GraphQLInt", + "GraphQLString", + "is_specified_scalar_type", + "specified_scalar_types", ] # As per the GraphQL Spec, Integers are only treated as valid diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 5e546298..3099991d 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -49,7 +49,7 @@ except ImportError: # Python < 3.10 from typing_extensions import TypeAlias, TypeGuard -__all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "is_schema", "assert_schema"] +__all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "assert_schema", "is_schema"] TypeMap: TypeAlias = Dict[str, GraphQLNamedType] diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index c1e806c1..109667f1 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -41,7 +41,7 @@ from .introspection import is_introspection_type from .schema import GraphQLSchema, assert_schema -__all__ = ["validate_schema", "assert_valid_schema"] +__all__ = ["assert_valid_schema", "validate_schema"] def validate_schema(schema: GraphQLSchema) -> list[GraphQLError]: diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index f528bdcc..5aadcc31 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -100,14 +100,14 @@ "find_dangerous_changes", "get_introspection_query", "get_operation_ast", 
+ "introspection_from_schema", "is_equal_type", "is_type_sub_type_of", - "introspection_from_schema", "lexicographic_sort_schema", - "print_schema", - "print_type", "print_directive", "print_introspection_schema", + "print_schema", + "print_type", "print_value", "separate_operations", "strip_ignored_characters", diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 72283269..14adc661 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -92,8 +92,8 @@ from .value_from_ast import value_from_ast __all__ = [ - "extend_schema", "ExtendSchemaImpl", + "extend_schema", ] diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index c88c1265..d436f1d4 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -216,11 +216,8 @@ def find_type_changes( schema_changes.extend(find_union_type_changes(old_type, new_type)) elif is_input_object_type(old_type) and is_input_object_type(new_type): schema_changes.extend(find_input_object_type_changes(old_type, new_type)) - elif ( - is_object_type(old_type) - and is_object_type(new_type) - or is_interface_type(old_type) - and is_interface_type(new_type) + elif (is_object_type(old_type) and is_object_type(new_type)) or ( + is_interface_type(old_type) and is_interface_type(new_type) ): schema_changes.extend(find_field_changes(old_type, new_type)) schema_changes.extend( diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 4babfaec..c23a1533 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -19,7 +19,6 @@ __all__ = [ - "get_introspection_query", "IntrospectionDirective", "IntrospectionEnumType", "IntrospectionField", @@ -35,6 +34,7 @@ "IntrospectionType", "IntrospectionTypeRef", "IntrospectionUnionType", + 
"get_introspection_query", ] diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 44c876dc..dd68e54e 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -33,10 +33,10 @@ from .ast_from_value import ast_from_value __all__ = [ - "print_schema", - "print_type", "print_directive", "print_introspection_schema", + "print_schema", + "print_type", "print_value", ] diff --git a/src/graphql/utilities/type_comparators.py b/src/graphql/utilities/type_comparators.py index 3ab50dc5..609c19b6 100644 --- a/src/graphql/utilities/type_comparators.py +++ b/src/graphql/utilities/type_comparators.py @@ -11,7 +11,7 @@ is_object_type, ) -__all__ = ["is_equal_type", "is_type_sub_type_of", "do_types_overlap"] +__all__ = ["do_types_overlap", "is_equal_type", "is_type_sub_type_of"] def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool: diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 8f67f9b7..ed6ca6c8 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -124,14 +124,8 @@ from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule __all__ = [ - "validate", "ASTValidationContext", "ASTValidationRule", - "SDLValidationContext", - "SDLValidationRule", - "ValidationContext", - "ValidationRule", - "specified_rules", "DeferStreamDirectiveLabel", "DeferStreamDirectiveOnRootField", "DeferStreamDirectiveOnValidOperationsRule", @@ -143,33 +137,39 @@ "KnownFragmentNamesRule", "KnownTypeNamesRule", "LoneAnonymousOperationRule", + "LoneSchemaDefinitionRule", + "NoDeprecatedCustomRule", "NoFragmentCyclesRule", + "NoSchemaIntrospectionCustomRule", "NoUndefinedVariablesRule", "NoUnusedFragmentsRule", "NoUnusedVariablesRule", "OverlappingFieldsCanBeMergedRule", "PossibleFragmentSpreadsRule", + "PossibleTypeExtensionsRule", "ProvidedRequiredArgumentsRule", + "SDLValidationContext", + "SDLValidationRule", 
"ScalarLeafsRule", "SingleFieldSubscriptionsRule", "StreamDirectiveOnListField", + "UniqueArgumentDefinitionNamesRule", "UniqueArgumentNamesRule", + "UniqueDirectiveNamesRule", "UniqueDirectivesPerLocationRule", + "UniqueEnumValueNamesRule", + "UniqueFieldDefinitionNamesRule", "UniqueFragmentNamesRule", "UniqueInputFieldNamesRule", "UniqueOperationNamesRule", + "UniqueOperationTypesRule", + "UniqueTypeNamesRule", "UniqueVariableNamesRule", + "ValidationContext", + "ValidationRule", "ValuesOfCorrectTypeRule", "VariablesAreInputTypesRule", "VariablesInAllowedPositionRule", - "LoneSchemaDefinitionRule", - "UniqueOperationTypesRule", - "UniqueTypeNamesRule", - "UniqueEnumValueNamesRule", - "UniqueFieldDefinitionNamesRule", - "UniqueArgumentDefinitionNamesRule", - "UniqueDirectiveNamesRule", - "PossibleTypeExtensionsRule", - "NoDeprecatedCustomRule", - "NoSchemaIntrospectionCustomRule", + "specified_rules", + "validate", ] diff --git a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py index c412b89e..0159715d 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py @@ -66,7 +66,8 @@ def enter_directive( if ( isinstance(definition_node, FragmentDefinitionNode) and definition_node.name.value in self.fragments_used_on_subscriptions - or isinstance(definition_node, OperationDefinitionNode) + ) or ( + isinstance(definition_node, OperationDefinitionNode) and definition_node.operation == OperationType.SUBSCRIPTION ): if node.name.value == GraphQLDeferDirective.name: diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index dadfd34a..46f9ef42 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -16,7 +16,7 
@@ from ...type import specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext -__all__ = ["KnownArgumentNamesRule", "KnownArgumentNamesOnDirectivesRule"] +__all__ = ["KnownArgumentNamesOnDirectivesRule", "KnownArgumentNamesRule"] class KnownArgumentNamesOnDirectivesRule(ASTValidationRule): diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index a9313273..f94515fe 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -19,7 +19,7 @@ from ...type import GraphQLArgument, is_required_argument, is_type, specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext -__all__ = ["ProvidedRequiredArgumentsRule", "ProvidedRequiredArgumentsOnDirectivesRule"] +__all__ = ["ProvidedRequiredArgumentsOnDirectivesRule", "ProvidedRequiredArgumentsRule"] class ProvidedRequiredArgumentsOnDirectivesRule(ASTValidationRule): diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 08c83780..8e59821c 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -15,11 +15,11 @@ from .rules import ASTValidationRule __all__ = [ + "ValidationAbortedError", "assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl", - "ValidationAbortedError", ] diff --git a/src/graphql/version.py b/src/graphql/version.py index 29166e49..7b08ac67 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -5,7 +5,7 @@ import re from typing import NamedTuple -__all__ = ["version", "version_info", "version_js", "version_info_js"] +__all__ = ["version", "version_info", "version_info_js", "version_js"] version = "3.3.0a6" diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index a3448d89..593c1cf6 100644 --- a/tests/execution/test_schema.py +++ 
b/tests/execution/test_schema.py @@ -78,7 +78,7 @@ def __init__(self, id: int): # noqa: A002 "article": GraphQLField( BlogArticle, args={"id": GraphQLArgument(GraphQLID)}, - resolve=lambda _obj, _info, id: Article(id), # noqa: A002 + resolve=lambda _obj, _info, id: Article(id), ), "feed": GraphQLField( GraphQLList(BlogArticle), diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index 3df1c2f0..5e4058f9 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -7,11 +7,11 @@ import pytest __all__ = [ + "big_schema_introspection_result", + "big_schema_sdl", "cleanup", "kitchen_sink_query", "kitchen_sink_sdl", - "big_schema_sdl", - "big_schema_introspection_result", ] diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 6ae4a6e5..ea374993 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -8,8 +8,8 @@ from .viral_sdl import viral_sdl __all__ = [ - "assert_matching_values", "assert_equal_awaitables_or_values", + "assert_matching_values", "dedent", "gen_fuzz_strings", "viral_schema", diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 9a6912f4..737fb2df 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -12,9 +12,9 @@ from graphql.validation import ASTValidationRule __all__ = [ - "test_schema", - "assert_validation_errors", "assert_sdl_validation_errors", + "assert_validation_errors", + "test_schema", ] test_schema = build_schema( From 233df173133a0044e8e88ac93c556a3aeabb430d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 19:39:10 +0100 Subject: [PATCH 204/230] Introduces new incremental response format Replicates graphql/graphql-js@00e2b50fc453b5a4c00e65ab5c902963cca26d3f --- docs/conf.py | 17 +- docs/modules/pyutils.rst | 4 + pyproject.toml | 2 + src/graphql/execution/async_iterables.py | 7 +- src/graphql/execution/collect_fields.py | 363 ++++++--- src/graphql/execution/execute.py | 615 ++++++++++----- 
.../execution/incremental_publisher.py | 644 +++++++++++----- src/graphql/pyutils/__init__.py | 4 + src/graphql/pyutils/ref_map.py | 79 ++ src/graphql/pyutils/ref_set.py | 67 ++ .../rules/single_field_subscriptions.py | 26 +- tests/execution/test_customize.py | 10 +- tests/execution/test_defer.py | 702 ++++++++++++------ tests/execution/test_lists.py | 1 + tests/execution/test_mutations.py | 4 +- tests/execution/test_stream.py | 545 +++++++------- tests/pyutils/test_ref_map.py | 124 ++++ tests/pyutils/test_ref_set.py | 89 +++ 18 files changed, 2290 insertions(+), 1013 deletions(-) create mode 100644 src/graphql/pyutils/ref_map.py create mode 100644 src/graphql/pyutils/ref_set.py create mode 100644 tests/pyutils/test_ref_map.py create mode 100644 tests/pyutils/test_ref_set.py diff --git a/docs/conf.py b/docs/conf.py index 4655434b..d3de91ea 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -147,6 +147,8 @@ types.TracebackType TypeMap AwaitableOrValue +DeferredFragmentRecord +DeferUsage EnterLeaveVisitor ExperimentalIncrementalExecutionResults FieldGroup @@ -165,18 +167,31 @@ IncrementalResult InitialResultRecord Middleware +StreamItemsRecord +StreamRecord SubsequentDataRecord asyncio.events.AbstractEventLoop -graphql.execution.collect_fields.FieldsAndPatches +collections.abc.MutableMapping +collections.abc.MutableSet +graphql.execution.collect_fields.DeferUsage +graphql.execution.collect_fields.CollectFieldsResult +graphql.execution.collect_fields.FieldGroup graphql.execution.execute.StreamArguments +graphql.execution.execute.StreamUsage graphql.execution.map_async_iterable.map_async_iterable +graphql.execution.incremental_publisher.CompletedResult graphql.execution.incremental_publisher.DeferredFragmentRecord +graphql.execution.incremental_publisher.DeferredGroupedFieldSetRecord +graphql.execution.incremental_publisher.FormattedCompletedResult graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.InitialResultRecord 
graphql.execution.incremental_publisher.StreamItemsRecord +graphql.execution.incremental_publisher.StreamRecord graphql.execution.Middleware graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor +graphql.pyutils.ref_map.K +graphql.pyutils.ref_map.V graphql.type.definition.GT_co graphql.type.definition.GNT_co graphql.type.definition.TContext diff --git a/docs/modules/pyutils.rst b/docs/modules/pyutils.rst index cd178d65..e33b5d1f 100644 --- a/docs/modules/pyutils.rst +++ b/docs/modules/pyutils.rst @@ -30,3 +30,7 @@ PyUtils .. autoclass:: SimplePubSub .. autoclass:: SimplePubSubIterator .. autodata:: Undefined +.. autoclass:: RefMap + :no-inherited-members: +.. autoclass:: RefSet + :no-inherited-members: diff --git a/pyproject.toml b/pyproject.toml index 7cdedaa9..4d366945 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -320,6 +320,8 @@ timeout = "100" filterwarnings = "ignore::pytest.PytestConfigWarning" # All tests can be found in the tests directory. testpaths = ["tests"] +# Use the functions scope as the default for asynchronous tests. +asyncio_default_fixture_loop_scope = "function" [build-system] requires = ["poetry_core>=1.6.1,<2"] diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py index 747a515d..b8faad88 100644 --- a/src/graphql/execution/async_iterables.py +++ b/src/graphql/execution/async_iterables.py @@ -2,7 +2,7 @@ from __future__ import annotations -from contextlib import AbstractAsyncContextManager +from contextlib import AbstractAsyncContextManager, suppress from typing import ( AsyncGenerator, AsyncIterable, @@ -20,6 +20,8 @@ AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] +suppress_exceptions = suppress(Exception) + class aclosing(AbstractAsyncContextManager, Generic[T]): # noqa: N801 """Async context manager for safely finalizing an async iterator or generator. 
@@ -40,7 +42,8 @@ async def __aexit__(self, *_exc_info: object) -> None: except AttributeError: pass # do not complain if the iterator has no aclose() method else: - await aclose() + with suppress_exceptions: # or if the aclose() method fails + await aclose() async def map_async_iterable( diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 4f581252..613a55c2 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -3,8 +3,7 @@ from __future__ import annotations import sys -from collections import defaultdict -from typing import Any, Dict, List, NamedTuple +from typing import Any, Dict, NamedTuple, Union, cast from ..language import ( FieldNode, @@ -15,6 +14,7 @@ OperationType, SelectionSetNode, ) +from ..pyutils import RefMap, RefSet from ..type import ( GraphQLDeferDirective, GraphQLIncludeDirective, @@ -33,33 +33,88 @@ __all__ = [ + "NON_DEFERRED_TARGET_SET", + "CollectFieldsContext", + "CollectFieldsResult", + "DeferUsage", + "DeferUsageSet", + "FieldDetails", "FieldGroup", - "FieldsAndPatches", - "GroupedFieldSet", + "GroupedFieldSetDetails", + "Target", + "TargetSet", "collect_fields", "collect_subfields", ] + +class DeferUsage(NamedTuple): + """An optionally labelled list of ancestor targets.""" + + label: str | None + ancestors: list[Target] + + +Target: TypeAlias = Union[DeferUsage, None] + +TargetSet: TypeAlias = RefSet[Target] +DeferUsageSet: TypeAlias = RefSet[DeferUsage] + + +NON_DEFERRED_TARGET_SET: TargetSet = RefSet([None]) + + +class FieldDetails(NamedTuple): + """A field node and its target.""" + + node: FieldNode + target: Target + + +class FieldGroup(NamedTuple): + """A group of fields that share the same target set.""" + + fields: list[FieldDetails] + targets: TargetSet + + def to_nodes(self) -> list[FieldNode]: + """Return the field nodes in this group.""" + return [field_details.node for field_details in self.fields] + + if sys.version_info < (3, 9): 
- FieldGroup: TypeAlias = List[FieldNode] - GroupedFieldSet = Dict[str, FieldGroup] + GroupedFieldSet: TypeAlias = Dict[str, FieldGroup] else: # Python >= 3.9 - FieldGroup: TypeAlias = list[FieldNode] - GroupedFieldSet = dict[str, FieldGroup] + GroupedFieldSet: TypeAlias = dict[str, FieldGroup] -class PatchFields(NamedTuple): - """Optionally labelled set of fields to be used as a patch.""" +class GroupedFieldSetDetails(NamedTuple): + """A grouped field set with defer info.""" - label: str | None grouped_field_set: GroupedFieldSet + should_initiate_defer: bool -class FieldsAndPatches(NamedTuple): - """Tuple of collected fields and patches to be applied.""" +class CollectFieldsResult(NamedTuple): + """Collected fields and deferred usages.""" grouped_field_set: GroupedFieldSet - patches: list[PatchFields] + new_grouped_field_set_details: RefMap[DeferUsageSet, GroupedFieldSetDetails] + new_defer_usages: list[DeferUsage] + + +class CollectFieldsContext(NamedTuple): + """Context for collecting fields.""" + + schema: GraphQLSchema + fragments: dict[str, FragmentDefinitionNode] + variable_values: dict[str, Any] + operation: OperationDefinitionNode + runtime_type: GraphQLObjectType + targets_by_key: dict[str, TargetSet] + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]] + new_defer_usages: list[DeferUsage] + visited_fragment_names: set[str] def collect_fields( @@ -68,7 +123,7 @@ def collect_fields( variable_values: dict[str, Any], runtime_type: GraphQLObjectType, operation: OperationDefinitionNode, -) -> FieldsAndPatches: +) -> CollectFieldsResult: """Collect fields. Given a selection_set, collects all the fields and returns them. @@ -79,20 +134,23 @@ def collect_fields( For internal use only. 
""" - grouped_field_set: dict[str, list[FieldNode]] = defaultdict(list) - patches: list[PatchFields] = [] - collect_fields_impl( + context = CollectFieldsContext( schema, fragments, variable_values, operation, runtime_type, - operation.selection_set, - grouped_field_set, - patches, + {}, + RefMap(), + [], set(), ) - return FieldsAndPatches(grouped_field_set, patches) + collect_fields_impl(context, operation.selection_set) + + return CollectFieldsResult( + *build_grouped_field_sets(context.targets_by_key, context.fields_by_target), + context.new_defer_usages, + ) def collect_subfields( @@ -102,7 +160,7 @@ def collect_subfields( operation: OperationDefinitionNode, return_type: GraphQLObjectType, field_group: FieldGroup, -) -> FieldsAndPatches: +) -> CollectFieldsResult: """Collect subfields. Given a list of field nodes, collects all the subfields of the passed in fields, @@ -114,47 +172,73 @@ def collect_subfields( For internal use only. """ - sub_grouped_field_set: dict[str, list[FieldNode]] = defaultdict(list) - visited_fragment_names: set[str] = set() - - sub_patches: list[PatchFields] = [] - sub_fields_and_patches = FieldsAndPatches(sub_grouped_field_set, sub_patches) + context = CollectFieldsContext( + schema, + fragments, + variable_values, + operation, + return_type, + {}, + RefMap(), + [], + set(), + ) - for node in field_group: + for field_details in field_group.fields: + node = field_details.node if node.selection_set: - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - return_type, - node.selection_set, - sub_grouped_field_set, - sub_patches, - visited_fragment_names, - ) - return sub_fields_and_patches + collect_fields_impl(context, node.selection_set, field_details.target) + + return CollectFieldsResult( + *build_grouped_field_sets( + context.targets_by_key, context.fields_by_target, field_group.targets + ), + context.new_defer_usages, + ) def collect_fields_impl( - schema: GraphQLSchema, - fragments: dict[str, 
FragmentDefinitionNode], - variable_values: dict[str, Any], - operation: OperationDefinitionNode, - runtime_type: GraphQLObjectType, + context: CollectFieldsContext, selection_set: SelectionSetNode, - grouped_field_set: dict[str, list[FieldNode]], - patches: list[PatchFields], - visited_fragment_names: set[str], + parent_target: Target | None = None, + new_target: Target | None = None, ) -> None: """Collect fields (internal implementation).""" - patch_fields: dict[str, list[FieldNode]] + ( + schema, + fragments, + variable_values, + operation, + runtime_type, + targets_by_key, + fields_by_target, + new_defer_usages, + visited_fragment_names, + ) = context + + ancestors: list[Target] for selection in selection_set.selections: if isinstance(selection, FieldNode): if not should_include_node(variable_values, selection): continue - grouped_field_set[get_field_entry_key(selection)].append(selection) + key = get_field_entry_key(selection) + target = new_target or parent_target + key_targets = targets_by_key.get(key) + if key_targets is None: + key_targets = RefSet([target]) + targets_by_key[key] = key_targets + else: + key_targets.add(target) + target_fields = fields_by_target.get(target) + if target_fields is None: + fields_by_target[target] = {key: [selection]} + else: + field_nodes = target_fields.get(key) + if field_nodes is None: + target_fields[key] = [selection] + else: + field_nodes.append(selection) elif isinstance(selection, InlineFragmentNode): if not should_include_node( variable_values, selection @@ -162,32 +246,19 @@ def collect_fields_impl( continue defer = get_defer_values(operation, variable_values, selection) + if defer: - patch_fields = defaultdict(list) - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - runtime_type, - selection.selection_set, - patch_fields, - patches, - visited_fragment_names, + ancestors = ( + [None] + if parent_target is None + else [parent_target, *parent_target.ancestors] ) - 
patches.append(PatchFields(defer.label, patch_fields)) + target = DeferUsage(defer.label, ancestors) + new_defer_usages.append(target) else: - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - runtime_type, - selection.selection_set, - grouped_field_set, - patches, - visited_fragment_names, - ) + target = new_target + + collect_fields_impl(context, selection.selection_set, parent_target, target) elif isinstance(selection, FragmentSpreadNode): # pragma: no cover else frag_name = selection.name.value @@ -204,35 +275,19 @@ def collect_fields_impl( ): continue - if not defer: - visited_fragment_names.add(frag_name) - if defer: - patch_fields = defaultdict(list) - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - runtime_type, - fragment.selection_set, - patch_fields, - patches, - visited_fragment_names, + ancestors = ( + [None] + if parent_target is None + else [parent_target, *parent_target.ancestors] ) - patches.append(PatchFields(defer.label, patch_fields)) + target = DeferUsage(defer.label, ancestors) + new_defer_usages.append(target) else: - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - runtime_type, - fragment.selection_set, - grouped_field_set, - patches, - visited_fragment_names, - ) + visited_fragment_names.add(frag_name) + target = new_target + + collect_fields_impl(context, fragment.selection_set, parent_target, target) class DeferValues(NamedTuple): @@ -305,3 +360,111 @@ def does_fragment_condition_match( def get_field_entry_key(node: FieldNode) -> str: """Implement the logic to compute the key of a given field's entry""" return node.alias.value if node.alias else node.name.value + + +def build_grouped_field_sets( + targets_by_key: dict[str, TargetSet], + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]], + parent_targets: TargetSet = NON_DEFERRED_TARGET_SET, +) -> tuple[GroupedFieldSet, RefMap[DeferUsageSet, GroupedFieldSetDetails]]: + """Build grouped field 
sets.""" + parent_target_keys, target_set_details_map = get_target_set_details( + targets_by_key, parent_targets + ) + + grouped_field_set = ( + get_ordered_grouped_field_set( + parent_target_keys, parent_targets, targets_by_key, fields_by_target + ) + if parent_target_keys + else {} + ) + + new_grouped_field_set_details: RefMap[DeferUsageSet, GroupedFieldSetDetails] = ( + RefMap() + ) + + for masking_targets, target_set_details in target_set_details_map.items(): + keys, should_initiate_defer = target_set_details + + new_grouped_field_set = get_ordered_grouped_field_set( + keys, masking_targets, targets_by_key, fields_by_target + ) + + # All TargetSets that causes new grouped field sets consist only of DeferUsages + # and have should_initiate_defer defined + + new_grouped_field_set_details[cast(DeferUsageSet, masking_targets)] = ( + GroupedFieldSetDetails(new_grouped_field_set, should_initiate_defer) + ) + + return grouped_field_set, new_grouped_field_set_details + + +class TargetSetDetails(NamedTuple): + """A set of target keys with defer info.""" + + keys: set[str] + should_initiate_defer: bool + + +def get_target_set_details( + targets_by_key: dict[str, TargetSet], parent_targets: TargetSet +) -> tuple[set[str], RefMap[TargetSet, TargetSetDetails]]: + """Get target set details.""" + parent_target_keys: set[str] = set() + target_set_details_map: RefMap[TargetSet, TargetSetDetails] = RefMap() + + for response_key, targets in targets_by_key.items(): + masking_target_list: list[Target] = [] + for target in targets: + if not target or all( + ancestor not in targets for ancestor in target.ancestors + ): + masking_target_list.append(target) + + masking_targets: TargetSet = RefSet(masking_target_list) + if masking_targets == parent_targets: + parent_target_keys.add(response_key) + continue + + for target_set, target_set_details in target_set_details_map.items(): + if target_set == masking_targets: + target_set_details.keys.add(response_key) + break + else: + 
target_set_details = TargetSetDetails( + {response_key}, + any( + defer_usage not in parent_targets for defer_usage in masking_targets + ), + ) + target_set_details_map[masking_targets] = target_set_details + + return parent_target_keys, target_set_details_map + + +def get_ordered_grouped_field_set( + keys: set[str], + masking_targets: TargetSet, + targets_by_key: dict[str, TargetSet], + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]], +) -> GroupedFieldSet: + """Get ordered grouped field set.""" + grouped_field_set: GroupedFieldSet = {} + + first_target = next(iter(masking_targets)) + first_fields = fields_by_target[first_target] + for key in list(first_fields): + if key in keys: + field_group = grouped_field_set.get(key) + if field_group is None: # pragma: no cover else + field_group = FieldGroup([], masking_targets) + grouped_field_set[key] = field_group + for target in targets_by_key[key]: + fields_for_target = fields_by_target[target] + nodes = fields_for_target[key] + del fields_for_target[key] + field_group.fields.extend(FieldDetails(node, target) for node in nodes) + + return grouped_field_set diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 30a6234d..ac041392 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -3,7 +3,6 @@ from __future__ import annotations from asyncio import ensure_future, gather, shield, wait_for -from collections.abc import Mapping from contextlib import suppress from typing import ( Any, @@ -14,19 +13,20 @@ Callable, Iterable, List, + Mapping, NamedTuple, Optional, + Sequence, Tuple, Union, cast, ) try: - from typing import TypeAlias, TypeGuard + from typing import TypeAlias, TypeGuard # noqa: F401 except ImportError: # Python < 3.10 - from typing_extensions import TypeAlias, TypeGuard + from typing_extensions import TypeAlias try: # only needed for Python < 3.11 - # noinspection PyCompatibility from asyncio.exceptions import TimeoutError # noqa: 
A004 except ImportError: # Python < 3.7 from concurrent.futures import TimeoutError # noqa: A004 @@ -41,6 +41,7 @@ from ..pyutils import ( AwaitableOrValue, Path, + RefMap, Undefined, async_reduce, inspect, @@ -68,27 +69,34 @@ ) from .async_iterables import map_async_iterable from .collect_fields import ( + NON_DEFERRED_TARGET_SET, + CollectFieldsResult, + DeferUsage, + DeferUsageSet, + FieldDetails, FieldGroup, - FieldsAndPatches, GroupedFieldSet, + GroupedFieldSetDetails, collect_fields, collect_subfields, ) from .incremental_publisher import ( ASYNC_DELAY, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, ExecutionResult, ExperimentalIncrementalExecutionResults, IncrementalDataRecord, IncrementalPublisher, InitialResultRecord, StreamItemsRecord, - SubsequentDataRecord, + StreamRecord, ) from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values try: # pragma: no cover - anext # noqa: B018 + anext # noqa: B018 # pyright: ignore except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins async def anext(iterator: AsyncIterator) -> Any: @@ -135,11 +143,12 @@ async def anext(iterator: AsyncIterator) -> Any: Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] -class StreamArguments(NamedTuple): - """Arguments of the stream directive""" +class StreamUsage(NamedTuple): + """Stream directive usage information""" - initial_count: int label: str | None + initial_count: int + field_group: FieldGroup class ExecutionContext: @@ -161,9 +170,7 @@ class ExecutionContext: incremental_publisher: IncrementalPublisher middleware_manager: MiddlewareManager | None - is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( - default_is_awaitable - ) + is_awaitable: Callable[[Any], bool] = staticmethod(default_is_awaitable) def __init__( self, @@ -194,8 +201,9 @@ def __init__( if is_awaitable: self.is_awaitable = is_awaitable self._canceled_iterators: 
set[AsyncIterator] = set() - self._subfields_cache: dict[tuple, FieldsAndPatches] = {} + self._subfields_cache: dict[tuple, CollectFieldsResult] = {} self._tasks: set[Awaitable] = set() + self._stream_usages: RefMap[FieldGroup, StreamUsage] = RefMap() @classmethod def build( @@ -310,8 +318,8 @@ def execute_operation( Implements the "Executing operations" section of the spec. """ - schema = self.schema operation = self.operation + schema = self.schema root_type = schema.get_root_type(operation.operation) if root_type is None: msg = ( @@ -320,12 +328,24 @@ def execute_operation( ) raise GraphQLError(msg, operation) - grouped_field_set, patches = collect_fields( - schema, - self.fragments, - self.variable_values, - root_type, - operation, + grouped_field_set, new_grouped_field_set_details, new_defer_usages = ( + collect_fields( + schema, self.fragments, self.variable_values, root_type, operation + ) + ) + + incremental_publisher = self.incremental_publisher + new_defer_map = add_new_deferred_fragments( + incremental_publisher, new_defer_usages, initial_result_record + ) + + path: Path | None = None + + new_deferred_grouped_field_set_records = add_new_deferred_grouped_field_sets( + incremental_publisher, + new_grouped_field_set_details, + new_defer_map, + path, ) root_value = self.root_value @@ -334,18 +354,22 @@ def execute_operation( self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, None, grouped_field_set, initial_result_record) - - for patch in patches: - label, patch_grouped_filed_set = patch - self.execute_deferred_fragment( - root_type, - root_value, - patch_grouped_filed_set, - initial_result_record, - label, - None, - ) + )( + root_type, + root_value, + path, + grouped_field_set, + initial_result_record, + new_defer_map, + ) + + self.execute_deferred_grouped_field_sets( + root_type, + root_value, + path, + new_deferred_grouped_field_set_records, + new_defer_map, + ) return result 
@@ -356,6 +380,7 @@ def execute_fields_serially( path: Path | None, grouped_field_set: GroupedFieldSet, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. @@ -375,6 +400,7 @@ def reducer( field_group, field_path, incremental_data_record, + defer_map, ) if result is Undefined: return results @@ -401,6 +427,7 @@ def execute_fields( path: Path | None, grouped_field_set: GroupedFieldSet, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -419,6 +446,7 @@ def execute_fields( field_group, field_path, incremental_data_record, + defer_map, ) if result is not Undefined: results[response_name] = result @@ -456,6 +484,7 @@ def execute_field( field_group: FieldGroup, path: Path, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. @@ -465,7 +494,7 @@ def execute_field( calling its resolve function, then calls complete_value to await coroutine objects, serialize scalars, or execute the sub-selection-set for objects. """ - field_name = field_group[0].name.value + field_name = field_group.fields[0].node.name.value field_def = self.schema.get_field(parent_type, field_name) if not field_def: return Undefined @@ -483,7 +512,9 @@ def execute_field( try: # Build a dictionary of arguments from the field.arguments AST, using the # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_group[0], self.variable_values) + args = get_argument_values( + field_def, field_group.fields[0].node, self.variable_values + ) # Note that contrary to the JavaScript implementation, we pass the context # value as part of the resolve info. 
@@ -497,10 +528,17 @@ def execute_field( path, result, incremental_data_record, + defer_map, ) completed = self.complete_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) if self.is_awaitable(completed): # noinspection PyShadowingNames @@ -547,8 +585,8 @@ def build_resolve_info( # The resolve function's first argument is a collection of information about # the current execution state. return GraphQLResolveInfo( - field_group[0].name.value, - field_group, + field_group.fields[0].node.name.value, + field_group.to_nodes(), field_def.type, parent_type, path, @@ -570,7 +608,7 @@ def handle_field_error( incremental_data_record: IncrementalDataRecord, ) -> None: """Handle error properly according to the field type.""" - error = located_error(raw_error, field_group, path.as_list()) + error = located_error(raw_error, field_group.to_nodes(), path.as_list()) # If the field type is non-nullable, then it is resolved without any protection # from errors, however it still properly locates the error. @@ -589,6 +627,7 @@ def complete_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Complete a value. @@ -626,6 +665,7 @@ def complete_value( path, result, incremental_data_record, + defer_map, ) if completed is None: msg = ( @@ -642,7 +682,13 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, @@ -654,13 +700,25 @@ def complete_value( # Object type and complete for that type. 
if is_abstract_type(return_type): return self.complete_abstract_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # Not reachable. All possible output types have been considered. @@ -678,6 +736,7 @@ async def complete_awaitable_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> Any: """Complete an awaitable value.""" try: @@ -689,6 +748,7 @@ async def complete_awaitable_value( path, resolved, incremental_data_record, + defer_map, ) if self.is_awaitable(completed): completed = await completed @@ -700,12 +760,12 @@ async def complete_awaitable_value( completed = None return completed - def get_stream_values( + def get_stream_usage( self, field_group: FieldGroup, path: Path - ) -> StreamArguments | None: - """Get stream values. + ) -> StreamUsage | None: + """Get stream usage. - Returns an object containing the `@stream` arguments if a field should be + Returns an object containing info for streaming if a field should be streamed based on the experimental flag, stream directive present and not disabled by the "if" argument. 
""" @@ -713,10 +773,14 @@ def get_stream_values( if isinstance(path.key, int): return None + stream_usage = self._stream_usages.get(field_group) + if stream_usage is not None: + return stream_usage # pragma: no cover + # validation only allows equivalent streams on multiple fields, so it is # safe to only check the first field_node for the stream directive stream = get_directive_values( - GraphQLStreamDirective, field_group[0], self.variable_values + GraphQLStreamDirective, field_group.fields[0].node, self.variable_values ) if not stream or stream.get("if") is False: @@ -734,8 +798,21 @@ def get_stream_values( ) raise TypeError(msg) - label = stream.get("label") - return StreamArguments(initial_count=initial_count, label=label) + streamed_field_group = FieldGroup( + [ + FieldDetails(field_details.node, None) + for field_details in field_group.fields + ], + NON_DEFERRED_TARGET_SET, + ) + + stream_usage = StreamUsage( + stream.get("label"), stream["initialCount"], streamed_field_group + ) + + self._stream_usages[field_group] = stream_usage + + return stream_usage async def complete_async_iterator_value( self, @@ -745,36 +822,39 @@ async def complete_async_iterator_value( path: Path, async_iterator: AsyncIterator[Any], incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> list[Any]: """Complete an async iterator. Complete an async iterator value by completing the result and calling recursively until all the results are completed. 
""" - stream = self.get_stream_values(field_group, path) + stream_usage = self.get_stream_usage(field_group, path) complete_list_item_value = self.complete_list_item_value awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append completed_results: list[Any] = [] index = 0 while True: - if ( - stream - and isinstance(stream.initial_count, int) - and index >= stream.initial_count - ): + if stream_usage and index >= stream_usage.initial_count: + try: + early_return = async_iterator.aclose # type: ignore + except AttributeError: + early_return = None + stream_record = StreamRecord(path, stream_usage.label, early_return) + with suppress_timeout_error: await wait_for( shield( self.execute_stream_async_iterator( index, async_iterator, - field_group, + stream_usage.field_group, info, item_type, path, incremental_data_record, - stream.label, + stream_record, ) ), timeout=ASYNC_DELAY, @@ -789,7 +869,7 @@ async def complete_async_iterator_value( break except Exception as raw_error: raise located_error( - raw_error, field_group, path.as_list() + raw_error, field_group.to_nodes(), path.as_list() ) from raw_error if complete_list_item_value( value, @@ -799,6 +879,7 @@ async def complete_async_iterator_value( info, item_path, incremental_data_record, + defer_map, ): append_awaitable(index) @@ -829,6 +910,7 @@ def complete_list_value( path: Path, result: AsyncIterable[Any] | Iterable[Any], incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[list[Any]]: """Complete a list value. 
@@ -846,6 +928,7 @@ def complete_list_value( path, async_iterator, incremental_data_record, + defer_map, ) if not is_iterable(result): @@ -855,35 +938,34 @@ def complete_list_value( ) raise GraphQLError(msg) - stream = self.get_stream_values(field_group, path) + stream_usage = self.get_stream_usage(field_group, path) # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine # object. complete_list_item_value = self.complete_list_item_value + current_parents = incremental_data_record awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append - previous_incremental_data_record = incremental_data_record completed_results: list[Any] = [] + stream_record: StreamRecord | None = None for index, item in enumerate(result): # No need to modify the info object containing the path, since from here on # it is not ever accessed by resolver functions. item_path = path.add_key(index, None) - if ( - stream - and isinstance(stream.initial_count, int) - and index >= stream.initial_count - ): - previous_incremental_data_record = self.execute_stream_field( + if stream_usage and index >= stream_usage.initial_count: + if stream_record is None: + stream_record = StreamRecord(path, stream_usage.label) + current_parents = self.execute_stream_field( path, item_path, item, - field_group, + stream_usage.field_group, info, item_type, - previous_incremental_data_record, - stream.label, + current_parents, + stream_record, ) continue @@ -895,9 +977,15 @@ def complete_list_value( info, item_path, incremental_data_record, + defer_map, ): append_awaitable(index) + if stream_record is not None: + self.incremental_publisher.set_is_final_record( + cast(StreamItemsRecord, current_parents) + ) + if not awaitable_indices: return completed_results @@ -928,6 +1016,7 @@ def complete_list_item_value( info: GraphQLResolveInfo, item_path: Path, incremental_data_record: IncrementalDataRecord, + 
defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> bool: """Complete a list item value by adding it to the completed results. @@ -944,6 +1033,7 @@ def complete_list_item_value( item_path, item, incremental_data_record, + defer_map, ) ) return True @@ -956,6 +1046,7 @@ def complete_list_item_value( item_path, item, incremental_data_record, + defer_map, ) if is_awaitable(completed_item): @@ -1019,6 +1110,7 @@ def complete_abstract_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Complete an abstract value. @@ -1045,6 +1137,7 @@ async def await_complete_object_value() -> Any: path, result, incremental_data_record, + defer_map, ) if self.is_awaitable(value): return await value # type: ignore @@ -1062,6 +1155,7 @@ async def await_complete_object_value() -> Any: path, result, incremental_data_record, + defer_map, ) def ensure_valid_runtime_type( @@ -1082,7 +1176,7 @@ def ensure_valid_runtime_type( " a 'resolve_type' function or each possible type should provide" " an 'is_type_of' function." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) if is_object_type(runtime_type_name): # pragma: no cover msg = ( @@ -1098,7 +1192,7 @@ def ensure_valid_runtime_type( f" for field '{info.parent_type.name}.{info.field_name}' with value" f" {inspect(result)}, received '{inspect(runtime_type_name)}'." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) runtime_type = self.schema.get_type(runtime_type_name) @@ -1107,21 +1201,21 @@ def ensure_valid_runtime_type( f"Abstract type '{return_type.name}' was resolved to a type" f" '{runtime_type_name}' that does not exist inside the schema." 
) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) if not is_object_type(runtime_type): msg = ( f"Abstract type '{return_type.name}' was resolved" f" to a non-object type '{runtime_type_name}'." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) if not self.schema.is_sub_type(return_type, runtime_type): msg = ( f"Runtime Object type '{runtime_type.name}' is not a possible" f" type for '{return_type.name}'." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) # noinspection PyTypeChecker return runtime_type @@ -1134,6 +1228,7 @@ def complete_object_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current @@ -1150,7 +1245,12 @@ async def execute_subfields_async() -> dict[str, Any]: return_type, result, field_group ) return self.collect_and_execute_subfields( - return_type, field_group, path, result, incremental_data_record + return_type, + field_group, + path, + result, + incremental_data_record, + defer_map, ) # type: ignore return execute_subfields_async() @@ -1159,7 +1259,7 @@ async def execute_subfields_async() -> dict[str, Any]: raise invalid_return_type_error(return_type, result, field_group) return self.collect_and_execute_subfields( - return_type, field_group, path, result, incremental_data_record + return_type, field_group, path, result, incremental_data_record, defer_map ) def collect_and_execute_subfields( @@ -1169,32 +1269,47 @@ def collect_and_execute_subfields( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this 
value.""" - sub_grouped_field_set, sub_patches = self.collect_subfields( - return_type, field_group + grouped_field_set, new_grouped_field_set_details, new_defer_usages = ( + self.collect_subfields(return_type, field_group) + ) + + incremental_publisher = self.incremental_publisher + new_defer_map = add_new_deferred_fragments( + incremental_publisher, + new_defer_usages, + incremental_data_record, + defer_map, + path, + ) + new_deferred_grouped_field_set_records = add_new_deferred_grouped_field_sets( + incremental_publisher, new_grouped_field_set_details, new_defer_map, path ) sub_fields = self.execute_fields( - return_type, result, path, sub_grouped_field_set, incremental_data_record + return_type, + result, + path, + grouped_field_set, + incremental_data_record, + new_defer_map, ) - for sub_patch in sub_patches: - label, sub_patch_grouped_field_set = sub_patch - self.execute_deferred_fragment( - return_type, - result, - sub_patch_grouped_field_set, - incremental_data_record, - label, - path, - ) + self.execute_deferred_grouped_field_sets( + return_type, + result, + path, + new_deferred_grouped_field_set_records, + new_defer_map, + ) return sub_fields def collect_subfields( self, return_type: GraphQLObjectType, field_group: FieldGroup - ) -> FieldsAndPatches: + ) -> CollectFieldsResult: """Collect subfields. 
A cached collection of relevant subfields with regard to the return type is @@ -1258,57 +1373,91 @@ async def callback(payload: Any) -> ExecutionResult: return map_async_iterable(result_or_stream, callback) - def execute_deferred_fragment( + def execute_deferred_grouped_field_sets( + self, + parent_type: GraphQLObjectType, + source_value: Any, + path: Path | None, + new_deferred_grouped_field_set_records: Sequence[DeferredGroupedFieldSetRecord], + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> None: + """Execute deferred grouped field sets.""" + for deferred_grouped_field_set_record in new_deferred_grouped_field_set_records: + if deferred_grouped_field_set_record.should_initiate_defer: + + async def execute_deferred_grouped_field_set( + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + ) -> None: + self.execute_deferred_grouped_field_set( + parent_type, + source_value, + path, + deferred_grouped_field_set_record, + defer_map, + ) + + self.add_task( + execute_deferred_grouped_field_set( + deferred_grouped_field_set_record + ) + ) + + else: + self.execute_deferred_grouped_field_set( + parent_type, + source_value, + path, + deferred_grouped_field_set_record, + defer_map, + ) + + def execute_deferred_grouped_field_set( self, parent_type: GraphQLObjectType, source_value: Any, - fields: GroupedFieldSet, - parent_context: IncrementalDataRecord, - label: str | None = None, - path: Path | None = None, + path: Path | None, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> None: - """Execute deferred fragment.""" + """Execute deferred grouped field set.""" incremental_publisher = self.incremental_publisher - incremental_data_record = ( - incremental_publisher.prepare_new_deferred_fragment_record( - label, path, parent_context - ) - ) try: - awaitable_or_data = self.execute_fields( - parent_type, source_value, path, fields, incremental_data_record + 
incremental_result = self.execute_fields( + parent_type, + source_value, + path, + deferred_grouped_field_set_record.grouped_field_set, + deferred_grouped_field_set_record, + defer_map, ) - if self.is_awaitable(awaitable_or_data): + if self.is_awaitable(incremental_result): + incremental_result = cast(Awaitable, incremental_result) - async def await_data() -> None: + async def await_incremental_result() -> None: try: - data = await awaitable_or_data # type: ignore + result = await incremental_result except GraphQLError as error: - incremental_publisher.add_field_error( - incremental_data_record, error - ) - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, None + incremental_publisher.mark_errored_deferred_grouped_field_set( + deferred_grouped_field_set_record, error ) else: - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, data + incremental_publisher.complete_deferred_grouped_field_set( + deferred_grouped_field_set_record, result ) - self.add_task(await_data()) + self.add_task(await_incremental_result()) else: - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, - awaitable_or_data, # type: ignore + incremental_publisher.complete_deferred_grouped_field_set( + deferred_grouped_field_set_record, + incremental_result, # type: ignore ) + except GraphQLError as error: - incremental_publisher.add_field_error(incremental_data_record, error) - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, None + incremental_publisher.mark_errored_deferred_grouped_field_set( + deferred_grouped_field_set_record, error ) - awaitable_or_data = None def execute_stream_field( self, @@ -1318,14 +1467,15 @@ def execute_stream_field( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, - parent_context: IncrementalDataRecord, - label: str | None = None, - ) -> SubsequentDataRecord: + incremental_data_record: IncrementalDataRecord, 
+ stream_record: StreamRecord, + ) -> StreamItemsRecord: """Execute stream field.""" is_awaitable = self.is_awaitable incremental_publisher = self.incremental_publisher - incremental_data_record = incremental_publisher.prepare_new_stream_items_record( - label, item_path, parent_context + stream_items_record = StreamItemsRecord(stream_record, item_path) + incremental_publisher.report_new_stream_items_record( + stream_items_record, incremental_data_record ) completed_item: Any @@ -1339,23 +1489,21 @@ async def await_completed_awaitable_item() -> None: info, item_path, item, - incremental_data_record, + stream_items_record, + RefMap(), ) except GraphQLError as error: - incremental_publisher.add_field_error( - incremental_data_record, error - ) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) else: incremental_publisher.complete_stream_items_record( - incremental_data_record, [value] + stream_items_record, [value] ) self.add_task(await_completed_awaitable_item()) - return incremental_data_record + return stream_items_record try: try: @@ -1365,7 +1513,8 @@ async def await_completed_awaitable_item() -> None: info, item_path, item, - incremental_data_record, + stream_items_record, + RefMap(), ) except Exception as raw_error: self.handle_field_error( @@ -1373,17 +1522,16 @@ async def await_completed_awaitable_item() -> None: item_type, field_group, item_path, - incremental_data_record, + stream_items_record, ) completed_item = None - incremental_publisher.filter(item_path, incremental_data_record) + incremental_publisher.filter(item_path, stream_items_record) except GraphQLError as error: - incremental_publisher.add_field_error(incremental_data_record, error) - incremental_publisher.filter(path, incremental_data_record) - 
incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) - return incremental_data_record + return stream_items_record if is_awaitable(completed_item): @@ -1397,30 +1545,27 @@ async def await_completed_item() -> None: item_type, field_group, item_path, - incremental_data_record, + stream_items_record, ) - incremental_publisher.filter(item_path, incremental_data_record) + incremental_publisher.filter(item_path, stream_items_record) value = None except GraphQLError as error: # pragma: no cover - incremental_publisher.add_field_error( - incremental_data_record, error - ) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) else: incremental_publisher.complete_stream_items_record( - incremental_data_record, [value] + stream_items_record, [value] ) self.add_task(await_completed_item()) - return incremental_data_record + return stream_items_record incremental_publisher.complete_stream_items_record( - incremental_data_record, [completed_item] + stream_items_record, [completed_item] ) - return incremental_data_record + return stream_items_record async def execute_stream_async_iterator_item( self, @@ -1428,8 +1573,7 @@ async def execute_stream_async_iterator_item( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, - incremental_data_record: StreamItemsRecord, - path: Path, + stream_items_record: StreamItemsRecord, item_path: Path, ) -> Any: """Execute stream iterator item.""" @@ -1439,14 +1583,27 @@ async def execute_stream_async_iterator_item( item = await anext(async_iterator) except StopAsyncIteration as raw_error: 
self.incremental_publisher.set_is_completed_async_iterator( - incremental_data_record + stream_items_record ) raise StopAsyncIteration from raw_error except Exception as raw_error: - raise located_error(raw_error, field_group, path.as_list()) from raw_error + raise located_error( + raw_error, + field_group.to_nodes(), + stream_items_record.stream_record.path, + ) from raw_error + else: + if stream_items_record.stream_record.errors: + raise StopAsyncIteration # pragma: no cover try: completed_item = self.complete_value( - item_type, field_group, info, item_path, item, incremental_data_record + item_type, + field_group, + info, + item_path, + item, + stream_items_record, + RefMap(), ) return ( await completed_item @@ -1455,9 +1612,9 @@ async def execute_stream_async_iterator_item( ) except Exception as raw_error: self.handle_field_error( - raw_error, item_type, field_group, item_path, incremental_data_record + raw_error, item_type, field_group, item_path, stream_items_record ) - self.incremental_publisher.filter(item_path, incremental_data_record) + self.incremental_publisher.filter(item_path, stream_items_record) async def execute_stream_async_iterator( self, @@ -1467,21 +1624,19 @@ async def execute_stream_async_iterator( info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, - parent_context: IncrementalDataRecord, - label: str | None = None, + incremental_data_record: IncrementalDataRecord, + stream_record: StreamRecord, ) -> None: """Execute stream iterator.""" incremental_publisher = self.incremental_publisher index = initial_index - previous_incremental_data_record = parent_context + current_incremental_data_record = incremental_data_record - done = False while True: item_path = Path(path, index, None) - incremental_data_record = ( - incremental_publisher.prepare_new_stream_items_record( - label, item_path, previous_incremental_data_record, async_iterator - ) + stream_items_record = StreamItemsRecord(stream_record, item_path) + 
incremental_publisher.report_new_stream_items_record( + stream_items_record, current_incremental_data_record ) try: @@ -1490,15 +1645,13 @@ async def execute_stream_async_iterator( field_group, info, item_type, - incremental_data_record, - path, + stream_items_record, item_path, ) except GraphQLError as error: - incremental_publisher.add_field_error(incremental_data_record, error) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) if async_iterator: # pragma: no cover else with suppress_exceptions: @@ -1506,18 +1659,20 @@ async def execute_stream_async_iterator( # running generators cannot be closed since Python 3.8, # so we need to remember that this iterator is already canceled self._canceled_iterators.add(async_iterator) - break + return except StopAsyncIteration: done = True + completed_item = None + else: + done = False incremental_publisher.complete_stream_items_record( - incremental_data_record, - [completed_item], + stream_items_record, [completed_item] ) if done: break - previous_incremental_data_record = incremental_data_record + current_incremental_data_record = stream_items_record index += 1 def add_task(self, awaitable: Awaitable[Any]) -> None: @@ -1667,7 +1822,7 @@ def execute_impl( # at which point we still log the error and null the parent field, which # in this case is the entire response. 
incremental_publisher = context.incremental_publisher - initial_result_record = incremental_publisher.prepare_initial_result_record() + initial_result_record = InitialResultRecord() try: data = context.execute_operation(initial_result_record) if context.is_awaitable(data): @@ -1759,10 +1914,92 @@ def invalid_return_type_error( """Create a GraphQLError for an invalid return type.""" return GraphQLError( f"Expected value of type '{return_type.name}' but got: {inspect(result)}.", - field_group, + field_group.to_nodes(), ) +def add_new_deferred_fragments( + incremental_publisher: IncrementalPublisher, + new_defer_usages: Sequence[DeferUsage], + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord] | None = None, + path: Path | None = None, +) -> RefMap[DeferUsage, DeferredFragmentRecord]: + """Add new deferred fragments to the defer map.""" + new_defer_map: RefMap[DeferUsage, DeferredFragmentRecord] + if not new_defer_usages: + return RefMap() if defer_map is None else defer_map + new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + for defer_usage in new_defer_usages: + ancestors = defer_usage.ancestors + parent_defer_usage = ancestors[0] if ancestors else None + + parent = ( + cast(Union[InitialResultRecord, StreamItemsRecord], incremental_data_record) + if parent_defer_usage is None + else deferred_fragment_record_from_defer_usage( + parent_defer_usage, new_defer_map + ) + ) + + deferred_fragment_record = DeferredFragmentRecord(path, defer_usage.label) + + incremental_publisher.report_new_defer_fragment_record( + deferred_fragment_record, parent + ) + + new_defer_map[defer_usage] = deferred_fragment_record + + return new_defer_map + + +def deferred_fragment_record_from_defer_usage( + defer_usage: DeferUsage, defer_map: RefMap[DeferUsage, DeferredFragmentRecord] +) -> DeferredFragmentRecord: + """Get the deferred fragment record mapped to the given defer usage.""" + return 
defer_map[defer_usage] + + +def add_new_deferred_grouped_field_sets( + incremental_publisher: IncrementalPublisher, + new_grouped_field_set_details: Mapping[DeferUsageSet, GroupedFieldSetDetails], + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + path: Path | None = None, +) -> list[DeferredGroupedFieldSetRecord]: + """Add new deferred grouped field sets to the defer map.""" + new_deferred_grouped_field_set_records: list[DeferredGroupedFieldSetRecord] = [] + + for ( + new_grouped_field_set_defer_usages, + grouped_field_set_details, + ) in new_grouped_field_set_details.items(): + deferred_fragment_records = get_deferred_fragment_records( + new_grouped_field_set_defer_usages, defer_map + ) + deferred_grouped_field_set_record = DeferredGroupedFieldSetRecord( + deferred_fragment_records, + grouped_field_set_details.grouped_field_set, + grouped_field_set_details.should_initiate_defer, + path, + ) + incremental_publisher.report_new_deferred_grouped_filed_set_record( + deferred_grouped_field_set_record + ) + new_deferred_grouped_field_set_records.append(deferred_grouped_field_set_record) + + return new_deferred_grouped_field_set_records + + +def get_deferred_fragment_records( + defer_usages: DeferUsageSet, defer_map: RefMap[DeferUsage, DeferredFragmentRecord] +) -> list[DeferredFragmentRecord]: + """Get the deferred fragment records for the given defer usages.""" + return [ + deferred_fragment_record_from_defer_usage(defer_usage, defer_map) + for defer_usage in defer_usages + ] + + def get_typename(value: Any) -> str | None: """Get the ``__typename`` property of the given value.""" if isinstance(value, Mapping): @@ -2025,12 +2262,12 @@ def execute_subscription( ).grouped_field_set first_root_field = next(iter(grouped_field_set.items())) response_name, field_group = first_root_field - field_name = field_group[0].name.value + field_name = field_group.fields[0].node.name.value field_def = schema.get_field(root_type, field_name) if not field_def: msg = f"The 
subscription field '{field_name}' is not defined." - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) path = Path(None, response_name, root_type.name) info = context.build_resolve_info(field_def, field_group, root_type, path) @@ -2041,7 +2278,9 @@ def execute_subscription( try: # Build a dictionary of arguments from the field.arguments AST, using the # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_group[0], context.variable_values) + args = get_argument_values( + field_def, field_group.fields[0].node, context.variable_values + ) # Call the `subscribe()` resolver or the default resolver to produce an # AsyncIterable yielding raw payloads. @@ -2054,14 +2293,16 @@ async def await_result() -> AsyncIterable[Any]: try: return assert_event_stream(await result) except Exception as error: - raise located_error(error, field_group, path.as_list()) from error + raise located_error( + error, field_group.to_nodes(), path.as_list() + ) from error return await_result() return assert_event_stream(result) except Exception as error: - raise located_error(error, field_group, path.as_list()) from error + raise located_error(error, field_group.to_nodes(), path.as_list()) from error def assert_event_stream(result: Any) -> AsyncIterable: diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index a1b8c507..18890fb3 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -2,14 +2,14 @@ from __future__ import annotations -from asyncio import Event, ensure_future, gather +from asyncio import Event, ensure_future, gather, sleep from contextlib import suppress from typing import ( TYPE_CHECKING, Any, AsyncGenerator, - AsyncIterator, Awaitable, + Callable, Collection, Iterator, NamedTuple, @@ -25,6 +25,7 @@ if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError from 
..pyutils import Path + from .collect_fields import GroupedFieldSet __all__ = [ "ASYNC_DELAY", @@ -54,6 +55,80 @@ suppress_key_error = suppress(KeyError) +class FormattedCompletedResult(TypedDict, total=False): + """Formatted completed execution result""" + + path: list[str | int] + label: str + errors: list[GraphQLFormattedError] + + +class CompletedResult: + """Completed execution result""" + + path: list[str | int] + label: str | None + errors: list[GraphQLError] | None + + __slots__ = "errors", "label", "path" + + def __init__( + self, + path: list[str | int], + label: str | None = None, + errors: list[GraphQLError] | None = None, + ) -> None: + self.path = path + self.label = label + self.errors = errors + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + if self.errors: + args.append(f"errors={self.errors!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedCompletedResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedCompletedResult = {"path": self.path} + if self.label is not None: + formatted["label"] = self.label + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("path") == self.path + and ("label" not in other or other["label"] == self.label) + and ("errors" not in other or other["errors"] == self.errors) + ) + if isinstance(other, tuple): + size = len(other) + return 1 < size < 4 and (self.path, self.label, self.errors)[:size] == other + return ( + isinstance(other, self.__class__) + and other.path == self.path + and other.label == self.label + and other.errors == self.errors + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class IncrementalUpdate(NamedTuple): + 
"""Incremental update""" + + incremental: list[IncrementalResult] + completed: list[CompletedResult] + + class FormattedExecutionResult(TypedDict, total=False): """Formatted execution result""" @@ -147,31 +222,26 @@ class InitialIncrementalExecutionResult: data: dict[str, Any] | None errors: list[GraphQLError] | None - incremental: Sequence[IncrementalResult] | None has_next: bool extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "has_next", "incremental" + __slots__ = "data", "errors", "extensions", "has_next" def __init__( self, data: dict[str, Any] | None = None, errors: list[GraphQLError] | None = None, - incremental: Sequence[IncrementalResult] | None = None, has_next: bool = False, extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors - self.incremental = incremental self.has_next = has_next self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") if self.has_next: args.append("has_next") if self.extensions: @@ -184,8 +254,6 @@ def formatted(self) -> FormattedInitialIncrementalExecutionResult: formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] - if self.incremental: - formatted["incremental"] = [result.formatted for result in self.incremental] formatted["hasNext"] = self.has_next if self.extensions is not None: formatted["extensions"] = self.extensions @@ -196,10 +264,6 @@ def __eq__(self, other: object) -> bool: return ( other.get("data") == self.data and other.get("errors") == self.errors - and ( - "incremental" not in other - or other["incremental"] == self.incremental - ) and ("hasNext" not in other or other["hasNext"] == self.has_next) and ( "extensions" not in other or other["extensions"] == 
self.extensions @@ -208,11 +272,10 @@ def __eq__(self, other: object) -> bool: if isinstance(other, tuple): size = len(other) return ( - 1 < size < 6 + 1 < size < 5 and ( self.data, self.errors, - self.incremental, self.has_next, self.extensions, )[:size] @@ -222,7 +285,6 @@ def __eq__(self, other: object) -> bool: isinstance(other, self.__class__) and other.data == self.data and other.errors == self.errors - and other.incremental == self.incremental and other.has_next == self.has_next and other.extensions == self.extensions ) @@ -244,7 +306,6 @@ class FormattedIncrementalDeferResult(TypedDict, total=False): data: dict[str, Any] | None errors: list[GraphQLFormattedError] path: list[str | int] - label: str extensions: dict[str, Any] @@ -254,23 +315,20 @@ class IncrementalDeferResult: data: dict[str, Any] | None errors: list[GraphQLError] | None path: list[str | int] | None - label: str | None extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "label", "path" + __slots__ = "data", "errors", "extensions", "path" def __init__( self, data: dict[str, Any] | None = None, errors: list[GraphQLError] | None = None, path: list[str | int] | None = None, - label: str | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors self.path = path - self.label = label self.extensions = extensions def __repr__(self) -> str: @@ -278,8 +336,6 @@ def __repr__(self) -> str: args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] if self.path: args.append(f"path={self.path!r}") - if self.label: - args.append(f"label={self.label!r}") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -292,8 +348,6 @@ def formatted(self) -> FormattedIncrementalDeferResult: formatted["errors"] = [error.formatted for error in self.errors] if self.path is not None: formatted["path"] = self.path - if self.label is not None: - formatted["label"] = self.label if self.extensions 
is not None: formatted["extensions"] = self.extensions return formatted @@ -304,7 +358,6 @@ def __eq__(self, other: object) -> bool: other.get("data") == self.data and other.get("errors") == self.errors and ("path" not in other or other["path"] == self.path) - and ("label" not in other or other["label"] == self.label) and ( "extensions" not in other or other["extensions"] == self.extensions ) @@ -312,18 +365,14 @@ def __eq__(self, other: object) -> bool: if isinstance(other, tuple): size = len(other) return ( - 1 < size < 6 - and (self.data, self.errors, self.path, self.label, self.extensions)[ - :size - ] - == other + 1 < size < 5 + and (self.data, self.errors, self.path, self.extensions)[:size] == other ) return ( isinstance(other, self.__class__) and other.data == self.data and other.errors == self.errors and other.path == self.path - and other.label == self.label and other.extensions == self.extensions ) @@ -337,7 +386,6 @@ class FormattedIncrementalStreamResult(TypedDict, total=False): items: list[Any] | None errors: list[GraphQLFormattedError] path: list[str | int] - label: str extensions: dict[str, Any] @@ -347,7 +395,6 @@ class IncrementalStreamResult: items: list[Any] | None errors: list[GraphQLError] | None path: list[str | int] | None - label: str | None extensions: dict[str, Any] | None __slots__ = "errors", "extensions", "items", "label", "path" @@ -357,13 +404,11 @@ def __init__( items: list[Any] | None = None, errors: list[GraphQLError] | None = None, path: list[str | int] | None = None, - label: str | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.items = items self.errors = errors self.path = path - self.label = label self.extensions = extensions def __repr__(self) -> str: @@ -371,8 +416,6 @@ def __repr__(self) -> str: args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"] if self.path: args.append(f"path={self.path!r}") - if self.label: - args.append(f"label={self.label!r}") if self.extensions: 
args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -385,8 +428,6 @@ def formatted(self) -> FormattedIncrementalStreamResult: formatted["errors"] = [error.formatted for error in self.errors] if self.path is not None: formatted["path"] = self.path - if self.label is not None: - formatted["label"] = self.label if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -397,7 +438,6 @@ def __eq__(self, other: object) -> bool: other.get("items") == self.items and other.get("errors") == self.errors and ("path" not in other or other["path"] == self.path) - and ("label" not in other or other["label"] == self.label) and ( "extensions" not in other or other["extensions"] == self.extensions ) @@ -405,10 +445,8 @@ def __eq__(self, other: object) -> bool: if isinstance(other, tuple): size = len(other) return ( - 1 < size < 6 - and (self.items, self.errors, self.path, self.label, self.extensions)[ - :size - ] + 1 < size < 5 + and (self.items, self.errors, self.path, self.extensions)[:size] == other ) return ( @@ -416,7 +454,6 @@ def __eq__(self, other: object) -> bool: and other.items == self.items and other.errors == self.errors and other.path == self.path - and other.label == self.label and other.extensions == self.extensions ) @@ -434,8 +471,9 @@ def __ne__(self, other: object) -> bool: class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): """Formatted subsequent incremental execution result""" - incremental: list[FormattedIncrementalResult] hasNext: bool + incremental: list[FormattedIncrementalResult] + completed: list[FormattedCompletedResult] extensions: dict[str, Any] @@ -446,29 +484,34 @@ class SubsequentIncrementalExecutionResult: - ``incremental`` is a list of the results from defer/stream directives. 
""" - __slots__ = "extensions", "has_next", "incremental" + __slots__ = "completed", "extensions", "has_next", "incremental" - incremental: Sequence[IncrementalResult] | None has_next: bool + incremental: Sequence[IncrementalResult] | None + completed: Sequence[CompletedResult] | None extensions: dict[str, Any] | None def __init__( self, - incremental: Sequence[IncrementalResult] | None = None, has_next: bool = False, + incremental: Sequence[IncrementalResult] | None = None, + completed: Sequence[CompletedResult] | None = None, extensions: dict[str, Any] | None = None, ) -> None: - self.incremental = incremental self.has_next = has_next + self.incremental = incremental + self.completed = completed self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ args: list[str] = [] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") if self.has_next: args.append("has_next") + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.completed: + args.append(f"completed[{len(self.completed)}]") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -477,9 +520,11 @@ def __repr__(self) -> str: def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: """Get execution result formatted according to the specification.""" formatted: FormattedSubsequentIncrementalExecutionResult = {} + formatted["hasNext"] = self.has_next if self.incremental: formatted["incremental"] = [result.formatted for result in self.incremental] - formatted["hasNext"] = self.has_next + if self.completed: + formatted["completed"] = [result.formatted for result in self.completed] if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -487,8 +532,12 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - ("incremental" not in other or 
other["incremental"] == self.incremental) - and ("hasNext" in other and other["hasNext"] == self.has_next) + ("hasNext" in other and other["hasNext"] == self.has_next) + and ( + "incremental" not in other + or other["incremental"] == self.incremental + ) + and ("completed" not in other or other["completed"] == self.completed) and ( "extensions" not in other or other["extensions"] == self.extensions ) @@ -496,18 +545,20 @@ def __eq__(self, other: object) -> bool: if isinstance(other, tuple): size = len(other) return ( - 1 < size < 4 + 1 < size < 5 and ( - self.incremental, self.has_next, + self.incremental, + self.completed, self.extensions, )[:size] == other ) return ( isinstance(other, self.__class__) - and other.incremental == self.incremental and other.has_next == self.has_next + and other.incremental == self.incremental + and other.completed == self.completed and other.extensions == self.extensions ) @@ -530,20 +581,20 @@ class IncrementalPublisher: The internal publishing state is managed as follows: - ``_released``: the set of Subsequent Data records that are ready to be sent to the + ``_released``: the set of Subsequent Result records that are ready to be sent to the client, i.e. their parents have completed and they have also completed. - ``_pending``: the set of Subsequent Data records that are definitely pending, i.e. + ``_pending``: the set of Subsequent Result records that are definitely pending, i.e. their parents have completed so that they can no longer be filtered. This includes - all Subsequent Data records in `released`, as well as Subsequent Data records that - have not yet completed. + all Subsequent Result records in `released`, as well as the records that have not + yet completed. Note: Instead of sets we use dicts (with values set to None) which preserve order and thereby achieve more deterministic results. 
""" - _released: dict[SubsequentDataRecord, None] - _pending: dict[SubsequentDataRecord, None] + _released: dict[SubsequentResultRecord, None] + _pending: dict[SubsequentResultRecord, None] _resolve: Event | None def __init__(self) -> None: @@ -552,60 +603,107 @@ def __init__(self) -> None: self._resolve = None # lazy initialization self._tasks: set[Awaitable] = set() - def prepare_initial_result_record(self) -> InitialResultRecord: - """Prepare a new initial result record.""" - return InitialResultRecord(errors=[], children={}) - - def prepare_new_deferred_fragment_record( - self, - label: str | None, - path: Path | None, - parent_context: IncrementalDataRecord, - ) -> DeferredFragmentRecord: - """Prepare a new deferred fragment record.""" - deferred_fragment_record = DeferredFragmentRecord(label, path) + @staticmethod + def report_new_defer_fragment_record( + deferred_fragment_record: DeferredFragmentRecord, + parent_incremental_result_record: InitialResultRecord + | DeferredFragmentRecord + | StreamItemsRecord, + ) -> None: + """Report a new deferred fragment record.""" + parent_incremental_result_record.children[deferred_fragment_record] = None - parent_context.children[deferred_fragment_record] = None - return deferred_fragment_record + @staticmethod + def report_new_deferred_grouped_filed_set_record( + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + ) -> None: + """Report a new deferred grouped field set record.""" + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + deferred_fragment_record._pending[deferred_grouped_field_set_record] = None # noqa: SLF001 + deferred_fragment_record.deferred_grouped_field_set_records[ + deferred_grouped_field_set_record + ] = None + + @staticmethod + def report_new_stream_items_record( + stream_items_record: StreamItemsRecord, + parent_incremental_data_record: IncrementalDataRecord, + ) -> None: + """Report a new stream items record.""" + if 
isinstance(parent_incremental_data_record, DeferredGroupedFieldSetRecord): + for parent in parent_incremental_data_record.deferred_fragment_records: + parent.children[stream_items_record] = None + else: + parent_incremental_data_record.children[stream_items_record] = None - def prepare_new_stream_items_record( + def complete_deferred_grouped_field_set( self, - label: str | None, - path: Path | None, - parent_context: IncrementalDataRecord, - async_iterator: AsyncIterator[Any] | None = None, - ) -> StreamItemsRecord: - """Prepare a new stream items record.""" - stream_items_record = StreamItemsRecord(label, path, async_iterator) - - parent_context.children[stream_items_record] = None - return stream_items_record + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + data: dict[str, Any], + ) -> None: + """Complete the given deferred grouped field set record with the given data.""" + deferred_grouped_field_set_record.data = data + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + pending = deferred_fragment_record._pending # noqa: SLF001 + del pending[deferred_grouped_field_set_record] + if not pending: + self.complete_deferred_fragment_record(deferred_fragment_record) + + def mark_errored_deferred_grouped_field_set( + self, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + error: GraphQLError, + ) -> None: + """Mark the given deferred grouped field set record as errored.""" + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + deferred_fragment_record.errors.append(error) + self.complete_deferred_fragment_record(deferred_fragment_record) def complete_deferred_fragment_record( - self, - deferred_fragment_record: DeferredFragmentRecord, - data: dict[str, Any] | None, + self, deferred_fragment_record: DeferredFragmentRecord ) -> None: """Complete the given deferred fragment record.""" - deferred_fragment_record.data = data - 
deferred_fragment_record.is_completed = True self._release(deferred_fragment_record) def complete_stream_items_record( self, stream_items_record: StreamItemsRecord, - items: list[str] | None, + items: list[Any], ) -> None: """Complete the given stream items record.""" stream_items_record.items = items stream_items_record.is_completed = True self._release(stream_items_record) + def mark_errored_stream_items_record( + self, stream_items_record: StreamItemsRecord, error: GraphQLError + ) -> None: + """Mark the given stream items record as errored.""" + stream_items_record.stream_record.errors.append(error) + self.set_is_final_record(stream_items_record) + stream_items_record.is_completed = True + early_return = stream_items_record.stream_record.early_return + if early_return: + self._add_task(early_return()) + self._release(stream_items_record) + + @staticmethod + def set_is_final_record(stream_items_record: StreamItemsRecord) -> None: + """Mark stream items record as final.""" + stream_items_record.is_final_record = True + def set_is_completed_async_iterator( self, stream_items_record: StreamItemsRecord ) -> None: """Mark async iterator for stream items as completed.""" stream_items_record.is_completed_async_iterator = True + self.set_is_final_record(stream_items_record) def add_field_error( self, incremental_data_record: IncrementalDataRecord, error: GraphQLError @@ -657,29 +755,33 @@ def build_error_response( def filter( self, - null_path: Path, + null_path: Path | None, erroring_incremental_data_record: IncrementalDataRecord, ) -> None: """Filter out the given erroring incremental data record.""" - null_path_list = null_path.as_list() + null_path_list = null_path.as_list() if null_path else [] + + streams: list[StreamRecord] = [] - descendants = self._get_descendants(erroring_incremental_data_record.children) + children = self._get_children(erroring_incremental_data_record) + descendants = self._get_descendants(children) for child in descendants: - if not 
self._matches_path(child.path, null_path_list): + if not self._nulls_child_subsequent_result_record(child, null_path_list): continue child.filtered = True if isinstance(child, StreamItemsRecord): - async_iterator = child.async_iterator - if async_iterator: - try: - close_async_iterator = async_iterator.aclose() # type:ignore - except AttributeError: # pragma: no cover - pass - else: - self._add_task(close_async_iterator) + streams.append(child.stream_record) + + early_returns = [] + for stream in streams: + early_return = stream.early_return + if early_return: + early_returns.append(early_return()) + if early_returns: + self._add_task(gather(*early_returns)) async def _subscribe( self, @@ -688,6 +790,8 @@ async def _subscribe( is_done = False pending = self._pending + await sleep(0) # execute pending tasks + try: while not is_done: released = self._released @@ -709,20 +813,18 @@ async def _subscribe( self._resolve = resolve = Event() await resolve.wait() finally: - close_async_iterators = [] - for incremental_data_record in pending: - if isinstance( - incremental_data_record, StreamItemsRecord - ): # pragma: no cover - async_iterator = incremental_data_record.async_iterator - if async_iterator: - try: - close_async_iterator = async_iterator.aclose() # type: ignore - except AttributeError: - pass - else: - close_async_iterators.append(close_async_iterator) - await gather(*close_async_iterators) + streams: list[StreamRecord] = [] + descendants = self._get_descendants(pending) + for subsequent_result_record in descendants: # pragma: no cover + if isinstance(subsequent_result_record, StreamItemsRecord): + streams.append(subsequent_result_record.stream_record) + early_returns = [] + for stream in streams: # pragma: no cover + early_return = stream.early_return + if early_return: + early_returns.append(early_return()) + if early_returns: # pragma: no cover + await gather(*early_returns) def _trigger(self) -> None: """Trigger the resolve event.""" @@ -731,82 +833,129 @@ 
def _trigger(self) -> None: resolve.set() self._resolve = Event() - def _introduce(self, item: SubsequentDataRecord) -> None: + def _introduce(self, item: SubsequentResultRecord) -> None: """Introduce a new IncrementalDataRecord.""" self._pending[item] = None - def _release(self, item: SubsequentDataRecord) -> None: + def _release(self, item: SubsequentResultRecord) -> None: """Release the given IncrementalDataRecord.""" if item in self._pending: self._released[item] = None self._trigger() - def _push(self, item: SubsequentDataRecord) -> None: + def _push(self, item: SubsequentResultRecord) -> None: """Push the given IncrementalDataRecord.""" self._released[item] = None self._pending[item] = None self._trigger() def _get_incremental_result( - self, completed_records: Collection[SubsequentDataRecord] + self, completed_records: Collection[SubsequentResultRecord] ) -> SubsequentIncrementalExecutionResult | None: """Get the incremental result with the completed records.""" + update = self._process_pending(completed_records) + incremental, completed = update.incremental, update.completed + + has_next = bool(self._pending) + if not incremental and not completed and has_next: + return None + + return SubsequentIncrementalExecutionResult( + has_next, incremental or None, completed or None + ) + + def _process_pending( + self, + completed_records: Collection[SubsequentResultRecord], + ) -> IncrementalUpdate: + """Process the pending records.""" incremental_results: list[IncrementalResult] = [] - encountered_completed_async_iterator = False - append_result = incremental_results.append - for incremental_data_record in completed_records: - incremental_result: IncrementalResult - for child in incremental_data_record.children: + completed_results: list[CompletedResult] = [] + to_result = self._completed_record_to_result + for subsequent_result_record in completed_records: + for child in subsequent_result_record.children: if child.filtered: continue self._publish(child) - if 
isinstance(incremental_data_record, StreamItemsRecord): - items = incremental_data_record.items - if incremental_data_record.is_completed_async_iterator: + incremental_result: IncrementalResult + if isinstance(subsequent_result_record, StreamItemsRecord): + if subsequent_result_record.is_final_record: + completed_results.append( + to_result(subsequent_result_record.stream_record) + ) + if subsequent_result_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload - encountered_completed_async_iterator = True - continue # pragma: no cover + continue + if subsequent_result_record.stream_record.errors: + continue incremental_result = IncrementalStreamResult( - items, - incremental_data_record.errors - if incremental_data_record.errors - else None, - incremental_data_record.path, - incremental_data_record.label, + subsequent_result_record.items, + subsequent_result_record.errors or None, + subsequent_result_record.stream_record.path, ) + incremental_results.append(incremental_result) else: - data = incremental_data_record.data - incremental_result = IncrementalDeferResult( - data, - incremental_data_record.errors - if incremental_data_record.errors - else None, - incremental_data_record.path, - incremental_data_record.label, - ) - append_result(incremental_result) - - has_next = bool(self._pending) - if incremental_results: - return SubsequentIncrementalExecutionResult( - incremental=incremental_results, has_next=has_next - ) - if encountered_completed_async_iterator and not has_next: - return SubsequentIncrementalExecutionResult(has_next=False) - return None + completed_results.append(to_result(subsequent_result_record)) + if subsequent_result_record.errors: + continue + for ( + deferred_grouped_field_set_record + ) in subsequent_result_record.deferred_grouped_field_set_records: + if not deferred_grouped_field_set_record.sent: + deferred_grouped_field_set_record.sent = True + incremental_result = IncrementalDeferResult( + 
deferred_grouped_field_set_record.data, + deferred_grouped_field_set_record.errors or None, + deferred_grouped_field_set_record.path, + ) + incremental_results.append(incremental_result) + return IncrementalUpdate(incremental_results, completed_results) + + @staticmethod + def _completed_record_to_result( + completed_record: DeferredFragmentRecord | StreamRecord, + ) -> CompletedResult: + """Convert the completed record to a result.""" + return CompletedResult( + completed_record.path, + completed_record.label or None, + completed_record.errors or None, + ) - def _publish(self, subsequent_result_record: SubsequentDataRecord) -> None: + def _publish(self, subsequent_result_record: SubsequentResultRecord) -> None: """Publish the given incremental data record.""" - if subsequent_result_record.is_completed: + if isinstance(subsequent_result_record, StreamItemsRecord): + if subsequent_result_record.is_completed: + self._push(subsequent_result_record) + else: + self._introduce(subsequent_result_record) + elif subsequent_result_record._pending: # noqa: SLF001 + self._introduce(subsequent_result_record) + else: self._push(subsequent_result_record) + + @staticmethod + def _get_children( + erroring_incremental_data_record: IncrementalDataRecord, + ) -> dict[SubsequentResultRecord, None]: + """Get the children of the given erroring incremental data record.""" + children: dict[SubsequentResultRecord, None] = {} + if isinstance(erroring_incremental_data_record, DeferredGroupedFieldSetRecord): + for ( + erroring_incremental_result_record + ) in erroring_incremental_data_record.deferred_fragment_records: + for child in erroring_incremental_result_record.children: + children[child] = None else: - self._introduce(subsequent_result_record) + for child in erroring_incremental_data_record.children: + children[child] = None + return children def _get_descendants( self, - children: dict[SubsequentDataRecord, None], - descendants: dict[SubsequentDataRecord, None] | None = None, - ) -> 
dict[SubsequentDataRecord, None]: + children: dict[SubsequentResultRecord, None], + descendants: dict[SubsequentResultRecord, None] | None = None, + ) -> dict[SubsequentResultRecord, None]: """Get the descendants of the given children.""" if descendants is None: descendants = {} @@ -815,6 +964,24 @@ def _get_descendants( self._get_descendants(child.children, descendants) return descendants + def _nulls_child_subsequent_result_record( + self, + subsequent_result_record: SubsequentResultRecord, + null_path: list[str | int], + ) -> bool: + """Check whether the given subsequent result record is nulled.""" + incremental_data_records: ( + list[SubsequentResultRecord] | dict[DeferredGroupedFieldSetRecord, None] + ) = ( + [subsequent_result_record] + if isinstance(subsequent_result_record, StreamItemsRecord) + else subsequent_result_record.deferred_grouped_field_set_records + ) + return any( + self._matches_path(incremental_data_record.path, null_path) + for incremental_data_record in incremental_data_records + ) + def _matches_path( self, test_path: list[str | int], base_path: list[str | int] ) -> bool: @@ -829,79 +996,148 @@ def _add_task(self, awaitable: Awaitable[Any]) -> None: task.add_done_callback(tasks.discard) -class InitialResultRecord(NamedTuple): - """Formatted subsequent incremental execution result""" +class InitialResultRecord: + """Initial result record""" errors: list[GraphQLError] - children: dict[SubsequentDataRecord, None] + children: dict[SubsequentResultRecord, None] + + def __init__(self) -> None: + self.errors = [] + self.children = {} + + +class DeferredGroupedFieldSetRecord: + """Deferred grouped field set record""" + + path: list[str | int] + deferred_fragment_records: list[DeferredFragmentRecord] + grouped_field_set: GroupedFieldSet + should_initiate_defer: bool + errors: list[GraphQLError] + data: dict[str, Any] | None + sent: bool + + def __init__( + self, + deferred_fragment_records: list[DeferredFragmentRecord], + grouped_field_set: 
GroupedFieldSet, + should_initiate_defer: bool, + path: Path | None = None, + ) -> None: + self.path = path.as_list() if path else [] + self.deferred_fragment_records = deferred_fragment_records + self.grouped_field_set = grouped_field_set + self.should_initiate_defer = should_initiate_defer + self.errors = [] + self.sent = False + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [ + f"deferred_fragment_records={self.deferred_fragment_records!r}", + f"grouped_field_set={self.grouped_field_set!r}", + ] + if self.path: + args.append(f"path={self.path!r}") + return f"{name}({', '.join(args)})" class DeferredFragmentRecord: - """A record collecting data marked with the defer directive""" + """Deferred fragment record""" + path: list[str | int] + label: str | None + children: dict[SubsequentResultRecord, None] + deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None] errors: list[GraphQLError] + filtered: bool + _pending: dict[DeferredGroupedFieldSetRecord, None] + + def __init__(self, path: Path | None = None, label: str | None = None) -> None: + self.path = path.as_list() if path else [] + self.label = label + self.children = {} + self.filtered = False + self.deferred_grouped_field_set_records = {} + self.errors = [] + self._pending = {} + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [] + if self.path: + args.append(f"path={self.path!r}") + if self.label: + args.append(f"label={self.label!r}") + return f"{name}({', '.join(args)})" + + +class StreamRecord: + """Stream record""" + label: str | None path: list[str | int] - data: dict[str, Any] | None - children: dict[SubsequentDataRecord, None] - is_completed: bool - filtered: bool + errors: list[GraphQLError] + early_return: Callable[[], Awaitable[Any]] | None - def __init__(self, label: str | None, path: Path | None) -> None: + def __init__( + self, + path: Path, + label: str | None = None, + early_return: Callable[[], 
Awaitable[Any]] | None = None, + ) -> None: + self.path = path.as_list() self.label = label - self.path = path.as_list() if path else [] self.errors = [] - self.children = {} - self.is_completed = self.filtered = False - self.data = None + self.early_return = early_return def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] + args: list[str] = [] + if self.path: + args.append(f"path={self.path!r}") if self.label: args.append(f"label={self.label!r}") - if self.data is not None: - args.append("data") return f"{name}({', '.join(args)})" class StreamItemsRecord: - """A record collecting items marked with the stream directive""" + """Stream items record""" errors: list[GraphQLError] - label: str | None + stream_record: StreamRecord path: list[str | int] - items: list[str] | None - children: dict[SubsequentDataRecord, None] - async_iterator: AsyncIterator[Any] | None + items: list[str] + children: dict[SubsequentResultRecord, None] + is_final_record: bool is_completed_async_iterator: bool is_completed: bool filtered: bool def __init__( self, - label: str | None, - path: Path | None, - async_iterator: AsyncIterator[Any] | None = None, + stream_record: StreamRecord, + path: Path | None = None, ) -> None: - self.label = label + self.stream_record = stream_record self.path = path.as_list() if path else [] - self.async_iterator = async_iterator - self.errors = [] self.children = {} - self.is_completed_async_iterator = self.is_completed = self.filtered = False - self.items = None + self.errors = [] + self.is_completed_async_iterator = self.is_completed = False + self.is_final_record = self.filtered = False + self.items = [] def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] - if self.label: - args.append(f"label={self.label!r}") - if self.items is not None: - args.append("items") + args: list[str] = [f"stream_record={self.stream_record!r}"] + if self.path: + 
args.append(f"path={self.path!r}") return f"{name}({', '.join(args)})" -SubsequentDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] +IncrementalDataRecord = Union[ + InitialResultRecord, DeferredGroupedFieldSetRecord, StreamItemsRecord +] -IncrementalDataRecord = Union[InitialResultRecord, SubsequentDataRecord] +SubsequentResultRecord = Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index 10faca9e..28ad1a92 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -33,12 +33,16 @@ from .print_path_list import print_path_list from .simple_pub_sub import SimplePubSub, SimplePubSubIterator from .undefined import Undefined, UndefinedType +from .ref_map import RefMap +from .ref_set import RefSet __all__ = [ "AwaitableOrValue", "Description", "FrozenError", "Path", + "RefMap", + "RefSet", "SimplePubSub", "SimplePubSubIterator", "Undefined", diff --git a/src/graphql/pyutils/ref_map.py b/src/graphql/pyutils/ref_map.py new file mode 100644 index 00000000..0cffd533 --- /dev/null +++ b/src/graphql/pyutils/ref_map.py @@ -0,0 +1,79 @@ +"""A Map class that work similar to JavaScript.""" + +from __future__ import annotations + +from collections.abc import MutableMapping + +try: + MutableMapping[str, int] +except TypeError: # Python < 3.9 + from typing import MutableMapping +from typing import Any, Iterable, Iterator, TypeVar + +__all__ = ["RefMap"] + +K = TypeVar("K") +V = TypeVar("V") + + +class RefMap(MutableMapping[K, V]): + """A dictionary like object that allows mutable objects as keys. + + This class keeps the insertion order like a normal dictionary. + + Note that the implementation is limited to what is needed internally. 
+ """ + + _map: dict[int, tuple[K, V]] + + def __init__(self, items: Iterable[tuple[K, V]] | None = None) -> None: + super().__init__() + self._map = {} + if items: + self.update(items) + + def __setitem__(self, key: K, value: V) -> None: + self._map[id(key)] = (key, value) + + def __getitem__(self, key: K) -> Any: + return self._map[id(key)][1] + + def __delitem__(self, key: K) -> None: + del self._map[id(key)] + + def __contains__(self, key: Any) -> bool: + return id(key) in self._map + + def __len__(self) -> int: + return len(self._map) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({list(self.items())!r})" + + def get(self, key: Any, default: Any = None) -> Any: + """Get the mapped value for the given key.""" + try: + return self._map[id(key)][1] + except KeyError: + return default + + def __iter__(self) -> Iterator[K]: + return self.keys() + + def keys(self) -> Iterator[K]: # type: ignore + """Return an iterator over the keys of the map.""" + return (item[0] for item in self._map.values()) + + def values(self) -> Iterator[V]: # type: ignore + """Return an iterator over the values of the map.""" + return (item[1] for item in self._map.values()) + + def items(self) -> Iterator[tuple[K, V]]: # type: ignore + """Return an iterator over the key/value-pairs of the map.""" + return self._map.values() # type: ignore + + def update(self, items: Iterable[tuple[K, V]] | None = None) -> None: # type: ignore + """Update the map with the given key/value-pairs.""" + if items: + for key, value in items: + self[key] = value diff --git a/src/graphql/pyutils/ref_set.py b/src/graphql/pyutils/ref_set.py new file mode 100644 index 00000000..731c021d --- /dev/null +++ b/src/graphql/pyutils/ref_set.py @@ -0,0 +1,67 @@ +"""A Set class that work similar to JavaScript.""" + +from __future__ import annotations + +from collections.abc import MutableSet + +try: + MutableSet[int] +except TypeError: # Python < 3.9 + from typing import MutableSet +from contextlib import 
suppress +from typing import Any, Iterable, Iterator, TypeVar + +from .ref_map import RefMap + +__all__ = ["RefSet"] + + +T = TypeVar("T") + + +class RefSet(MutableSet[T]): + """A set like object that allows mutable objects as elements. + + This class keeps the insertion order unlike a normal set. + + Note that the implementation is limited to what is needed internally. + """ + + _map: RefMap[T, None] + + def __init__(self, values: Iterable[T] | None = None) -> None: + super().__init__() + self._map = RefMap() + if values: + self.update(values) + + def __contains__(self, key: Any) -> bool: + return key in self._map + + def __iter__(self) -> Iterator[T]: + return iter(self._map) + + def __len__(self) -> int: + return len(self._map) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({list(self)!r})" + + def add(self, value: T) -> None: + """Add the given item to the set.""" + self._map[value] = None + + def remove(self, value: T) -> None: + """Remove the given item from the set.""" + del self._map[value] + + def discard(self, value: T) -> None: + """Remove the given item from the set if it exists.""" + with suppress(KeyError): + self.remove(value) + + def update(self, values: Iterable[T] | None = None) -> None: + """Update the set with the given items.""" + if values: + for item in values: + self.add(item) diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 9a689809..89235856 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -2,10 +2,10 @@ from __future__ import annotations -from typing import Any, cast +from typing import Any from ...error import GraphQLError -from ...execution.collect_fields import collect_fields +from ...execution.collect_fields import FieldGroup, collect_fields from ...language import ( FieldNode, FragmentDefinitionNode, @@ -17,6 +17,10 @@ __all__ = 
["SingleFieldSubscriptionsRule"] +def to_nodes(field_group: FieldGroup) -> list[FieldNode]: + return [field_details.node for field_details in field_group.fields] + + class SingleFieldSubscriptionsRule(ValidationRule): """Subscriptions must only include a single non-introspection field. @@ -50,16 +54,12 @@ def enter_operation_definition( node, ).grouped_field_set if len(grouped_field_set) > 1: - field_selection_lists = list(grouped_field_set.values()) - extra_field_selection_lists = field_selection_lists[1:] + field_groups = list(grouped_field_set.values()) + extra_field_groups = field_groups[1:] extra_field_selection = [ - field - for fields in extra_field_selection_lists - for field in ( - fields - if isinstance(fields, list) - else [cast(FieldNode, fields)] - ) + node + for field_group in extra_field_groups + for node in to_nodes(field_group) ] self.report_error( GraphQLError( @@ -73,7 +73,7 @@ def enter_operation_definition( ) ) for field_group in grouped_field_set.values(): - field_name = field_group[0].name.value + field_name = to_nodes(field_group)[0].name.value if field_name.startswith("__"): self.report_error( GraphQLError( @@ -83,6 +83,6 @@ def enter_operation_definition( else f"Subscription '{operation_name}'" ) + " must not select an introspection top level field.", - field_group, + to_nodes(field_group), ) ) diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 85462147..ac8b9ae1 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -49,10 +49,16 @@ def execute_field( source, field_group, path, - incremental_data_record=None, + incremental_data_record, + defer_map, ): result = super().execute_field( - parent_type, source, field_group, path, incremental_data_record + parent_type, + source, + field_group, + path, + incremental_data_record, + defer_map, ) return result * 2 # type: ignore diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 83201377..d6d17105 
100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -15,7 +15,13 @@ execute, experimental_execute_incrementally, ) -from graphql.execution.incremental_publisher import DeferredFragmentRecord +from graphql.execution.incremental_publisher import ( + CompletedResult, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + StreamItemsRecord, + StreamRecord, +) from graphql.language import DocumentNode, parse from graphql.pyutils import Path, is_awaitable from graphql.type import ( @@ -145,19 +151,23 @@ async def null_async(_info) -> None: @staticmethod async def slow(_info) -> str: - """Simulate a slow async resolver returning a value.""" + """Simulate a slow async resolver returning a non-null value.""" await sleep(0) return "slow" + @staticmethod + async def slow_null(_info) -> None: + """Simulate a slow async resolver returning a null value.""" + await sleep(0) + @staticmethod def bad(_info) -> str: """Simulate a bad resolver raising an error.""" raise RuntimeError("bad") @staticmethod - async def friends(_info) -> AsyncGenerator[Friend, None]: - """A slow async generator yielding the first friend.""" - await sleep(0) + async def first_friend(_info) -> AsyncGenerator[Friend, None]: + """An async generator yielding the first friend.""" yield friends[0] @@ -183,6 +193,42 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_defer_directive(): + def can_format_and_print_completed_result(): + result = CompletedResult([]) + assert result.formatted == {"path": []} + assert str(result) == "CompletedResult(path=[])" + + result = CompletedResult( + path=["foo", 1], label="bar", errors=[GraphQLError("oops")] + ) + assert result.formatted == { + "path": ["foo", 1], + "label": "bar", + "errors": [{"message": "oops"}], + } + assert ( + str(result) == "CompletedResult(path=['foo', 1], label='bar'," + " errors=[GraphQLError('oops')])" + ) + + def can_compare_completed_result(): + args: dict[str, 
Any] = {"path": ["foo", 1], "label": "bar", "errors": []} + result = CompletedResult(**args) + assert result == CompletedResult(**args) + assert result != CompletedResult(**modified_args(args, path=["foo", 2])) + assert result != CompletedResult(**modified_args(args, label="baz")) + assert result != CompletedResult( + **modified_args(args, errors=[GraphQLError("oops")]) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result == args + assert result == dict(list(args.items())[:2]) + assert result != dict( + list(args.items())[:1] + [("errors", [GraphQLError("oops")])] + ) + def can_format_and_print_incremental_defer_result(): result = IncrementalDeferResult() assert result.formatted == {"data": None} @@ -192,20 +238,17 @@ def can_format_and_print_incremental_defer_result(): data={"hello": "world"}, errors=[GraphQLError("msg")], path=["foo", 1], - label="bar", extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, "errors": [{"message": "msg"}], "extensions": {"baz": 2}, - "label": "bar", "path": ["foo", 1], } assert ( str(result) == "IncrementalDeferResult(data={'hello': 'world'}," - " errors=[GraphQLError('msg')], path=['foo', 1], label='bar'," - " extensions={'baz': 2})" + " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" ) # noinspection PyTypeChecker @@ -214,7 +257,6 @@ def can_compare_incremental_defer_result(): "data": {"hello": "world"}, "errors": [GraphQLError("msg")], "path": ["foo", 1], - "label": "bar", "extensions": {"baz": 2}, } result = IncrementalDeferResult(**args) @@ -224,7 +266,6 @@ def can_compare_incremental_defer_result(): ) assert result != IncrementalDeferResult(**modified_args(args, errors=[])) assert result != IncrementalDeferResult(**modified_args(args, path=["foo", 2])) - assert result != IncrementalDeferResult(**modified_args(args, label="baz")) assert result != IncrementalDeferResult( 
**modified_args(args, extensions={"baz": 1}) ) @@ -238,7 +279,7 @@ def can_compare_incremental_defer_result(): assert result == dict(list(args.items())[:2]) assert result == dict(list(args.items())[:3]) assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) - assert result != {**args, "label": "baz"} + assert result != {**args, "extensions": {"baz": 3}} def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult() @@ -254,33 +295,28 @@ def can_format_and_print_initial_incremental_execution_result(): == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" ) - incremental = [IncrementalDeferResult(label="foo")] result = InitialIncrementalExecutionResult( data={"hello": "world"}, errors=[GraphQLError("msg")], - incremental=incremental, has_next=True, extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": [{"data": None, "label": "foo"}], "hasNext": True, "extensions": {"baz": 2}, } assert ( str(result) == "InitialIncrementalExecutionResult(" - "data={'hello': 'world'}, errors=[GraphQLError('msg')], incremental[1]," - " has_next, extensions={'baz': 2})" + "data={'hello': 'world'}, errors=[GraphQLError('msg')], has_next," + " extensions={'baz': 2})" ) def can_compare_initial_incremental_execution_result(): - incremental = [IncrementalDeferResult(label="foo")] args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "has_next": True, "extensions": {"baz": 2}, } @@ -292,9 +328,6 @@ def can_compare_initial_incremental_execution_result(): assert result != InitialIncrementalExecutionResult( **modified_args(args, errors=[]) ) - assert result != InitialIncrementalExecutionResult( - **modified_args(args, incremental=[]) - ) assert result != InitialIncrementalExecutionResult( **modified_args(args, has_next=False) ) @@ -311,20 +344,17 @@ def 
can_compare_initial_incremental_execution_result(): assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "hasNext": True, "extensions": {"baz": 2}, } assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "hasNext": True, } assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "hasNext": False, "extensions": {"baz": 2}, } @@ -338,27 +368,32 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == {"hasNext": True} assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" - incremental = [IncrementalDeferResult(label="foo")] + incremental = [IncrementalDeferResult()] + completed = [CompletedResult(["foo", 1])] result = SubsequentIncrementalExecutionResult( - incremental=incremental, has_next=True, + incremental=incremental, + completed=completed, extensions={"baz": 2}, ) assert result.formatted == { - "incremental": [{"data": None, "label": "foo"}], "hasNext": True, + "incremental": [{"data": None}], + "completed": [{"path": ["foo", 1]}], "extensions": {"baz": 2}, } assert ( - str(result) == "SubsequentIncrementalExecutionResult(incremental[1]," - " has_next, extensions={'baz': 2})" + str(result) == "SubsequentIncrementalExecutionResult(has_next," + " incremental[1], completed[1], extensions={'baz': 2})" ) def can_compare_subsequent_incremental_execution_result(): - incremental = [IncrementalDeferResult(label="foo")] + incremental = [IncrementalDeferResult()] + completed = [CompletedResult(path=["foo", 1])] args: dict[str, Any] = { - "incremental": incremental, "has_next": True, + "incremental": incremental, + "completed": completed, "extensions": {"baz": 2}, } result = SubsequentIncrementalExecutionResult(**args) @@ -377,25 +412,57 @@ def can_compare_subsequent_incremental_execution_result(): assert result != tuple(args.values())[:1] assert result 
!= (incremental, False) assert result == { - "incremental": incremental, "hasNext": True, + "incremental": incremental, + "completed": completed, "extensions": {"baz": 2}, } assert result == {"incremental": incremental, "hasNext": True} assert result != { - "incremental": incremental, "hasNext": False, + "incremental": incremental, + "completed": completed, "extensions": {"baz": 2}, } + def can_print_deferred_grouped_field_set_record(): + record = DeferredGroupedFieldSetRecord([], {}, False) + assert ( + str(record) == "DeferredGroupedFieldSetRecord(" + "deferred_fragment_records=[], grouped_field_set={})" + ) + record = DeferredGroupedFieldSetRecord([], {}, True, Path(None, "foo", "Foo")) + assert ( + str(record) == "DeferredGroupedFieldSetRecord(" + "deferred_fragment_records=[], grouped_field_set={}, path=['foo'])" + ) + def can_print_deferred_fragment_record(): record = DeferredFragmentRecord(None, None) - assert str(record) == "DeferredFragmentRecord(path=[])" - record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar")) - assert str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo')" - record.data = {"hello": "world"} + assert str(record) == "DeferredFragmentRecord()" + record = DeferredFragmentRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "DeferredFragmentRecord(path=['bar'], label='foo')" + + def can_print_stream_record(): + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" + record.path = [] + assert str(record) == "StreamRecord(label='foo')" + record.label = None + assert str(record) == "StreamRecord()" + + def can_print_stream_items_record(): + record = StreamItemsRecord( + StreamRecord(Path(None, "bar", "Bar"), "foo"), + Path(None, "baz", "Baz"), + ) + assert ( + str(record) == "StreamItemsRecord(stream_record=StreamRecord(" + "path=['bar'], label='foo'), path=['baz'])" + ) + record = StreamItemsRecord(StreamRecord(Path(None, "bar", "Bar"))) 
assert ( - str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo', data)" + str(record) == "StreamItemsRecord(stream_record=StreamRecord(path=['bar']))" ) @pytest.mark.asyncio @@ -419,6 +486,7 @@ async def can_defer_fragments_containing_scalar_types(): {"data": {"hero": {"id": "1"}}, "hasNext": True}, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -470,6 +538,7 @@ async def does_not_disable_defer_with_null_if_argument(): {"data": {"hero": {"id": "1"}}, "hasNext": True}, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -514,9 +583,8 @@ async def can_defer_fragments_on_the_top_level_query_field(): assert result == [ {"data": {}, "hasNext": True}, { - "incremental": [ - {"data": {"hero": {"id": "1"}}, "path": [], "label": "DeferQuery"} - ], + "incremental": [{"data": {"hero": {"id": "1"}}, "path": []}], + "completed": [{"path": [], "label": "DeferQuery"}], "hasNext": False, }, ] @@ -551,9 +619,9 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): } ], "path": [], - "label": "DeferQuery", } ], + "completed": [{"path": [], "label": "DeferQuery"}], "hasNext": False, }, ] @@ -584,6 +652,10 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): {"data": {"hero": {}}, "hasNext": True}, { "incremental": [ + { + "data": {"id": "1"}, + "path": ["hero"], + }, { "data": { "friends": [ @@ -593,14 +665,12 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): ] }, "path": ["hero"], - "label": "DeferNested", - }, - { - "data": {"id": "1"}, - "path": ["hero"], - "label": "DeferTop", }, ], + "completed": [ + {"path": ["hero"], "label": "DeferTop"}, + {"path": ["hero"], "label": "DeferNested"}, + ], "hasNext": False, }, ] @@ -625,13 +695,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): assert result == [ {"data": 
{"hero": {"name": "Luke"}}, "hasNext": True}, { - "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["hero"], - "label": "DeferTop", - }, - ], + "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, }, ] @@ -656,13 +720,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first assert result == [ {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, { - "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["hero"], - "label": "DeferTop", - }, - ], + "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, }, ] @@ -686,19 +744,14 @@ async def can_defer_an_inline_fragment(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["hero"], - "label": "InlineDeferred", - }, - ], + "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "completed": [{"path": ["hero"], "label": "InlineDeferred"}], "hasNext": False, }, ] @pytest.mark.asyncio - async def emits_empty_defer_fragments(): + async def does_not_emit_empty_defer_fragments(): document = parse( """ query HeroNameQuery { @@ -717,19 +770,164 @@ async def emits_empty_defer_fragments(): assert result == [ {"data": {"hero": {}}, "hasNext": True}, + { + "completed": [{"path": ["hero"]}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_varying_fields(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + ... 
@defer(label: "DeferName") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"id": "1"}, + "path": ["hero"], + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + }, + ], + "completed": [ + {"path": ["hero"], "label": "DeferID"}, + {"path": ["hero"], "label": "DeferName"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_varying_subfields(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": {}}, + "path": [], + }, + { + "data": {"id": "1"}, + "path": ["hero"], + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + }, + ], + "completed": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_var_subfields_async(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... 
@defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + + async def resolve(value): + return value + + result = await complete( + document, + { + "hero": { + "id": lambda _info: resolve(1), + "name": lambda _info: resolve("Luke"), + } + }, + ) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, { "incremental": [ { - "data": {}, + "data": {"hero": {}}, + "path": [], + }, + { + "data": {"id": "1"}, + "path": ["hero"], + }, + { + "data": {"name": "Luke"}, "path": ["hero"], }, ], + "completed": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], "hasNext": False, }, ] @pytest.mark.asyncio - async def can_separately_emit_defer_fragments_different_labels_varying_fields(): + async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): document = parse( """ query HeroNameQuery { @@ -737,7 +935,9 @@ async def can_separately_emit_defer_fragments_different_labels_varying_fields(): ... @defer(label: "DeferID") { id } - ... @defer(label: "DeferName") { + } + ... @defer(label: "DeferName") { + hero { name } } @@ -747,26 +947,84 @@ async def can_separately_emit_defer_fragments_different_labels_varying_fields(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "hasNext": True}, + { + "data": {"hero": {}}, + "hasNext": True, + }, { "incremental": [ { "data": {"id": "1"}, "path": ["hero"], - "label": "DeferID", }, { "data": {"name": "Luke"}, "path": ["hero"], - "label": "DeferName", }, ], + "completed": [ + {"path": ["hero"], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferName") { + hero { + name + ... 
@defer(label: "DeferID") { + id + } + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "hero": { + "name": "Luke", + }, + }, + "path": [], + }, + ], + "completed": [ + {"path": [], "label": "DeferName"}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "id": "1", + }, + "path": ["hero"], + }, + ], + "completed": [{"path": ["hero"], "label": "DeferID"}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_multiple_defers_on_the_same_object(): + async def can_deduplicate_multiple_defers_on_the_same_object(): document = parse( """ query { @@ -800,34 +1058,39 @@ async def does_not_deduplicate_multiple_defers_on_the_same_object(): {"data": {"hero": {"friends": [{}, {}, {}]}}, "hasNext": True}, { "incremental": [ - {"data": {}, "path": ["hero", "friends", 0]}, - {"data": {}, "path": ["hero", "friends", 0]}, - {"data": {}, "path": ["hero", "friends", 0]}, { "data": {"id": "2", "name": "Han"}, "path": ["hero", "friends", 0], }, - {"data": {}, "path": ["hero", "friends", 1]}, - {"data": {}, "path": ["hero", "friends", 1]}, - {"data": {}, "path": ["hero", "friends", 1]}, { "data": {"id": "3", "name": "Leia"}, "path": ["hero", "friends", 1], }, - {"data": {}, "path": ["hero", "friends", 2]}, - {"data": {}, "path": ["hero", "friends", 2]}, - {"data": {}, "path": ["hero", "friends", 2]}, { "data": {"id": "4", "name": "C-3PO"}, "path": ["hero", "friends", 2], }, ], + "completed": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "hasNext": 
False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_fields_present_in_the_initial_payload(): + async def deduplicates_fields_present_in_the_initial_payload(): document = parse( """ query { @@ -881,27 +1144,17 @@ async def does_not_deduplicate_fields_present_in_the_initial_payload(): { "incremental": [ { - "data": { - "nestedObject": { - "deeperObject": { - "bar": "bar", - }, - }, - "anotherNestedObject": { - "deeperObject": { - "foo": "foo", - }, - }, - }, - "path": ["hero"], + "data": {"bar": "bar"}, + "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_fields_present_in_a_parent_defer_payload(): + async def deduplicates_fields_present_in_a_parent_defer_payload(): document = parse( """ query { @@ -944,24 +1197,25 @@ async def does_not_deduplicate_fields_present_in_a_parent_defer_payload(): "path": ["hero"], }, ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { "incremental": [ { "data": { - "foo": "foo", "bar": "bar", }, "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_fields_with_deferred_fragments_at_multiple_levels(): + async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): document = parse( """ query { @@ -1014,58 +1268,51 @@ async def does_not_deduplicate_fields_with_deferred_fragments_at_multiple_levels assert result == [ { - "data": {"hero": {"nestedObject": {"deeperObject": {"foo": "foo"}}}}, + "data": { + "hero": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + }, + }, + }, + }, "hasNext": True, }, { "incremental": [ { - "data": { - "nestedObject": { - "deeperObject": { - "foo": "foo", - "bar": "bar", - }, - } - }, - "path": ["hero"], + "data": {"bar": "bar"}, + "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": 
[{"path": ["hero"]}], "hasNext": True, }, { "incremental": [ { - "data": { - "deeperObject": { - "foo": "foo", - "bar": "bar", - "baz": "baz", - } - }, - "path": ["hero", "nestedObject"], + "data": {"baz": "baz"}, + "path": ["hero", "nestedObject", "deeperObject"], }, ], "hasNext": True, + "completed": [{"path": ["hero", "nestedObject"]}], }, { "incremental": [ { - "data": { - "foo": "foo", - "bar": "bar", - "baz": "baz", - "bak": "bak", - }, + "data": {"bak": "bak"}, "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): + async def deduplicates_fields_from_deferred_fragments_branches_same_level(): document = parse( """ query { @@ -1109,10 +1356,10 @@ async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): }, "path": ["hero", "nestedObject", "deeperObject"], }, - { - "data": {"nestedObject": {"deeperObject": {}}}, - "path": ["hero"], - }, + ], + "completed": [ + {"path": ["hero"]}, + {"path": ["hero", "nestedObject", "deeperObject"]}, ], "hasNext": True, }, @@ -1120,18 +1367,18 @@ async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): "incremental": [ { "data": { - "foo": "foo", "bar": "bar", }, "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_combine_fields_from_deferred_fragments_branches_multi_levels(): + async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): document = parse( """ query { @@ -1179,16 +1426,17 @@ async def does_not_combine_fields_from_deferred_fragments_branches_multi_levels( "path": ["a", "b"], }, { - "data": {"a": {"b": {"e": {"f": "f"}}}, "g": {"h": "h"}}, + "data": {"g": {"h": "h"}}, "path": [], }, ], + "completed": [{"path": ["a", 
"b"]}, {"path": []}], "hasNext": False, }, ] @pytest.mark.asyncio - async def preserves_error_boundaries_null_first(): + async def nulls_cross_defer_boundaries_null_first(): document = parse( """ query { @@ -1227,11 +1475,16 @@ async def preserves_error_boundaries_null_first(): { "incremental": [ { - "data": {"b": {"c": {"d": "d"}}}, + "data": {"b": {"c": {}}}, "path": ["a"], }, { - "data": {"a": {"b": {"c": None}, "someField": "someField"}}, + "data": {"d": "d"}, + "path": ["a", "b", "c"], + }, + ], + "completed": [ + { "path": [], "errors": [ { @@ -1242,12 +1495,13 @@ async def preserves_error_boundaries_null_first(): }, ], }, + {"path": ["a"]}, ], "hasNext": False, }, ] - async def preserves_error_boundaries_value_first(): + async def nulls_cross_defer_boundaries_value_first(): document = parse( """ query { @@ -1291,7 +1545,16 @@ async def preserves_error_boundaries_value_first(): { "incremental": [ { - "data": {"b": {"c": None}, "someField": "someField"}, + "data": {"b": {"c": {}}}, + "path": ["a"], + }, + { + "data": {"d": "d"}, + "path": ["a", "b", "c"], + }, + ], + "completed": [ + { "path": ["a"], "errors": [ { @@ -1303,7 +1566,6 @@ async def preserves_error_boundaries_value_first(): ], }, { - "data": {"a": {"b": {"c": {"d": "d"}}}}, "path": [], }, ], @@ -1311,7 +1573,7 @@ async def preserves_error_boundaries_value_first(): }, ] - async def correctly_handle_a_slow_null(): + async def filters_a_payload_with_a_null_that_cannot_be_merged(): document = parse( """ query { @@ -1338,14 +1600,11 @@ async def correctly_handle_a_slow_null(): """ ) - async def slow_null(_info) -> None: - await sleep(0) - result = await complete( document, { "a": { - "b": {"c": {"d": "d", "nonNullErrorField": slow_null}}, + "b": {"c": {"d": "d", "nonNullErrorField": Resolvers.slow_null}}, "someField": "someField", } }, @@ -1359,16 +1618,20 @@ async def slow_null(_info) -> None: { "incremental": [ { - "data": {"b": {"c": {"d": "d"}}}, + "data": {"b": {"c": {}}}, "path": ["a"], }, + { + 
"data": {"d": "d"}, + "path": ["a", "b", "c"], + }, ], + "completed": [{"path": ["a"]}], "hasNext": True, }, { - "incremental": [ + "completed": [ { - "data": {"a": {"b": {"c": None}, "someField": "someField"}}, "path": [], "errors": [ { @@ -1406,29 +1669,17 @@ async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): }, ) - assert result == [ - { - "data": {"hero": None}, - "errors": [ - { - "message": "Cannot return null" - " for non-nullable field Hero.nonNullName.", - "locations": [{"line": 4, "column": 17}], - "path": ["hero", "nonNullName"], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "data": {"hero": {"name": "Luke"}}, - "path": [], - }, - ], - "hasNext": False, - }, - ] + assert result == { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullName"], + }, + ], + } async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): document = parse( @@ -1470,11 +1721,12 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): ], }, ], + "completed": [{"path": []}], "hasNext": False, }, ] - async def does_not_deduplicate_list_fields(): + async def deduplicates_list_fields(): document = parse( """ query { @@ -1508,23 +1760,12 @@ async def does_not_deduplicate_list_fields(): "hasNext": True, }, { - "incremental": [ - { - "data": { - "friends": [ - {"name": "Han"}, - {"name": "Leia"}, - {"name": "C-3PO"}, - ] - }, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_async_iterable_list_fields(): + async def deduplicates_async_iterable_list_fields(): document = parse( """ query { @@ -1542,14 +1783,10 @@ async def does_not_deduplicate_async_iterable_list_fields(): """ ) - async def resolve_friends(_info): - await sleep(0) - yield friends[0] - result = await complete( document, 
{ - "hero": {**hero, "friends": resolve_friends}, + "hero": {**hero, "friends": Resolvers.first_friend}, }, ) @@ -1559,17 +1796,12 @@ async def resolve_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "data": {"friends": [{"name": "Han"}]}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_empty_async_iterable_list_fields(): + async def deduplicates_empty_async_iterable_list_fields(): document = parse( """ query { @@ -1605,12 +1837,7 @@ async def resolve_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "data": {"friends": []}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -1650,15 +1877,24 @@ async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): { "incremental": [ { - "data": {"friends": [{"id": "2"}, {"id": "3"}, {"id": "4"}]}, - "path": ["hero"], - } + "data": {"id": "2"}, + "path": ["hero", "friends", 0], + }, + { + "data": {"id": "3"}, + "path": ["hero", "friends", 1], + }, + { + "data": {"id": "4"}, + "path": ["hero", "friends", 2], + }, ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_list_fields_that_return_empty_lists(): + async def deduplicates_list_fields_that_return_empty_lists(): document = parse( """ query { @@ -1685,17 +1921,12 @@ async def does_not_deduplicate_list_fields_that_return_empty_lists(): "hasNext": True, }, { - "incremental": [ - { - "data": {"friends": []}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_null_object_fields(): + async def deduplicates_null_object_fields(): document = parse( """ query { @@ -1722,17 +1953,12 @@ async def does_not_deduplicate_null_object_fields(): "hasNext": True, }, { - "incremental": [ - { - "data": {"nestedObject": None}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def 
does_not_deduplicate_async_object_fields(): + async def deduplicates_async_object_fields(): document = parse( """ query { @@ -1763,12 +1989,7 @@ async def resolve_nested_object(_info): "hasNext": True, }, { - "incremental": [ - { - "data": {"nestedObject": {"name": "foo"}}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -1806,6 +2027,7 @@ async def handles_errors_thrown_in_deferred_fragments(): ], }, ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -1832,9 +2054,8 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ + "completed": [ { - "data": None, "path": ["hero"], "errors": [ { @@ -1903,9 +2124,8 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ + "completed": [ { - "data": None, "path": ["hero"], "errors": [ { @@ -1953,6 +2173,7 @@ async def returns_payloads_in_correct_order(): "path": ["hero"], } ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1970,6 +2191,11 @@ async def returns_payloads_in_correct_order(): "path": ["hero", "friends", 2], }, ], + "completed": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "hasNext": False, }, ] @@ -2004,8 +2230,9 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): { "data": {"name": "Luke", "friends": [{}, {}, {}]}, "path": ["hero"], - }, + } ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { @@ -2023,6 +2250,11 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): "path": ["hero", "friends", 2], }, ], + "completed": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "hasNext": False, }, ] @@ -2046,7 +2278,7 @@ async def 
filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): ) result = await complete( - document, {"hero": {**hero, "friends": Resolvers.friends}} + document, {"hero": {**hero, "friends": Resolvers.first_friend}} ) assert result == { diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 5dc4b5f0..a7f747fb 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -50,6 +50,7 @@ def accepts_a_tuple_as_a_list_value(): result = _complete(list_field) assert result == ({"listField": list(list_field)}, None) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def accepts_a_set_as_a_list_value(): # Note that sets are not ordered in Python. list_field = {"apple", "banana", "coconut"} diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 3737bb6a..f5030c88 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -246,13 +246,13 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): { "incremental": [ { - "label": "defer-label", "path": ["first"], "data": { "promiseToGetTheNumber": 2, }, }, ], + "completed": [{"path": ["first"], "label": "defer-label"}], "hasNext": False, }, ] @@ -317,13 +317,13 @@ async def mutation_with_defer_is_not_executed_serially(): { "incremental": [ { - "label": "defer-label", "path": [], "data": { "first": {"theNumber": 1}, }, }, ], + "completed": [{"path": [], "label": "defer-label"}], "hasNext": False, }, ] diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index d611f7a9..5454e826 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -12,7 +12,7 @@ IncrementalStreamResult, experimental_execute_incrementally, ) -from graphql.execution.incremental_publisher import StreamItemsRecord +from graphql.execution.incremental_publisher import StreamRecord from graphql.language import DocumentNode, parse from graphql.pyutils import Path 
from graphql.type import ( @@ -156,29 +156,24 @@ def can_format_and_print_incremental_stream_result(): items=["hello", "world"], errors=[GraphQLError("msg")], path=["foo", 1], - label="bar", extensions={"baz": 2}, ) assert result.formatted == { "items": ["hello", "world"], "errors": [{"message": "msg"}], "extensions": {"baz": 2}, - "label": "bar", "path": ["foo", 1], } assert ( str(result) == "IncrementalStreamResult(items=['hello', 'world']," - " errors=[GraphQLError('msg')], path=['foo', 1], label='bar'," - " extensions={'baz': 2})" + " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" ) def can_print_stream_record(): - record = StreamItemsRecord(None, None, None) - assert str(record) == "StreamItemsRecord(path=[])" - record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), None) - assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo')" - record.items = ["hello", "world"] - assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo', items)" + record = StreamRecord(Path(None, 0, None)) + assert str(record) == "StreamRecord(path=[0])" + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" # noinspection PyTypeChecker def can_compare_incremental_stream_result(): @@ -186,7 +181,6 @@ def can_compare_incremental_stream_result(): "items": ["hello", "world"], "errors": [GraphQLError("msg")], "path": ["foo", 1], - "label": "bar", "extensions": {"baz": 2}, } result = IncrementalStreamResult(**args) @@ -196,12 +190,10 @@ def can_compare_incremental_stream_result(): ) assert result != IncrementalStreamResult(**modified_args(args, errors=[])) assert result != IncrementalStreamResult(**modified_args(args, path=["foo", 2])) - assert result != IncrementalStreamResult(**modified_args(args, label="baz")) assert result != IncrementalStreamResult( **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) - assert result == 
tuple(args.values())[:4] assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] @@ -210,7 +202,7 @@ def can_compare_incremental_stream_result(): assert result == dict(list(args.items())[:2]) assert result == dict(list(args.items())[:3]) assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) - assert result != {**args, "label": "baz"} + assert result != {**args, "extensions": {"baz": 1}} @pytest.mark.asyncio async def can_stream_a_list_field(): @@ -226,11 +218,12 @@ async def can_stream_a_list_field(): "hasNext": True, }, { - "incremental": [{"items": ["banana"], "path": ["scalarList", 1]}], + "incremental": [{"items": ["banana"], "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [{"items": ["coconut"], "path": ["scalarList", 2]}], + "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -249,15 +242,16 @@ async def can_use_default_value_of_initial_count(): "hasNext": True, }, { - "incremental": [{"items": ["apple"], "path": ["scalarList", 0]}], + "incremental": [{"items": ["apple"], "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [{"items": ["banana"], "path": ["scalarList", 1]}], + "incremental": [{"items": ["banana"], "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [{"items": ["coconut"], "path": ["scalarList", 2]}], + "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -317,8 +311,7 @@ async def returns_label_from_stream_directive(): "incremental": [ { "items": ["banana"], - "path": ["scalarList", 1], - "label": "scalar-stream", + "path": ["scalarList"], } ], "hasNext": True, @@ -327,10 +320,10 @@ async def returns_label_from_stream_directive(): "incremental": [ { "items": ["coconut"], - "path": ["scalarList", 2], - "label": "scalar-stream", + "path": ["scalarList"], } 
], + "completed": [{"path": ["scalarList"], "label": "scalar-stream"}], "hasNext": False, }, ] @@ -388,9 +381,10 @@ async def does_not_disable_stream_with_null_if_argument(): "incremental": [ { "items": ["coconut"], - "path": ["scalarList", 2], + "path": ["scalarList"], } ], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -419,7 +413,7 @@ async def can_stream_multi_dimensional_lists(): "incremental": [ { "items": [["banana", "banana", "banana"]], - "path": ["scalarListList", 1], + "path": ["scalarListList"], } ], "hasNext": True, @@ -428,9 +422,10 @@ async def can_stream_multi_dimensional_lists(): "incremental": [ { "items": [["coconut", "coconut", "coconut"]], - "path": ["scalarListList", 2], + "path": ["scalarListList"], } ], + "completed": [{"path": ["scalarListList"]}], "hasNext": False, }, ] @@ -449,7 +444,6 @@ async def can_stream_a_field_that_returns_a_list_of_awaitables(): ) async def await_friend(f): - await sleep(0) return f result = await complete( @@ -470,9 +464,10 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -491,7 +486,6 @@ async def can_stream_in_correct_order_with_list_of_awaitables(): ) async def await_friend(f): - await sleep(0) return f result = await complete( @@ -507,7 +501,7 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList", 0], + "path": ["friendList"], } ], "hasNext": True, @@ -516,7 +510,7 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Han", "id": "2"}], - "path": ["friendList", 1], + "path": ["friendList"], } ], "hasNext": True, @@ -525,9 +519,10 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ 
-573,9 +568,10 @@ async def get_id(f): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -594,7 +590,6 @@ async def handles_error_in_list_of_awaitables_before_initial_count_reached(): ) async def await_friend(f, i): - await sleep(0) if i == 1: raise RuntimeError("bad") return f @@ -623,9 +618,10 @@ async def await_friend(f, i): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -644,7 +640,6 @@ async def handles_error_in_list_of_awaitables_after_initial_count_reached(): ) async def await_friend(f, i): - await sleep(0) if i == 1: raise RuntimeError("bad") return f @@ -666,7 +661,7 @@ async def await_friend(f, i): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "bad", @@ -682,9 +677,10 @@ async def await_friend(f, i): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -704,7 +700,6 @@ async def can_stream_a_field_that_returns_an_async_iterable(): async def friend_list(_info): for i in range(3): - await sleep(0) yield friends[i] result = await complete(document, {"friendList": friend_list}) @@ -717,7 +712,7 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList", 0], + "path": ["friendList"], } ], "hasNext": True, @@ -726,7 +721,7 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Han", "id": "2"}], - "path": ["friendList", 1], + "path": ["friendList"], } ], "hasNext": True, @@ -735,12 +730,13 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + 
"path": ["friendList"], } ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -760,7 +756,6 @@ async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count() async def friend_list(_info): for i in range(3): - await sleep(0) yield friends[i] result = await complete(document, {"friendList": friend_list}) @@ -778,12 +773,13 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -831,7 +827,6 @@ async def can_handle_concurrent_calls_to_next_without_waiting(): async def friend_list(_info): for i in range(3): - await sleep(0) yield friends[i] result = await complete_async(document, 3, {"friendList": friend_list}) @@ -854,13 +849,16 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, }, - {"done": False, "value": {"hasNext": False}}, + { + "done": False, + "value": {"completed": [{"path": ["friendList"]}], "hasNext": False}, + }, {"done": True, "value": None}, ] @@ -878,9 +876,7 @@ async def handles_error_in_async_iterable_before_initial_count_is_reached(): ) async def friend_list(_info): - await sleep(0) yield friends[0] - await sleep(0) raise RuntimeError("bad") result = await complete(document, {"friendList": friend_list}) @@ -909,9 +905,7 @@ async def handles_error_in_async_iterable_after_initial_count_is_reached(): ) async def friend_list(_info): - await sleep(0) yield friends[0] - await sleep(0) raise RuntimeError("bad") result = await complete(document, {"friendList": friend_list}) @@ -923,10 +917,9 @@ async def friend_list(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "bad", @@ -963,10 
+956,9 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Cannot return null for non-nullable field" @@ -995,9 +987,7 @@ async def handles_null_for_non_null_async_items_after_initial_count_is_reached() async def friend_list(_info): try: - await sleep(0) yield friends[0] - await sleep(0) yield None finally: raise RuntimeError("Oops") @@ -1011,10 +1001,9 @@ async def friend_list(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1054,7 +1043,7 @@ async def scalar_list(_info): "incremental": [ { "items": [None], - "path": ["scalarList", 1], + "path": ["scalarList"], "errors": [ { "message": "String cannot represent value: {}", @@ -1064,6 +1053,7 @@ async def scalar_list(_info): ], }, ], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -1084,7 +1074,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} def get_friends(_info): @@ -1107,7 +1096,7 @@ def get_friends(_info): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "Oops", @@ -1123,9 +1112,10 @@ def get_friends(_info): "incremental": [ { "items": [{"nonNullName": "Han"}], - "path": ["friendList", 2], + "path": ["friendList"], }, ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1143,7 +1133,6 @@ async def handles_nested_async_error_in_complete_value_after_initial_count(): ) async def get_friend_name(i): - await sleep(0) if i < 0: raise RuntimeError("Oops") return friends[i].name @@ -1168,7 +1157,7 @@ def get_friends(_info): 
"incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "Oops", @@ -1184,9 +1173,10 @@ def get_friends(_info): "incremental": [ { "items": [{"nonNullName": "Han"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1207,7 +1197,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} def get_friends(_info): @@ -1227,10 +1216,9 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Oops", @@ -1257,7 +1245,6 @@ async def handles_nested_async_error_in_complete_value_after_initial_non_null(): ) async def get_friend_name(i): - await sleep(0) if i < 0: raise RuntimeError("Oops") return friends[i].name @@ -1279,10 +1266,9 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Oops", @@ -1312,7 +1298,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} async def get_friends(_info): @@ -1336,7 +1321,7 @@ async def get_friends(_info): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "Oops", @@ -1352,12 +1337,13 @@ async def get_friends(_info): "incremental": [ { "items": [{"nonNullName": "Han"}], - "path": ["friendList", 2], + "path": ["friendList"], }, ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1378,7 +1364,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 
else friends[i].name} async def get_friends(_info): @@ -1399,10 +1384,9 @@ async def get_friends(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Oops", @@ -1416,6 +1400,138 @@ async def get_friends(_info): }, ] + @pytest.mark.asyncio + async def handles_async_errors_in_complete_value_after_initial_count_no_aclose(): + # Handles async errors thrown by complete_value after initialCount is reached + # from async iterable for a non-nullable list when the async iterable does + # not provide an aclose method. + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + class AsyncIterableWithoutAclose: + def __init__(self): + self.count = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + count = self.count + self.count += 1 + if count == 1: + name = throw() + else: + if count: + count -= 1 # pragma: no cover + name = friends[count].name + return {"nonNullName": name} + + async_iterable = AsyncIterableWithoutAclose() + result = await complete(document, {"nonNullFriendList": async_iterable}) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "completed": [ + { + "path": ["nonNullFriendList"], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_errors_in_complete_value_after_initial_count_slow_aclose(): + # Handles async errors thrown by completeValue after initialCount is reached + # from async iterable for a non-nullable list when the async iterable provides + # concurrent next/return methods and has a slow aclose() + document = parse( + """ + query { + 
nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + class AsyncIterableWithSlowAclose: + def __init__(self): + self.count = 0 + self.finished = False + + def __aiter__(self): + return self + + async def __anext__(self): + if self.finished: + raise StopAsyncIteration # pragma: no cover + count = self.count + self.count += 1 + if count == 1: + name = throw() + else: + if count: + count -= 1 # pragma: no cover + name = friends[count].name + return {"nonNullName": name} + + async def aclose(self): + await sleep(0) + self.finished = True + + async_iterable = AsyncIterableWithSlowAclose() + result = await complete(document, {"nonNullFriendList": async_iterable}) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "completed": [ + { + "path": ["nonNullFriendList"], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + assert async_iterable.finished + @pytest.mark.asyncio async def filters_payloads_that_are_nulled(): document = parse( @@ -1432,10 +1548,9 @@ async def filters_payloads_that_are_nulled(): ) async def resolve_null(_info): - await sleep(0) + return None async def friend_list(_info): - await sleep(0) yield friends[0] result = await complete( @@ -1483,7 +1598,6 @@ async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): ) async def friend_list(_info): - await sleep(0) # pragma: no cover yield friends[0] # pragma: no cover result = await complete( @@ -1531,11 +1645,9 @@ async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): ) async def error_field(_info): - await sleep(0) raise RuntimeError("Oops") async def friend_list(_info): - await sleep(0) yield friends[0] result = await complete( @@ -1571,12 +1683,16 @@ async def friend_list(_info): }, { 
"items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], + "path": ["nestedObject", "nestedFriendList"], }, ], + "completed": [{"path": ["otherNestedObject"]}], "hasNext": True, }, - {"hasNext": False}, + { + "completed": [{"path": ["nestedObject", "nestedFriendList"]}], + "hasNext": False, + }, ] @pytest.mark.asyncio @@ -1600,10 +1716,9 @@ async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): ) async def resolve_null(_info): - await sleep(0) + return None async def friend_list(_info): - await sleep(0) yield friends[0] result = await complete( @@ -1646,11 +1761,13 @@ async def friend_list(_info): ], }, ], + "completed": [{"path": ["nestedObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): document = parse( """ @@ -1666,17 +1783,15 @@ async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): ) async def resolve_null(_info): - await sleep(0) + return None async def friend(): - await sleep(0) return { "name": friends[0].name, "nonNullName": resolve_null, } async def friend_list(_info): - await sleep(0) yield await friend() result = await complete(document, {"friendList": friend_list}) @@ -1692,7 +1807,7 @@ async def friend_list(_info): "incremental": [ { "items": [None], - "path": ["friendList", 0], + "path": ["friendList"], "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1706,6 +1821,7 @@ async def friend_list(_info): "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1716,15 +1832,14 @@ async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered() finished = False async def resolve_null(_info): - await sleep(0) + return None async def iterable(_info): nonlocal finished for i in range(3): - await sleep(0) friend = friends[i] yield {"name": friend.name, "nonNullName": None} - 
finished = True # pragma: no cover + finished = True document = parse( """ @@ -1762,6 +1877,8 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + assert not finished + result2 = await anext(iterator) assert result2.formatted == { "incremental": [ @@ -1782,13 +1899,14 @@ async def iterable(_info): ], }, ], + "completed": [{"path": ["nestedObject"]}], "hasNext": False, } with pytest.raises(StopAsyncIteration): await anext(iterator) - assert not finished # running iterator cannot be canceled + assert finished @pytest.mark.asyncio async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): @@ -1804,11 +1922,9 @@ async def handles_awaitables_from_complete_value_after_initial_count_is_reached( ) async def get_friend_name(i): - await sleep(0) return friends[i].name async def get_friend(i): - await sleep(0) if i < 2: return friends[i] return {"id": friends[2].id, "name": get_friend_name(i)} @@ -1834,7 +1950,7 @@ async def get_friends(_info): "incremental": [ { "items": [{"id": "2", "name": "Han"}], - "path": ["friendList", 1], + "path": ["friendList"], } ], "hasNext": True, @@ -1843,12 +1959,13 @@ async def get_friends(_info): "incremental": [ { "items": [{"id": "3", "name": "Leia"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1877,7 +1994,6 @@ async def handles_overlapping_deferred_and_non_deferred_streams(): async def get_nested_friend_list(_info): for i in range(2): - await sleep(0) yield friends[i] result = await complete( @@ -1889,142 +2005,39 @@ async def get_nested_friend_list(_info): }, ) - assert result in ( - # exact order of results depends on timing and Python version - [ - { - "data": {"nestedObject": {"nestedFriendList": []}}, - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1"}], - "path": ["nestedObject", 
"nestedFriendList", 0], - }, - {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - ], - "hasNext": True, - }, - { - "hasNext": False, - }, - ], - [ - { - "data": {"nestedObject": {"nestedFriendList": []}}, - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - ], - "hasNext": True, - }, - { - "hasNext": False, - }, - ], - [ - {"data": {"nestedObject": {"nestedFriendList": []}}, "hasNext": True}, - { - "incremental": [ - { - "items": [{"id": "1"}], - "path": ["nestedObject", "nestedFriendList", 0], - } - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2"}], - "path": ["nestedObject", "nestedFriendList", 1], - } - ], - "hasNext": True, - }, - { - "incremental": [ - {"data": {"nestedFriendList": []}, "path": ["nestedObject"]} - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], - } - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2", "name": 
"Han"}], - "path": ["nestedObject", "nestedFriendList", 1], - } - ], - "hasNext": True, + assert result == [ + { + "data": { + "nestedObject": { + "nestedFriendList": [], + }, }, - {"hasNext": False}, - ], - ) + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList"], + }, + ], + "completed": [{"path": ["nestedObject"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList"], + }, + ], + "hasNext": True, + }, + { + "completed": [{"path": ["nestedObject", "nestedFriendList"]}], + "hasNext": False, + }, + ] @pytest.mark.asyncio async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): @@ -2052,7 +2065,6 @@ async def slow_field(_info): async def get_friends(_info): for i in range(2): - await sleep(0) yield friends[i] execute_result = experimental_execute_incrementally( @@ -2081,6 +2093,7 @@ async def get_friends(_info): "path": ["nestedObject"], }, ], + "completed": [{"path": ["nestedObject"]}], "hasNext": True, } result3 = await anext(iterator) @@ -2088,7 +2101,7 @@ async def get_friends(_info): "incremental": [ { "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], + "path": ["nestedObject", "nestedFriendList"], }, ], "hasNext": True, @@ -2098,13 +2111,14 @@ async def get_friends(_info): "incremental": [ { "items": [{"name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], + "path": ["nestedObject", "nestedFriendList"], }, ], "hasNext": True, } result5 = await anext(iterator) assert result5.formatted == { + "completed": [{"path": ["nestedObject", "nestedFriendList"]}], "hasNext": False, } @@ -2136,9 +2150,7 @@ async def slow_field(_info): ) async def get_friends(_info): - await sleep(0) yield friends[0] - await sleep(0) yield {"id": friends[1].id, "name": slow_field} await resolve_iterable.wait() @@ -2163,27 +2175,36 @@ async def 
get_friends(_info): { "data": {"name": "Luke"}, "path": ["friendList", 0], - "label": "DeferName", }, { "items": [{"id": "2"}], - "path": ["friendList", 1], - "label": "stream-label", + "path": ["friendList"], }, ], + "completed": [{"path": ["friendList", 0], "label": "DeferName"}], "hasNext": True, } resolve_slow_field.set() result3 = await anext(iterator) assert result3.formatted == { + "completed": [ + { + "path": ["friendList"], + "label": "stream-label", + }, + ], + "hasNext": True, + } + result4 = await anext(iterator) + assert result4.formatted == { "incremental": [ { "data": {"name": "Han"}, "path": ["friendList", 1], - "label": "DeferName", }, ], + "completed": [{"path": ["friendList", 1], "label": "DeferName"}], "hasNext": False, } @@ -2214,11 +2235,8 @@ async def slow_field(_info): ) async def get_friends(_info): - await sleep(0) yield friends[0] - await sleep(0) yield {"id": friends[1].id, "name": slow_field} - await sleep(0) await resolve_iterable.wait() execute_result = await experimental_execute_incrementally( # type: ignore @@ -2242,14 +2260,13 @@ async def get_friends(_info): { "data": {"name": "Luke"}, "path": ["friendList", 0], - "label": "DeferName", }, { "items": [{"id": "2"}], - "path": ["friendList", 1], - "label": "stream-label", + "path": ["friendList"], }, ], + "completed": [{"path": ["friendList", 0], "label": "DeferName"}], "hasNext": True, } @@ -2259,15 +2276,16 @@ async def get_friends(_info): { "data": {"name": "Han"}, "path": ["friendList", 1], - "label": "DeferName", }, ], + "completed": [{"path": ["friendList", 1], "label": "DeferName"}], "hasNext": True, } resolve_iterable.set() result4 = await anext(iterator) assert result4.formatted == { + "completed": [{"path": ["friendList"], "label": "stream-label"}], "hasNext": False, } @@ -2275,13 +2293,12 @@ async def get_friends(_info): await anext(iterator) @pytest.mark.asyncio - async def finishes_async_iterable_when_returned_generator_is_closed(): + async def 
finishes_async_iterable_when_finished_generator_is_closed(): finished = False async def iterable(_info): nonlocal finished for i in range(3): - await sleep(0) yield friends[i] finished = True @@ -2311,7 +2328,6 @@ async def iterable(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - await sleep(0) assert finished @pytest.mark.asyncio @@ -2324,7 +2340,6 @@ def __aiter__(self): return self async def __anext__(self): - await sleep(0) index = self.index self.index = index + 1 try: @@ -2361,18 +2376,15 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(iterator) - await sleep(0) - await sleep(0) assert iterable.index == 4 @pytest.mark.asyncio - async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): + async def finishes_async_iterable_when_error_is_raised_in_finished_generator(): finished = False async def iterable(_info): nonlocal finished for i in range(3): - await sleep(0) yield friends[i] finished = True @@ -2404,5 +2416,4 @@ async def iterable(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - await sleep(0) assert finished diff --git a/tests/pyutils/test_ref_map.py b/tests/pyutils/test_ref_map.py new file mode 100644 index 00000000..96e15c58 --- /dev/null +++ b/tests/pyutils/test_ref_map.py @@ -0,0 +1,124 @@ +import pytest + +from graphql.pyutils import RefMap + +obj1 = {"a": 1, "b": 2, "c": 3} +obj2 = obj1.copy() +obj3 = obj1.copy() +obj4 = obj1.copy() + + +def describe_object_map(): + def can_create_an_empty_map(): + m = RefMap[str, int]() + assert not m + assert len(m) == 0 + assert list(m) == [] + assert list(m.keys()) == [] + assert list(m.values()) == [] + assert list(m.items()) == [] + + def can_create_a_map_with_scalar_keys_and_values(): + m = RefMap[str, int](list(obj1.items())) + assert m + assert len(m) == 3 + assert list(m) == ["a", "b", "c"] + assert list(m.keys()) == ["a", "b", "c"] + assert list(m.values()) == [1, 2, 3] + assert list(m.items()) == [("a", 
1), ("b", 2), ("c", 3)] + for k, v in m.items(): + assert k in m + assert m[k] == v + assert m.get(k) == v + assert v not in m + with pytest.raises(KeyError): + m[v] # type: ignore + assert m.get(v) is None + + def can_create_a_map_with_one_object_as_key(): + m = RefMap[dict, int]([(obj1, 1)]) + assert m + assert len(m) == 1 + assert list(m) == [obj1] + assert list(m.keys()) == [obj1] + assert list(m.values()) == [1] + assert list(m.items()) == [(obj1, 1)] + assert obj1 in m + assert 1 not in m + assert obj2 not in m + assert m[obj1] == 1 + assert m.get(obj1) == 1 + with pytest.raises(KeyError): + m[1] # type: ignore + assert m.get(1) is None + with pytest.raises(KeyError): + m[obj2] + assert m.get(obj2) is None + + def can_create_a_map_with_three_objects_as_keys(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2), (obj3, 3)]) + assert m + assert len(m) == 3 + assert list(m) == [obj1, obj2, obj3] + assert list(m.keys()) == [obj1, obj2, obj3] + assert list(m.values()) == [1, 2, 3] + assert list(m.items()) == [(obj1, 1), (obj2, 2), (obj3, 3)] + for k, v in m.items(): + assert k in m + assert m[k] == v + assert m.get(k) == v + assert v not in m + with pytest.raises(KeyError): + m[v] # type: ignore + assert m.get(v) is None + assert obj4 not in m + with pytest.raises(KeyError): + m[obj4] + assert m.get(obj4) is None + + def can_set_a_key_that_is_an_object(): + m = RefMap[dict, int]() + m[obj1] = 1 + assert m[obj1] == 1 + assert list(m) == [obj1] + with pytest.raises(KeyError): + m[obj2] + m[obj2] = 2 + assert m[obj1] == 1 + assert m[obj2] == 2 + assert list(m) == [obj1, obj2] + m[obj2] = 3 + assert m[obj1] == 1 + assert m[obj2] == 3 + assert list(m) == [obj1, obj2] + assert len(m) == 2 + + def can_delete_a_key_that_is_an_object(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2), (obj3, 3)]) + del m[obj2] + assert obj2 not in m + assert list(m) == [obj1, obj3] + with pytest.raises(KeyError): + del m[obj2] + assert list(m) == [obj1, obj3] + assert len(m) == 2 + + def 
can_update_a_map(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2)]) + m.update([]) + assert list(m.keys()) == [obj1, obj2] + assert len(m) == 2 + m.update([(obj2, 3), (obj3, 4)]) + assert list(m.keys()) == [obj1, obj2, obj3] + assert list(m.values()) == [1, 3, 4] + assert list(m.items()) == [(obj1, 1), (obj2, 3), (obj3, 4)] + assert obj3 in m + assert m[obj2] == 3 + assert m[obj3] == 4 + assert len(m) == 3 + + def can_get_the_representation_of_a_ref_map(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2)]) + assert repr(m) == ( + "RefMap([({'a': 1, 'b': 2, 'c': 3}, 1), ({'a': 1, 'b': 2, 'c': 3}, 2)])" + ) diff --git a/tests/pyutils/test_ref_set.py b/tests/pyutils/test_ref_set.py new file mode 100644 index 00000000..fead877b --- /dev/null +++ b/tests/pyutils/test_ref_set.py @@ -0,0 +1,89 @@ +import pytest + +from graphql.pyutils import RefSet + +obj1 = ["a", "b", "c"] +obj2 = obj1.copy() +obj3 = obj1.copy() +obj4 = obj1.copy() + + +def describe_object_set(): + def can_create_an_empty_set(): + s = RefSet[int]() + assert not s + assert len(s) == 0 + assert list(s) == [] + + def can_create_a_set_with_scalar_values(): + s = RefSet[str](obj1) + assert s + assert len(s) == 3 + assert list(s) == ["a", "b", "c"] + for v in s: + assert v in s + + def can_create_a_set_with_one_object_as_value(): + s = RefSet[list]([obj1]) + assert s + assert len(s) == 1 + assert obj1 in s + assert obj2 not in s + + def can_create_a_set_with_three_objects_as_keys(): + s = RefSet[list]([obj1, obj2, obj3]) + assert s + assert len(s) == 3 + assert list(s) == [obj1, obj2, obj3] + for v in s: + assert v in s + assert obj4 not in s + + def can_add_a_value_that_is_an_object(): + s = RefSet[list]() + s.add(obj1) + assert obj1 in s + assert list(s) == [obj1] + assert obj2 not in s + s.add(obj2) + assert obj1 in s + assert obj2 in s + assert list(s) == [obj1, obj2] + s.add(obj2) + assert obj1 in s + assert obj2 in s + assert list(s) == [obj1, obj2] + assert len(s) == 2 + + def 
can_remove_a_value_that_is_an_object(): + s = RefSet[list]([obj1, obj2, obj3]) + s.remove(obj2) + assert obj2 not in s + assert list(s) == [obj1, obj3] + with pytest.raises(KeyError): + s.remove(obj2) + assert list(s) == [obj1, obj3] + assert len(s) == 2 + + def can_discard_a_value_that_is_an_object(): + s = RefSet[list]([obj1, obj2, obj3]) + s.discard(obj2) + assert obj2 not in s + assert list(s) == [obj1, obj3] + s.discard(obj2) + assert list(s) == [obj1, obj3] + assert len(s) == 2 + + def can_update_a_set(): + s = RefSet[list]([obj1, obj2]) + s.update([]) + assert list(s) == [obj1, obj2] + assert len(s) == 2 + s.update([obj2, obj3]) + assert list(s) == [obj1, obj2, obj3] + assert obj3 in s + assert len(s) == 3 + + def can_get_the_representation_of_a_ref_set(): + s = RefSet[list]([obj1, obj2]) + assert repr(s) == ("RefSet([['a', 'b', 'c'], ['a', 'b', 'c']])") From 9d915b247a4d55d8a8a8211b9816f62bfcac0de2 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 19:52:45 +0100 Subject: [PATCH 205/230] Fix GitHub action for older Python versions --- .github/workflows/test.yml | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 298d3dd0..77f15bf1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,7 +9,31 @@ jobs: strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install "tox>=3.28,<5" "tox-gh-actions>=3.2,<4" + + - name: Run unit tests with tox + run: tox + + tests-old: + name: 🧪 Tests (older Python versions) + 
runs-on: ubuntu-22.04 + + strategy: + matrix: + python-version: ['3.7', '3.8'] steps: - uses: actions/checkout@v4 From a4e5778f6d9cdef5d8e564ecd3b66cd44e205315 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 20:18:59 +0100 Subject: [PATCH 206/230] Improve README file --- README.md | 43 +++++++++++++++++++++---------------------- tox.ini | 4 ++-- 2 files changed, 23 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 913f81e5..fa10c81c 100644 --- a/README.md +++ b/README.md @@ -10,14 +10,14 @@ a query language for APIs created by Facebook. ![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) -An extensive test suite with over 2300 unit tests and 100% coverage comprises a -replication of the complete test suite of GraphQL.js, making sure this port is -reliable and compatible with GraphQL.js. +An extensive test suite with over 2200 unit tests and 100% coverage replicates the +complete test suite of GraphQL.js, ensuring that this port is reliable and compatible +with GraphQL.js. -The current stable version 3.2.4 of GraphQL-core is up-to-date with GraphQL.js -version 16.8.2 and supports Python version 3.6 to 3.12. +The current stable version 3.2.5 of GraphQL-core is up-to-date with GraphQL.js +version 16.8.2 and supports Python versions 3.6 to 3.13. -You can also try out the latest alpha version 3.3.0a6 of GraphQL-core +You can also try out the latest alpha version 3.3.0a6 of GraphQL-core, which is up-to-date with GraphQL.js version 17.0.0a2. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. @@ -26,13 +26,12 @@ Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Changes in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. 
This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. -Therefore, we recommend something like `=~ 3.2.0` as version specifier +Therefore, we recommend using something like `~= 3.2.0` as the version specifier when including GraphQL-core as a dependency. - ## Documentation -A more detailed documentation for GraphQL-core 3 can be found at +More detailed documentation for GraphQL-core 3 can be found at [graphql-core-3.readthedocs.io](https://graphql-core-3.readthedocs.io/). The documentation for GraphQL.js can be found at [graphql.org/graphql-js/](https://graphql.org/graphql-js/). @@ -47,10 +46,10 @@ examples. A general overview of GraphQL is available in the [README](https://github.com/graphql/graphql-spec/blob/main/README.md) for the -[Specification for GraphQL](https://github.com/graphql/graphql-spec). That overview -describes a simple set of GraphQL examples that exist as [tests](tests) in this -repository. A good way to get started with this repository is to walk through that -README and the corresponding tests in parallel. +[Specification for GraphQL](https://github.com/graphql/graphql-spec). This overview +includes a simple set of GraphQL examples that are also available as [tests](tests) +in this repository. A good way to get started with this repository is to walk through +that README and the corresponding tests in parallel. ## Installation @@ -174,17 +173,17 @@ asyncio.run(main()) ## Goals and restrictions -GraphQL-core tries to reproduce the code of the reference implementation GraphQL.js -in Python as closely as possible and to stay up-to-date with the latest development of -GraphQL.js. +GraphQL-core aims to reproduce the code of the reference implementation GraphQL.js +in Python as closely as possible while staying up-to-date with the latest development +of GraphQL.js. 
-GraphQL-core 3 (formerly known as GraphQL-core-next) has been created as a modern +GraphQL-core 3 (formerly known as GraphQL-core-next) was created as a modern alternative to [GraphQL-core 2](https://github.com/graphql-python/graphql-core-legacy), -a prior work by Syrus Akbary, based on an older version of GraphQL.js and also -targeting older Python versions. Some parts of GraphQL-core 3 have been inspired by -GraphQL-core 2 or directly taken over with only slight modifications, but most of the -code has been re-implemented from scratch, replicating the latest code in GraphQL.js -very closely and adding type hints for Python. +a prior work by Syrus Akbary based on an older version of GraphQL.js that still +supported legacy Python versions. While some parts of GraphQL-core 3 were inspired by +GraphQL-core 2 or directly taken over with slight modifications, most of the code has +been re-implemented from scratch. This re-implementation closely replicates the latest +code in GraphQL.js and adds type hints for Python. 
Design goals for the GraphQL-core 3 library were: diff --git a/tox.ini b/tox.ini index c998afd8..7f6b4dcb 100644 --- a/tox.ini +++ b/tox.ini @@ -49,7 +49,7 @@ deps = pytest-timeout>=2.3,<3 py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 commands = - # to also run the time-consuming tests: tox -e py311 -- --run-slow - # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable + # to also run the time-consuming tests: tox -e py312 -- --run-slow + # to run the benchmarks: tox -e py312 -- -k benchmarks --benchmark-enable py3{7,8,9,10,11,13}, pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 666ecdc870ba7419ef2a5171f31c39552d1981de Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 21:33:09 +0100 Subject: [PATCH 207/230] Update dependencies --- poetry.lock | 503 +++++++++++------- pyproject.toml | 28 +- src/graphql/execution/values.py | 3 +- src/graphql/language/lexer.py | 6 +- src/graphql/type/definition.py | 3 +- src/graphql/type/introspection.py | 3 +- src/graphql/type/scalars.py | 2 +- src/graphql/type/validate.py | 3 +- .../utilities/find_breaking_changes.py | 4 +- .../rules/overlapping_fields_can_be_merged.py | 3 +- .../rules/unique_field_definition_names.py | 3 +- tests/error/test_graphql_error.py | 2 +- tests/execution/test_abstract.py | 4 +- tests/execution/test_oneof.py | 2 +- tests/execution/test_schema.py | 2 +- tests/language/test_lexer.py | 3 +- tests/language/test_printer.py | 3 +- tests/star_wars_schema.py | 10 +- tests/type/test_definition.py | 3 +- tests/type/test_validation.py | 3 +- tests/utilities/test_extend_schema.py | 3 +- tests/utilities/test_find_breaking_changes.py | 3 +- tests/utilities/test_type_info.py | 3 +- tests/validation/test_validation.py | 3 +- tox.ini | 12 +- 25 files changed, 367 insertions(+), 250 deletions(-) diff --git a/poetry.lock b/poetry.lock index abd0077f..2208d903 100644 --- a/poetry.lock 
+++ b/poetry.lock @@ -246,116 +246,103 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file 
= "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file 
= "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = 
"charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + 
{file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + 
{file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] @@ -531,6 +518,83 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "coverage" +version = "7.6.10" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = 
"coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = 
"coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = 
"coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = 
"sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = 
"coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "distlib" version = "0.3.9" @@ -690,13 +754,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.4" +version = 
"3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -858,49 +922,55 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.13.0" +version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = 
"mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = 
"mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy_extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -1070,13 +1140,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pygments" -version = "2.18.0" 
+version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -1167,22 +1237,40 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-asyncio" -version = "0.23.8" +version = "0.24.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] -pytest = ">=7.0.0,<9" +pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-asyncio" +version = "0.25.2" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, + {file = "pytest_asyncio-0.25.2.tar.gz", hash = 
"sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "pytest-benchmark" version = "4.0.0" @@ -1203,6 +1291,26 @@ aspect = ["aspectlib"] elasticsearch = ["elasticsearch"] histogram = ["pygal", "pygaljs"] +[[package]] +name = "pytest-benchmark" +version = "5.1.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105"}, + {file = "pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=8.1" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs", "setuptools"] + [[package]] name = "pytest-codspeed" version = "2.2.1" @@ -1226,22 +1334,23 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] name = "pytest-codspeed" -version = "3.1.0" +version = "3.1.2" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb7c16e5a64cb30bad30f5204c7690f3cbc9ae5b9839ce187ef1727aa5d2d9c"}, - {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23910893c22ceef6efbdf85d80e803b7fb4a231c9e7676ab08f5ddfc228438"}, - {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:fb1495a633a33e15268a1f97d91a4809c868de06319db50cf97b4e9fa426372c"}, - {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd8a54b99207bd25a4c3f64d9a83ac0f3def91cdd87204ca70a49f822ba919c"}, - {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4d1ac896ebaea5b365e69b41319b4d09b57dab85ec6234f6ff26116b3795f03"}, - {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f0c1857a0a6cce6a23c49f98c588c2eef66db353c76ecbb2fb65c1a2b33a8d5"}, - {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4731a7cf1d8d38f58140d51faa69b7c1401234c59d9759a2507df570c805b11"}, - {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f2e4b63260f65493b8d42c8167f831b8ed90788f81eb4eb95a103ee6aa4294"}, - {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db44099b3f1ec1c9c41f0267c4d57d94e31667f4cb3fb4b71901561e8ab8bc98"}, - {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a533c1ad3cc60f07be432864c83d1769ce2877753ac778e1bfc5a9821f5c6ddf"}, - {file = "pytest_codspeed-3.1.0.tar.gz", hash = "sha256:f29641d27b4ded133b1058a4c859e510a2612ad4217ef9a839ba61750abd2f8a"}, + {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, + {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, + {file = 
"pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84e591a7a0f67d45e2dc9fd05b276971a3aabcab7478fe43363ebefec1358f4"}, + {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6ae6d094247156407770e6b517af70b98862dd59a3c31034aede11d5f71c32c"}, + {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0f264991de5b5cdc118b96fc671386cca3f0f34e411482939bf2459dc599097"}, + {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0695a4bcd5ff04e8379124dba5d9795ea5e0cadf38be7a0406432fc1467b555"}, + {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc356c8dcaaa883af83310f397ac06c96fac9b8a1146e303d4b374b2cb46a18"}, + {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc8a5d0366322a75cf562f7d8d672d28c1cf6948695c4dddca50331e08f6b3d5"}, + {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c5fe7a19b72f54f217480b3b527102579547b1de9fe3acd9e66cb4629ff46c8"}, + {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b67205755a665593f6521a98317d02a9d07d6fdc593f6634de2c94dea47a3055"}, + {file = "pytest_codspeed-3.1.2-py3-none-any.whl", hash = "sha256:5e7ed0315e33496c5c07dba262b50303b8d0bc4c3d10bf1d422a41e70783f1cb"}, + {file = "pytest_codspeed-3.1.2.tar.gz", hash = "sha256:09c1733af3aab35e94a621aa510f2d2114f65591e6f644c42ca3f67547edad4b"}, ] [package.dependencies] @@ -1291,6 +1400,24 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] 
+[[package]] +name = "pytest-cov" +version = "6.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + [[package]] name = "pytest-describe" version = "2.2.0" @@ -1393,29 +1520,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.8.3" +version = "0.9.2" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"}, - {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"}, - {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"}, - {file 
= "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"}, - {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"}, - {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"}, - {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"}, - {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"}, + {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"}, + {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"}, + {file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"}, + {file = 
"ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"}, + {file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"}, + {file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"}, + {file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = 
"sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"}, + {file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"}, ] [[package]] @@ -1896,13 +2023,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.28.0" +version = "20.29.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, - {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, + {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, + {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, ] [package.dependencies] @@ -1970,4 +2097,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "2f41e2d562a00d6905a8b02cd7ccf5dbcc2fb0218476addd64faff18ee8b46bf" +content-hash = "37c41caf594570c2c84273ca5abc41ab2ec53d4e05a7bf6440b3e10e6de122d7" diff --git a/pyproject.toml b/pyproject.toml index 4d366945..bc191f97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ Changelog = "https://github.com/graphql-python/graphql-core/releases" [tool.poetry.dependencies] python = "^3.7" typing-extensions = [ - { version = "^4.12", python = ">=3.8,<3.10" }, + { version = "^4.12.2", python = ">=3.8,<3.10" }, { version = "^4.7.1", python = "<3.8" }, ] @@ -57,18 +57,23 @@ pytest = [ { version = "^7.4", python = "<3.8" } ] pytest-asyncio = [ - { version = "^0.23.8", python = ">=3.8" }, + { version = "^0.25.2", python = ">=3.9" }, + { version = "~0.24.0", python = ">=3.8,<3.9" }, { version = "~0.21.1", python = "<3.8" } ] -pytest-benchmark = 
"^4.0" +pytest-benchmark = [ + { version = "^5.1", python = ">=3.9" }, + { version = "^4.0", python = "<3.9" } +] pytest-cov = [ - { version = "^5.0", python = ">=3.8" }, + { version = "^6.0", python = ">=3.9" }, + { version = "^5.0", python = ">=3.8,<3.9" }, { version = "^4.1", python = "<3.8" }, ] pytest-describe = "^2.2" pytest-timeout = "^2.3" pytest-codspeed = [ - { version = "^3.1.0", python = ">=3.9" }, + { version = "^3.1.2", python = ">=3.9" }, { version = "^2.2.1", python = "<3.8" } ] tox = [ @@ -80,22 +85,22 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.8,<0.9" +ruff = ">=0.9,<0.10" mypy = [ - { version = "^1.12", python = ">=3.8" }, + { version = "^1.14", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] -bump2version = ">=1.0,<2" +bump2version = ">=1,<2" [tool.poetry.group.doc] optional = true [tool.poetry.group.doc.dependencies] sphinx = [ - { version = ">=7,<8", python = ">=3.8" }, + { version = ">=7,<9", python = ">=3.8" }, { version = ">=4,<6", python = "<3.8" } ] -sphinx_rtd_theme = "^2.0" +sphinx_rtd_theme = ">=2,<4" [tool.ruff] line-length = 88 @@ -149,6 +154,7 @@ select = [ "YTT", # flake8-2020 ] ignore = [ + "A005", # allow using standard-lib module names "ANN401", # allow explicit Any "COM812", # allow trailing commas for auto-formatting "D105", "D107", # no docstring needed for magic methods @@ -324,5 +330,5 @@ testpaths = ["tests"] asyncio_default_fixture_loop_scope = "function" [build-system] -requires = ["poetry_core>=1.6.1,<2"] +requires = ["poetry_core>=1.6.1,<3"] build-backend = "poetry.core.masonry.api" diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 1c223b60..fda472de 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -175,8 +175,7 @@ def get_argument_values( coerced_values[arg_def.out_name or name] = arg_def.default_value elif is_non_null_type(arg_type): # pragma: no cover else msg = ( - f"Argument '{name}' of required 
type '{arg_type}'" - " was not provided." + f"Argument '{name}' of required type '{arg_type}' was not provided." ) raise GraphQLError(msg, node) continue # pragma: no cover diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index f93bd3b7..9ec37427 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -342,7 +342,7 @@ def read_escaped_unicode_variable_width(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid Unicode escape sequence: '{body[position: position + size]}'.", + f"Invalid Unicode escape sequence: '{body[position : position + size]}'.", ) def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: @@ -368,7 +368,7 @@ def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid Unicode escape sequence: '{body[position: position + 6]}'.", + f"Invalid Unicode escape sequence: '{body[position : position + 6]}'.", ) def read_escaped_character(self, position: int) -> EscapeSequence: @@ -380,7 +380,7 @@ def read_escaped_character(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid character escape sequence: '{body[position: position + 2]}'.", + f"Invalid character escape sequence: '{body[position : position + 2]}'.", ) def read_block_string(self, start: int) -> Token: diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index f49691e7..480c1879 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -386,8 +386,7 @@ def __init__( self.parse_literal = parse_literal # type: ignore if parse_literal is not None and parse_value is None: msg = ( - f"{name} must provide" - " both 'parse_value' and 'parse_literal' functions." + f"{name} must provide both 'parse_value' and 'parse_literal' functions." 
) raise TypeError(msg) self.specified_by_url = specified_by_url diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index e59386a4..313c3679 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -639,8 +639,7 @@ class TypeKind(Enum): ), "NON_NULL": GraphQLEnumValue( TypeKind.NON_NULL, - description="Indicates this type is a non-null." - " `ofType` is a valid field.", + description="Indicates this type is a non-null. `ofType` is a valid field.", ), }, ) diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 1bc98c21..d35e6e26 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -315,7 +315,7 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: GraphQLBoolean, GraphQLID, ) -} +} # pyright: ignore def is_specified_scalar_type(type_: GraphQLNamedType) -> TypeGuard[GraphQLScalarType]: diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 109667f1..d5f8f8ce 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -454,8 +454,7 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: if not fields: self.report_error( - f"Input Object type {input_obj.name}" - " must define one or more fields.", + f"Input Object type {input_obj.name} must define one or more fields.", [input_obj.ast_node, *input_obj.extension_ast_nodes], ) diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index d436f1d4..d2a03ad2 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -294,7 +294,7 @@ def find_union_type_changes( schema_changes.append( DangerousChange( DangerousChangeType.TYPE_ADDED_TO_UNION, - f"{possible_type.name} was added" f" to union type {old_type.name}.", + f"{possible_type.name} was added to union type {old_type.name}.", ) ) @@ -407,7 +407,7 @@ def 
find_arg_changes( schema_changes.append( BreakingChange( BreakingChangeType.ARG_REMOVED, - f"{old_type.name}.{field_name} arg" f" {arg_name} was removed.", + f"{old_type.name}.{field_name} arg {arg_name} was removed.", ) ) diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index b077958b..58a7a3b7 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -44,8 +44,7 @@ def reason_message(reason: ConflictReasonMessage) -> str: if isinstance(reason, list): return " and ".join( - f"subfields '{response_name}' conflict" - f" because {reason_message(sub_reason)}" + f"subfields '{response_name}' conflict because {reason_message(sub_reason)}" for response_name, sub_reason in reason ) return reason diff --git a/src/graphql/validation/rules/unique_field_definition_names.py b/src/graphql/validation/rules/unique_field_definition_names.py index 8451bc27..39df7203 100644 --- a/src/graphql/validation/rules/unique_field_definition_names.py +++ b/src/graphql/validation/rules/unique_field_definition_names.py @@ -47,8 +47,7 @@ def check_field_uniqueness( elif field_name in field_names: self.report_error( GraphQLError( - f"Field '{type_name}.{field_name}'" - " can only be defined once.", + f"Field '{type_name}.{field_name}' can only be defined once.", [field_names[field_name], field_def.name], ) ) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index d01e1e8a..fbc8602e 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -224,7 +224,7 @@ def serializes_to_include_all_standard_fields(): extensions = {"foo": "bar "} e_full = GraphQLError("msg", field_node, None, None, path, None, extensions) assert str(e_full) == ( - "msg\n\nGraphQL request:2:3\n" "1 | {\n2 | field\n | ^\n3 | }" + "msg\n\nGraphQL request:2:3\n1 | {\n2 | field\n | 
^\n3 | }" ) assert repr(e_full) == ( "GraphQLError('msg', locations=[SourceLocation(line=2, column=3)]," diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index 75a1e875..ddb01345 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -23,14 +23,14 @@ def sync_and_async(spec): """Decorator for running a test synchronously and asynchronously.""" return pytest.mark.asyncio( - pytest.mark.parametrize("sync", (True, False), ids=("sync", "async"))(spec) + pytest.mark.parametrize("sync", [True, False], ids=("sync", "async"))(spec) ) def access_variants(spec): """Decorator for tests with dict and object access, including inheritance.""" return pytest.mark.asyncio( - pytest.mark.parametrize("access", ("dict", "object", "inheritance"))(spec) + pytest.mark.parametrize("access", ["dict", "object", "inheritance"])(spec) ) diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py index 81f3d224..2040b1a7 100644 --- a/tests/execution/test_oneof.py +++ b/tests/execution/test_oneof.py @@ -35,7 +35,7 @@ def execute_query( def describe_execute_handles_one_of_input_objects(): def describe_one_of_input_objects(): root_value = { - "test": lambda _info, input: input, + "test": lambda _info, input: input, # noqa: A006 } def accepts_a_good_default_value(): diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index 593c1cf6..7096c5fb 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -78,7 +78,7 @@ def __init__(self, id: int): # noqa: A002 "article": GraphQLField( BlogArticle, args={"id": GraphQLArgument(GraphQLID)}, - resolve=lambda _obj, _info, id: Article(id), + resolve=lambda _obj, _info, id: Article(id), # noqa: A006 ), "feed": GraphQLField( GraphQLList(BlogArticle), diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index d2d24931..a44e859d 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -394,8 
+394,7 @@ def lexes_block_strings(): TokenKind.BLOCK_STRING, 0, 19, 1, 1, "slashes \\\\ \\/" ) assert lex_one( - '"""\n\n spans\n multiple\n' - ' lines\n\n """' + '"""\n\n spans\n multiple\n lines\n\n """' ) == Token(TokenKind.BLOCK_STRING, 0, 68, 1, 1, "spans\n multiple\n lines") def advance_line_after_lexing_multiline_block_string(): diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index b6ac41e0..42531096 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -60,8 +60,7 @@ def correctly_prints_mutation_operation_with_artifacts(): def prints_query_with_variable_directives(): query_ast_with_variable_directive = parse( - "query ($foo: TestType = { a: 123 }" - " @testDirective(if: true) @test) { id }" + "query ($foo: TestType = { a: 123 } @testDirective(if: true) @test) { id }" ) assert print_ast(query_ast_with_variable_directive) == dedent( """ diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py index 575bf482..5f4c0809 100644 --- a/tests/star_wars_schema.py +++ b/tests/star_wars_schema.py @@ -140,8 +140,7 @@ "name": GraphQLField(GraphQLString, description="The name of the human."), "friends": GraphQLField( GraphQLList(character_interface), - description="The friends of the human," - " or an empty list if they have none.", + description="The friends of the human, or an empty list if they have none.", resolve=lambda human, _info: get_friends(human), ), "appearsIn": GraphQLField( @@ -182,8 +181,7 @@ "name": GraphQLField(GraphQLString, description="The name of the droid."), "friends": GraphQLField( GraphQLList(character_interface), - description="The friends of the droid," - " or an empty list if they have none.", + description="The friends of the droid, or an empty list if they have none.", resolve=lambda droid, _info: get_friends(droid), ), "appearsIn": GraphQLField( @@ -238,7 +236,7 @@ GraphQLNonNull(GraphQLString), description="id of the human" ) }, - resolve=lambda _source, _info, id: 
get_human(id), + resolve=lambda _source, _info, id: get_human(id), # noqa: A006 ), "droid": GraphQLField( droid_type, @@ -247,7 +245,7 @@ GraphQLNonNull(GraphQLString), description="id of the droid" ) }, - resolve=lambda _source, _info, id: get_droid(id), + resolve=lambda _source, _info, id: get_droid(id), # noqa: A006 ), }, ) diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index a8b7c24b..ac7830ef 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -198,8 +198,7 @@ def parse_literal(_node: ValueNode, _vars=None): with pytest.raises(TypeError) as exc_info: GraphQLScalarType("SomeScalar", parse_literal=parse_literal) assert str(exc_info.value) == ( - "SomeScalar must provide both" - " 'parse_value' and 'parse_literal' functions." + "SomeScalar must provide both 'parse_value' and 'parse_literal' functions." ) def pickles_a_custom_scalar_type(): diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 087832ba..a4efe041 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -242,8 +242,7 @@ def rejects_a_schema_whose_query_root_type_is_not_an_object_type(): ) assert validate_schema(schema) == [ { - "message": "Query root type must be Object type," - " it cannot be Query.", + "message": "Query root type must be Object type, it cannot be Query.", "locations": [(2, 13)], } ] diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 28ac0be4..1eb98d38 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -1363,8 +1363,7 @@ def does_not_allow_replacing_a_default_directive(): with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value).startswith( - "Directive '@include' already exists in the schema." - " It cannot be redefined." + "Directive '@include' already exists in the schema. It cannot be redefined." 
) def does_not_allow_replacing_an_existing_enum_value(): diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index 24d03704..bfcc7e72 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -755,8 +755,7 @@ def should_detect_all_breaking_changes(): ), ( BreakingChangeType.TYPE_CHANGED_KIND, - "TypeThatChangesType changed from an Object type to an" - " Interface type.", + "TypeThatChangesType changed from an Object type to an Interface type.", ), ( BreakingChangeType.FIELD_REMOVED, diff --git a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index d23b878b..01f7e464 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -375,8 +375,7 @@ def leave(*args): assert print_ast(edited_ast) == print_ast( parse( - "{ human(id: 4) { name, pets { __typename } }," - " alien { __typename } }" + "{ human(id: 4) { name, pets { __typename } }, alien { __typename } }" ) ) diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index e8f08fe1..78efbce9 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -71,8 +71,7 @@ def deprecated_validates_using_a_custom_type_info(): "Cannot query field 'human' on type 'QueryRoot'. Did you mean 'human'?", "Cannot query field 'meowsVolume' on type 'Cat'." " Did you mean 'meowsVolume'?", - "Cannot query field 'barkVolume' on type 'Dog'." - " Did you mean 'barkVolume'?", + "Cannot query field 'barkVolume' on type 'Dog'. 
Did you mean 'barkVolume'?", ] def validates_using_a_custom_rule(): diff --git a/tox.ini b/tox.ini index 7f6b4dcb..7f2e07d4 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.8,<0.9 +deps = ruff>=0.9,<0.10 commands = ruff check src tests ruff format --check src tests @@ -26,7 +26,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.12,<2 + mypy>=1.14,<2 pytest>=8.3,<9 commands = mypy src tests @@ -34,8 +34,8 @@ commands = [testenv:docs] basepython = python3.12 deps = - sphinx>=7,<8 - sphinx_rtd_theme>=2.0,<3 + sphinx>=8,<9 + sphinx_rtd_theme>=3,<4 commands = sphinx-build -b html -nEW docs docs/_build/html @@ -43,8 +43,8 @@ commands = deps = pytest>=7.4,<9 pytest-asyncio>=0.21.1,<1 - pytest-benchmark>=4,<5 - pytest-cov>=4.1,<6 + pytest-benchmark>=4,<6 + pytest-cov>=4.1,<7 pytest-describe>=2.2,<3 pytest-timeout>=2.3,<3 py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 From d9c5e1dedb682264788c7c681529268422c987d1 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 19 Jan 2025 16:50:36 +0100 Subject: [PATCH 208/230] incremental delivery: add pending notifications Replicates graphql/graphql-js@fe65bc8be6813d03870ba0d7faffa733c1ba7351 --- docs/conf.py | 4 + .../execution/incremental_publisher.py | 225 ++++++++++---- tests/execution/test_defer.py | 281 +++++++++++++++--- tests/execution/test_execution_result.py | 12 +- tests/execution/test_mutations.py | 12 +- tests/execution/test_stream.py | 88 +++++- 6 files changed, 507 insertions(+), 115 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d3de91ea..1d7afde0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -153,6 +153,7 @@ ExperimentalIncrementalExecutionResults FieldGroup FormattedIncrementalResult +FormattedPendingResult FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType @@ -167,6 +168,7 @@ IncrementalResult InitialResultRecord Middleware +PendingResult StreamItemsRecord StreamRecord SubsequentDataRecord 
@@ -183,8 +185,10 @@ graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.execution.incremental_publisher.DeferredGroupedFieldSetRecord graphql.execution.incremental_publisher.FormattedCompletedResult +graphql.execution.incremental_publisher.FormattedPendingResult graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.InitialResultRecord +graphql.execution.incremental_publisher.PendingResult graphql.execution.incremental_publisher.StreamItemsRecord graphql.execution.incremental_publisher.StreamRecord graphql.execution.Middleware diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index 18890fb3..4ba1d553 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -13,7 +13,6 @@ Collection, Iterator, NamedTuple, - Sequence, Union, ) @@ -22,6 +21,8 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict +from ..pyutils import RefSet + if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError from ..pyutils import Path @@ -55,6 +56,63 @@ suppress_key_error = suppress(KeyError) +class FormattedPendingResult(TypedDict, total=False): + """Formatted pending execution result""" + + path: list[str | int] + label: str + + +class PendingResult: + """Pending execution result""" + + path: list[str | int] + label: str | None + + __slots__ = "label", "path" + + def __init__( + self, + path: list[str | int], + label: str | None = None, + ) -> None: + self.path = path + self.label = label + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedPendingResult: + """Get pending result formatted according to the specification.""" + formatted: FormattedPendingResult = {"path": 
self.path} + if self.label is not None: + formatted["label"] = self.label + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return (other.get("path") or None) == (self.path or None) and ( + other.get("label") or None + ) == (self.label or None) + + if isinstance(other, tuple): + size = len(other) + return 1 < size < 3 and (self.path, self.label)[:size] == other + return ( + isinstance(other, self.__class__) + and other.path == self.path + and other.label == self.label + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + class FormattedCompletedResult(TypedDict, total=False): """Formatted completed execution result""" @@ -93,7 +151,7 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedCompletedResult: - """Get execution result formatted according to the specification.""" + """Get completed result formatted according to the specification.""" formatted: FormattedCompletedResult = {"path": self.path} if self.label is not None: formatted["label"] = self.label @@ -104,9 +162,9 @@ def formatted(self) -> FormattedCompletedResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - other.get("path") == self.path - and ("label" not in other or other["label"] == self.label) - and ("errors" not in other or other["errors"] == self.errors) + (other.get("path") or None) == (self.path or None) + and (other.get("label") or None) == (self.label or None) + and (other.get("errors") or None) == (self.errors or None) ) if isinstance(other, tuple): size = len(other) @@ -125,6 +183,7 @@ def __ne__(self, other: object) -> bool: class IncrementalUpdate(NamedTuple): """Incremental update""" + pending: list[PendingResult] incremental: list[IncrementalResult] completed: list[CompletedResult] @@ -181,13 +240,11 @@ def formatted(self) -> FormattedExecutionResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): - if "extensions" not in other: - return other 
== {"data": self.data, "errors": self.errors} - return other == { - "data": self.data, - "errors": self.errors, - "extensions": self.extensions, - } + return ( + (other.get("data") == self.data) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("extensions") or None) == (self.extensions or None) + ) if isinstance(other, tuple): if len(other) == 2: return other == (self.data, self.errors) @@ -208,40 +265,42 @@ class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): data: dict[str, Any] | None errors: list[GraphQLFormattedError] + pending: list[FormattedPendingResult] hasNext: bool incremental: list[FormattedIncrementalResult] extensions: dict[str, Any] class InitialIncrementalExecutionResult: - """Initial incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. - """ + """Initial incremental execution result.""" data: dict[str, Any] | None errors: list[GraphQLError] | None + pending: list[PendingResult] has_next: bool extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "has_next" + __slots__ = "data", "errors", "extensions", "has_next", "pending" def __init__( self, data: dict[str, Any] | None = None, errors: list[GraphQLError] | None = None, + pending: list[PendingResult] | None = None, has_next: bool = False, extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors + self.pending = pending or [] self.has_next = has_next self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.pending: + args.append(f"pending={self.pending!r}") if self.has_next: args.append("has_next") if self.extensions: @@ -254,6 +313,7 @@ def formatted(self) -> FormattedInitialIncrementalExecutionResult: formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} 
if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] + formatted["pending"] = [pending.formatted for pending in self.pending] formatted["hasNext"] = self.has_next if self.extensions is not None: formatted["extensions"] = self.extensions @@ -263,19 +323,19 @@ def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data - and other.get("errors") == self.errors - and ("hasNext" not in other or other["hasNext"] == self.has_next) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("pending") or None) == (self.pending or None) + and (other.get("hasNext") or None) == (self.has_next or None) + and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 + 1 < size < 6 and ( self.data, self.errors, + self.pending, self.has_next, self.extensions, )[:size] @@ -285,6 +345,7 @@ def __eq__(self, other: object) -> bool: isinstance(other, self.__class__) and other.data == self.data and other.errors == self.errors + and other.pending == self.pending and other.has_next == self.has_next and other.extensions == self.extensions ) @@ -356,11 +417,9 @@ def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data - and other.get("errors") == self.errors - and ("path" not in other or other["path"] == self.path) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("path") or None) == (self.path or None) + and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) @@ -435,12 +494,10 @@ def formatted(self) -> FormattedIncrementalStreamResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): 
return ( - other.get("items") == self.items - and other.get("errors") == self.errors - and ("path" not in other or other["path"] == self.path) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + (other.get("items") or None) == (self.items or None) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("path", None) == (self.path or None)) + and (other.get("extensions", None) == (self.extensions or None)) ) if isinstance(other, tuple): size = len(other) @@ -472,33 +529,33 @@ class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): """Formatted subsequent incremental execution result""" hasNext: bool + pending: list[FormattedPendingResult] incremental: list[FormattedIncrementalResult] completed: list[FormattedCompletedResult] extensions: dict[str, Any] class SubsequentIncrementalExecutionResult: - """Subsequent incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. 
- """ + """Subsequent incremental execution result.""" - __slots__ = "completed", "extensions", "has_next", "incremental" + __slots__ = "completed", "extensions", "has_next", "incremental", "pending" has_next: bool - incremental: Sequence[IncrementalResult] | None - completed: Sequence[CompletedResult] | None + pending: list[PendingResult] | None + incremental: list[IncrementalResult] | None + completed: list[CompletedResult] | None extensions: dict[str, Any] | None def __init__( self, has_next: bool = False, - incremental: Sequence[IncrementalResult] | None = None, - completed: Sequence[CompletedResult] | None = None, + pending: list[PendingResult] | None = None, + incremental: list[IncrementalResult] | None = None, + completed: list[CompletedResult] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.has_next = has_next + self.pending = pending or [] self.incremental = incremental self.completed = completed self.extensions = extensions @@ -508,6 +565,8 @@ def __repr__(self) -> str: args: list[str] = [] if self.has_next: args.append("has_next") + if self.pending: + args.append(f"pending[{len(self.pending)}]") if self.incremental: args.append(f"incremental[{len(self.incremental)}]") if self.completed: @@ -521,6 +580,8 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: """Get execution result formatted according to the specification.""" formatted: FormattedSubsequentIncrementalExecutionResult = {} formatted["hasNext"] = self.has_next + if self.pending: + formatted["pending"] = [result.formatted for result in self.pending] if self.incremental: formatted["incremental"] = [result.formatted for result in self.incremental] if self.completed: @@ -532,22 +593,19 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - ("hasNext" in other and other["hasNext"] == self.has_next) - and ( - "incremental" not in other - or 
other["incremental"] == self.incremental - ) - and ("completed" not in other or other["completed"] == self.completed) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + (other.get("hasNext") or None) == (self.has_next or None) + and (other.get("pending") or None) == (self.pending or None) + and (other.get("incremental") or None) == (self.incremental or None) + and (other.get("completed") or None) == (self.completed or None) + and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 + 1 < size < 6 and ( self.has_next, + self.pending, self.incremental, self.completed, self.extensions, @@ -557,6 +615,7 @@ def __eq__(self, other: object) -> bool: return ( isinstance(other, self.__class__) and other.has_next == self.has_next + and self.pending == other.pending and other.incremental == self.incremental and other.completed == self.completed and other.extensions == self.extensions @@ -729,11 +788,19 @@ def build_data_response( error.message, ) ) - if self._pending: + pending = self._pending + if pending: + pending_sources: RefSet[DeferredFragmentRecord | StreamRecord] = RefSet( + subsequent_result_record.stream_record + if isinstance(subsequent_result_record, StreamItemsRecord) + else subsequent_result_record + for subsequent_result_record in pending + ) return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( data, errors, + pending=self._pending_sources_to_results(pending_sources), has_next=True, ), subsequent_results=self._subscribe(), @@ -783,6 +850,19 @@ def filter( if early_returns: self._add_task(gather(*early_returns)) + def _pending_sources_to_results( + self, + pending_sources: RefSet[DeferredFragmentRecord | StreamRecord], + ) -> list[PendingResult]: + """Convert pending sources to pending results.""" + pending_results: list[PendingResult] = [] + for pending_source in pending_sources: + 
pending_source.pending_sent = True + pending_results.append( + PendingResult(pending_source.path, pending_source.label) + ) + return pending_results + async def _subscribe( self, ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: @@ -854,14 +934,18 @@ def _get_incremental_result( ) -> SubsequentIncrementalExecutionResult | None: """Get the incremental result with the completed records.""" update = self._process_pending(completed_records) - incremental, completed = update.incremental, update.completed + pending, incremental, completed = ( + update.pending, + update.incremental, + update.completed, + ) has_next = bool(self._pending) if not incremental and not completed and has_next: return None return SubsequentIncrementalExecutionResult( - has_next, incremental or None, completed or None + has_next, pending or None, incremental or None, completed or None ) def _process_pending( @@ -869,6 +953,7 @@ def _process_pending( completed_records: Collection[SubsequentResultRecord], ) -> IncrementalUpdate: """Process the pending records.""" + new_pending_sources: RefSet[DeferredFragmentRecord | StreamRecord] = RefSet() incremental_results: list[IncrementalResult] = [] completed_results: list[CompletedResult] = [] to_result = self._completed_record_to_result @@ -876,13 +961,20 @@ def _process_pending( for child in subsequent_result_record.children: if child.filtered: continue + pending_source: DeferredFragmentRecord | StreamRecord = ( + child.stream_record + if isinstance(child, StreamItemsRecord) + else child + ) + if not pending_source.pending_sent: + new_pending_sources.add(pending_source) self._publish(child) incremental_result: IncrementalResult if isinstance(subsequent_result_record, StreamItemsRecord): if subsequent_result_record.is_final_record: - completed_results.append( - to_result(subsequent_result_record.stream_record) - ) + stream_record = subsequent_result_record.stream_record + new_pending_sources.discard(stream_record) + 
completed_results.append(to_result(stream_record)) if subsequent_result_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload continue @@ -895,6 +987,7 @@ def _process_pending( ) incremental_results.append(incremental_result) else: + new_pending_sources.discard(subsequent_result_record) completed_results.append(to_result(subsequent_result_record)) if subsequent_result_record.errors: continue @@ -909,7 +1002,11 @@ def _process_pending( deferred_grouped_field_set_record.path, ) incremental_results.append(incremental_result) - return IncrementalUpdate(incremental_results, completed_results) + return IncrementalUpdate( + self._pending_sources_to_results(new_pending_sources), + incremental_results, + completed_results, + ) @staticmethod def _completed_record_to_result( @@ -1052,6 +1149,7 @@ class DeferredFragmentRecord: deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None] errors: list[GraphQLError] filtered: bool + pending_sent: bool _pending: dict[DeferredGroupedFieldSetRecord, None] def __init__(self, path: Path | None = None, label: str | None = None) -> None: @@ -1059,6 +1157,7 @@ def __init__(self, path: Path | None = None, label: str | None = None) -> None: self.label = label self.children = {} self.filtered = False + self.pending_sent = False self.deferred_grouped_field_set_records = {} self.errors = [] self._pending = {} @@ -1080,6 +1179,7 @@ class StreamRecord: path: list[str | int] errors: list[GraphQLError] early_return: Callable[[], Awaitable[Any]] | None + pending_sent: bool def __init__( self, @@ -1091,6 +1191,7 @@ def __init__( self.label = label self.errors = [] self.early_return = early_return + self.pending_sent = False def __repr__(self) -> str: name = self.__class__.__name__ diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index d6d17105..2de10173 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -1,7 +1,7 @@ from 
__future__ import annotations from asyncio import sleep -from typing import Any, AsyncGenerator, NamedTuple +from typing import Any, AsyncGenerator, NamedTuple, cast import pytest @@ -10,6 +10,7 @@ ExecutionResult, ExperimentalIncrementalExecutionResults, IncrementalDeferResult, + IncrementalResult, InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, execute, @@ -19,6 +20,7 @@ CompletedResult, DeferredFragmentRecord, DeferredGroupedFieldSetRecord, + PendingResult, StreamItemsRecord, StreamRecord, ) @@ -193,6 +195,31 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_defer_directive(): + def can_format_and_print_pending_result(): + result = PendingResult([]) + assert result.formatted == {"path": []} + assert str(result) == "PendingResult(path=[])" + + result = PendingResult(path=["foo", 1], label="bar") + assert result.formatted == { + "path": ["foo", 1], + "label": "bar", + } + assert str(result) == "PendingResult(path=['foo', 1], label='bar')" + + def can_compare_pending_result(): + args: dict[str, Any] = {"path": ["foo", 1], "label": "bar"} + result = PendingResult(**args) + assert result == PendingResult(**args) + assert result != CompletedResult(**modified_args(args, path=["foo", 2])) + assert result != CompletedResult(**modified_args(args, label="baz")) + assert result == tuple(args.values()) + assert result != tuple(args.values())[:1] + assert result != tuple(args.values())[:1] + ("baz",) + assert result == args + assert result != {**args, "path": ["foo", 2]} + assert result != {**args, "label": "baz"} + def can_format_and_print_completed_result(): result = CompletedResult([]) assert result.formatted == {"path": []} @@ -224,10 +251,9 @@ def can_compare_completed_result(): assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] assert result == args - assert result == dict(list(args.items())[:2]) - assert result != dict( - list(args.items())[:1] + [("errors", 
[GraphQLError("oops")])] - ) + assert result != {**args, "path": ["foo", 2]} + assert result != {**args, "label": "baz"} + assert result != {**args, "errors": [{"message": "oops"}]} def can_format_and_print_incremental_defer_result(): result = IncrementalDeferResult() @@ -276,20 +302,20 @@ def can_compare_incremental_defer_result(): assert result != tuple(args.values())[:1] assert result != ({"hello": "world"}, []) assert result == args - assert result == dict(list(args.items())[:2]) - assert result == dict(list(args.items())[:3]) - assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) - assert result != {**args, "extensions": {"baz": 3}} + assert result != {**args, "data": {"hello": "foo"}} + assert result != {**args, "errors": []} + assert result != {**args, "path": ["foo", 2]} + assert result != {**args, "extensions": {"baz": 1}} def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult() - assert result.formatted == {"data": None, "hasNext": False} + assert result.formatted == {"data": None, "hasNext": False, "pending": []} assert ( str(result) == "InitialIncrementalExecutionResult(data=None, errors=None)" ) result = InitialIncrementalExecutionResult(has_next=True) - assert result.formatted == {"data": None, "hasNext": True} + assert result.formatted == {"data": None, "hasNext": True, "pending": []} assert ( str(result) == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" @@ -298,25 +324,28 @@ def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult( data={"hello": "world"}, errors=[GraphQLError("msg")], + pending=[PendingResult(["bar"])], has_next=True, extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, - "errors": [GraphQLError("msg")], + "errors": [{"message": "msg"}], + "pending": [{"path": ["bar"]}], "hasNext": True, "extensions": {"baz": 2}, } assert ( str(result) == 
"InitialIncrementalExecutionResult(" - "data={'hello': 'world'}, errors=[GraphQLError('msg')], has_next," - " extensions={'baz': 2})" + "data={'hello': 'world'}, errors=[GraphQLError('msg')]," + " pending=[PendingResult(path=['bar'])], has_next, extensions={'baz': 2})" ) def can_compare_initial_incremental_execution_result(): args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], "has_next": True, "extensions": {"baz": 2}, } @@ -328,6 +357,9 @@ def can_compare_initial_incremental_execution_result(): assert result != InitialIncrementalExecutionResult( **modified_args(args, errors=[]) ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, pending=[]) + ) assert result != InitialIncrementalExecutionResult( **modified_args(args, has_next=False) ) @@ -335,6 +367,7 @@ def can_compare_initial_incremental_execution_result(): **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) + assert result == tuple(args.values())[:5] assert result == tuple(args.values())[:4] assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] @@ -344,20 +377,40 @@ def can_compare_initial_incremental_execution_result(): assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], "hasNext": True, "extensions": {"baz": 2}, } - assert result == { + assert result != { + "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { + "data": {"hello": "world"}, + "pending": [PendingResult(["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], "hasNext": True, + "extensions": {"baz": 2}, } assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "hasNext": False, + "pending": [PendingResult(["bar"])], 
"extensions": {"baz": 2}, } + assert result != { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], + "hasNext": True, + } def can_format_and_print_subsequent_incremental_execution_result(): result = SubsequentIncrementalExecutionResult() @@ -368,36 +421,44 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == {"hasNext": True} assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" - incremental = [IncrementalDeferResult()] + pending = [PendingResult(["bar"])] + incremental = [cast(IncrementalResult, IncrementalDeferResult())] completed = [CompletedResult(["foo", 1])] result = SubsequentIncrementalExecutionResult( has_next=True, + pending=pending, incremental=incremental, completed=completed, extensions={"baz": 2}, ) assert result.formatted == { "hasNext": True, + "pending": [{"path": ["bar"]}], "incremental": [{"data": None}], "completed": [{"path": ["foo", 1]}], "extensions": {"baz": 2}, } assert ( str(result) == "SubsequentIncrementalExecutionResult(has_next," - " incremental[1], completed[1], extensions={'baz': 2})" + " pending[1], incremental[1], completed[1], extensions={'baz': 2})" ) def can_compare_subsequent_incremental_execution_result(): - incremental = [IncrementalDeferResult()] + pending = [PendingResult(["bar"])] + incremental = [cast(IncrementalResult, IncrementalDeferResult())] completed = [CompletedResult(path=["foo", 1])] args: dict[str, Any] = { "has_next": True, + "pending": pending, "incremental": incremental, "completed": completed, "extensions": {"baz": 2}, } result = SubsequentIncrementalExecutionResult(**args) assert result == SubsequentIncrementalExecutionResult(**args) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, pending=[]) + ) assert result != SubsequentIncrementalExecutionResult( **modified_args(args, incremental=[]) ) @@ -408,22 +469,47 @@ def 
can_compare_subsequent_incremental_execution_result(): **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) + assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] assert result != (incremental, False) assert result == { "hasNext": True, + "pending": pending, "incremental": incremental, "completed": completed, "extensions": {"baz": 2}, } - assert result == {"incremental": incremental, "hasNext": True} assert result != { - "hasNext": False, + "pending": pending, + "incremental": incremental, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, "incremental": incremental, "completed": completed, "extensions": {"baz": 2}, } + assert result != { + "hasNext": True, + "pending": pending, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "completed": completed, + } def can_print_deferred_grouped_field_set_record(): record = DeferredGroupedFieldSetRecord([], {}, False) @@ -483,7 +569,11 @@ async def can_defer_fragments_containing_scalar_types(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "completed": [{"path": ["hero"]}], @@ -535,7 +625,11 @@ async def does_not_disable_defer_with_null_if_argument(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "completed": [{"path": 
["hero"]}], @@ -581,7 +675,11 @@ async def can_defer_fragments_on_the_top_level_query_field(): result = await complete(document) assert result == [ - {"data": {}, "hasNext": True}, + { + "data": {}, + "pending": [{"path": [], "label": "DeferQuery"}], + "hasNext": True, + }, { "incremental": [{"data": {"hero": {"id": "1"}}, "path": []}], "completed": [{"path": [], "label": "DeferQuery"}], @@ -606,7 +704,11 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) assert result == [ - {"data": {}, "hasNext": True}, + { + "data": {}, + "pending": [{"path": [], "label": "DeferQuery"}], + "hasNext": True, + }, { "incremental": [ { @@ -649,7 +751,14 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "hasNext": True}, + { + "data": {"hero": {}}, + "pending": [ + {"path": ["hero"], "label": "DeferTop"}, + {"path": ["hero"], "label": "DeferNested"}, + ], + "hasNext": True, + }, { "incremental": [ { @@ -693,7 +802,11 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): result = await complete(document) assert result == [ - {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, + { + "data": {"hero": {"name": "Luke"}}, + "pending": [{"path": ["hero"], "label": "DeferTop"}], + "hasNext": True, + }, { "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, @@ -718,7 +831,11 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first result = await complete(document) assert result == [ - {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, + { + "data": {"hero": {"name": "Luke"}}, + "pending": [{"path": ["hero"], "label": "DeferTop"}], + "hasNext": True, + }, { "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, @@ -742,7 +859,11 @@ async def can_defer_an_inline_fragment(): result = await 
complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"], "label": "InlineDeferred"}], + "hasNext": True, + }, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "completed": [{"path": ["hero"], "label": "InlineDeferred"}], @@ -769,7 +890,7 @@ async def does_not_emit_empty_defer_fragments(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "hasNext": True}, + {"data": {"hero": {}}, "pending": [{"path": ["hero"]}], "hasNext": True}, { "completed": [{"path": ["hero"]}], "hasNext": False, @@ -797,6 +918,10 @@ async def separately_emits_defer_fragments_different_labels_varying_fields(): assert result == [ { "data": {"hero": {}}, + "pending": [ + {"path": ["hero"], "label": "DeferID"}, + {"path": ["hero"], "label": "DeferName"}, + ], "hasNext": True, }, { @@ -841,6 +966,10 @@ async def separately_emits_defer_fragments_different_labels_varying_subfields(): assert result == [ { "data": {}, + "pending": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], "hasNext": True, }, { @@ -901,6 +1030,10 @@ async def resolve(value): assert result == [ { "data": {}, + "pending": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], "hasNext": True, }, { @@ -949,6 +1082,10 @@ async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): assert result == [ { "data": {"hero": {}}, + "pending": [ + {"path": [], "label": "DeferName"}, + {"path": ["hero"], "label": "DeferID"}, + ], "hasNext": True, }, { @@ -991,9 +1128,11 @@ async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level assert result == [ { "data": {}, + "pending": [{"path": [], "label": "DeferName"}], "hasNext": True, }, { + "pending": [{"path": ["hero"], "label": "DeferID"}], "incremental": [ { "data": { @@ -1055,7 +1194,24 @@ async def 
can_deduplicate_multiple_defers_on_the_same_object(): result = await complete(document) assert result == [ - {"data": {"hero": {"friends": [{}, {}, {}]}}, "hasNext": True}, + { + "data": {"hero": {"friends": [{}, {}, {}]}}, + "pending": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + ], + "hasNext": True, + }, { "incremental": [ { @@ -1139,6 +1295,7 @@ async def deduplicates_fields_present_in_the_initial_payload(): "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, } }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1182,9 +1339,11 @@ async def deduplicates_fields_present_in_a_parent_defer_payload(): assert result == [ { "data": {"hero": {}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], "incremental": [ { "data": { @@ -1277,9 +1436,11 @@ async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): }, }, }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject"]}], "incremental": [ { "data": {"bar": "bar"}, @@ -1290,6 +1451,7 @@ async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], "incremental": [ { "data": {"baz": "baz"}, @@ -1346,9 +1508,14 @@ async def deduplicates_fields_from_deferred_fragments_branches_same_level(): assert result == [ { "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, + "pending": [ + {"path": ["hero"]}, + {"path": ["hero", "nestedObject", "deeperObject"]}, + ], "hasNext": True, }, { + 
"pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], "incremental": [ { "data": { @@ -1417,6 +1584,7 @@ async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): assert result == [ { "data": {"a": {"b": {"c": {"d": "d"}}}}, + "pending": [{"path": []}, {"path": ["a", "b"]}], "hasNext": True, }, { @@ -1470,6 +1638,7 @@ async def nulls_cross_defer_boundaries_null_first(): assert result == [ { "data": {"a": {}}, + "pending": [{"path": []}, {"path": ["a"]}], "hasNext": True, }, { @@ -1540,6 +1709,7 @@ async def nulls_cross_defer_boundaries_value_first(): assert result == [ { "data": {"a": {}}, + "pending": [{"path": []}, {"path": ["a"]}], "hasNext": True, }, { @@ -1613,6 +1783,7 @@ async def filters_a_payload_with_a_null_that_cannot_be_merged(): assert result == [ { "data": {"a": {}}, + "pending": [{"path": []}, {"path": ["a"]}], "hasNext": True, }, { @@ -1704,6 +1875,7 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): assert result == [ { "data": {}, + "pending": [{"path": []}], "hasNext": True, }, { @@ -1757,6 +1929,7 @@ async def deduplicates_list_fields(): ] } }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1793,6 +1966,7 @@ async def deduplicates_async_iterable_list_fields(): assert result == [ { "data": {"hero": {"friends": [{"name": "Han"}]}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1834,6 +2008,7 @@ async def resolve_friends(_info): assert result == [ { "data": {"hero": {"friends": []}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1872,6 +2047,7 @@ async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): ] } }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1918,6 +2094,7 @@ async def deduplicates_list_fields_that_return_empty_lists(): assert result == [ { "data": {"hero": {"friends": []}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1950,6 +2127,7 @@ async def deduplicates_null_object_fields(): 
assert result == [ { "data": {"hero": {"nestedObject": None}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1986,6 +2164,7 @@ async def resolve_nested_object(_info): assert result == [ { "data": {"hero": {"nestedObject": {"name": "foo"}}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -2012,7 +2191,11 @@ async def handles_errors_thrown_in_deferred_fragments(): result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "incremental": [ { @@ -2052,7 +2235,11 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): ) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "completed": [ { @@ -2122,7 +2309,11 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): ) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "completed": [ { @@ -2165,8 +2356,17 @@ async def returns_payloads_in_correct_order(): result = await complete(document, {"hero": {**hero, "name": Resolvers.slow}}) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "incremental": [ { "data": {"name": "slow", "friends": [{}, {}, {}]}, @@ -2224,8 +2424,17 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": 
["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "incremental": [ { "data": {"name": "Luke", "friends": [{}, {}, {}]}, diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index 162bd00d..96935d99 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -55,15 +55,15 @@ def compares_to_dict(): res = ExecutionResult(data, errors) assert res == {"data": data, "errors": errors} assert res == {"data": data, "errors": errors, "extensions": None} - assert res != {"data": data, "errors": None} - assert res != {"data": None, "errors": errors} + assert res == {"data": data, "errors": errors, "extensions": {}} + assert res != {"errors": errors} + assert res != {"data": data} assert res != {"data": data, "errors": errors, "extensions": extensions} res = ExecutionResult(data, errors, extensions) - assert res == {"data": data, "errors": errors} assert res == {"data": data, "errors": errors, "extensions": extensions} - assert res != {"data": data, "errors": None} - assert res != {"data": None, "errors": errors} - assert res != {"data": data, "errors": errors, "extensions": None} + assert res != {"errors": errors, "extensions": extensions} + assert res != {"data": data, "extensions": extensions} + assert res != {"data": data, "errors": errors} def compares_to_tuple(): res = ExecutionResult(data, errors) diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index f5030c88..987eba45 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -242,7 +242,11 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): patches.append(patch.formatted) assert patches == [ - {"data": {"first": {}, "second": {"theNumber": 2}}, "hasNext": True}, + { + "data": {"first": {}, "second": {"theNumber": 2}}, + "pending": 
[{"path": ["first"], "label": "defer-label"}], + "hasNext": True, + }, { "incremental": [ { @@ -313,7 +317,11 @@ async def mutation_with_defer_is_not_executed_serially(): patches.append(patch.formatted) assert patches == [ - {"data": {"second": {"theNumber": 2}}, "hasNext": True}, + { + "data": {"second": {"theNumber": 2}}, + "pending": [{"path": [], "label": "defer-label"}], + "hasNext": True, + }, { "incremental": [ { diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 5454e826..4331eaa4 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -199,9 +199,9 @@ def can_compare_incremental_stream_result(): assert result != tuple(args.values())[:1] assert result != (["hello", "world"], []) assert result == args - assert result == dict(list(args.items())[:2]) - assert result == dict(list(args.items())[:3]) - assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) + assert result != {**args, "items": ["hello", "foo"]} + assert result != {**args, "errors": []} + assert result != {**args, "path": ["foo", 2]} assert result != {**args, "extensions": {"baz": 1}} @pytest.mark.asyncio @@ -215,6 +215,7 @@ async def can_stream_a_list_field(): "data": { "scalarList": ["apple"], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -239,6 +240,7 @@ async def can_use_default_value_of_initial_count(): "data": { "scalarList": [], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -305,6 +307,7 @@ async def returns_label_from_stream_directive(): "data": { "scalarList": ["apple"], }, + "pending": [{"path": ["scalarList"], "label": "scalar-stream"}], "hasNext": True, }, { @@ -375,6 +378,7 @@ async def does_not_disable_stream_with_null_if_argument(): "data": { "scalarList": ["apple", "banana"], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -407,6 +411,7 @@ async def can_stream_multi_dimensional_lists(): "data": { "scalarListList": [["apple", "apple", 
"apple"]], }, + "pending": [{"path": ["scalarListList"]}], "hasNext": True, }, { @@ -458,6 +463,7 @@ async def await_friend(f): {"name": "Han", "id": "2"}, ], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -495,6 +501,7 @@ async def await_friend(f): assert result == [ { "data": {"friendList": []}, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -562,6 +569,7 @@ async def get_id(f): {"name": "Han", "id": "2"}, ] }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -612,6 +620,7 @@ async def await_friend(f, i): "path": ["friendList", 1], } ], + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -655,6 +664,7 @@ async def await_friend(f, i): assert result == [ { "data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -706,6 +716,7 @@ async def friend_list(_info): assert result == [ { "data": {"friendList": []}, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -767,6 +778,7 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -840,6 +852,7 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, }, @@ -914,6 +927,7 @@ async def friend_list(_info): "data": { "friendList": [{"name": "Luke", "id": "1"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -953,6 +967,7 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): "data": { "nonNullFriendList": [{"name": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -998,6 +1013,7 @@ async def friend_list(_info): "data": { "nonNullFriendList": [{"name": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1037,6 +1053,7 @@ async def scalar_list(_info): "data": { "scalarList": ["Luke"], }, + "pending": [{"path": 
["scalarList"]}], "hasNext": True, }, { @@ -1090,6 +1107,7 @@ def get_friends(_info): "data": { "friendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1151,6 +1169,7 @@ def get_friends(_info): "data": { "friendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1213,6 +1232,7 @@ def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1263,6 +1283,7 @@ def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1315,6 +1336,7 @@ async def get_friends(_info): "data": { "friendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1381,6 +1403,7 @@ async def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1443,6 +1466,7 @@ async def __anext__(self): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1512,6 +1536,7 @@ async def aclose(self): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1666,6 +1691,10 @@ async def friend_list(_info): "otherNestedObject": {}, "nestedObject": {"nestedFriendList": []}, }, + "pending": [ + {"path": ["otherNestedObject"]}, + {"path": ["nestedObject", "nestedFriendList"]}, + ], "hasNext": True, }, { @@ -1738,6 +1767,7 @@ async def friend_list(_info): "data": { "nestedObject": {}, }, + "pending": [{"path": ["nestedObject"]}], "hasNext": True, }, { @@ -1801,6 +1831,7 @@ async def friend_list(_info): "data": { "friendList": [], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1875,7 +1906,11 @@ 
async def iterable(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + assert result1 == { + "data": {"nestedObject": {}}, + "pending": [{"path": ["nestedObject"]}], + "hasNext": True, + } assert not finished @@ -1944,6 +1979,7 @@ async def get_friends(_info): "data": { "friendList": [{"id": "1", "name": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -2012,6 +2048,10 @@ async def get_nested_friend_list(_info): "nestedFriendList": [], }, }, + "pending": [ + {"path": ["nestedObject"]}, + {"path": ["nestedObject", "nestedFriendList"]}, + ], "hasNext": True, }, { @@ -2082,11 +2122,16 @@ async def get_friends(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + assert result1 == { + "data": {"nestedObject": {}}, + "pending": [{"path": ["nestedObject"]}], + "hasNext": True, + } resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { + "pending": [{"path": ["nestedObject", "nestedFriendList"]}], "incremental": [ { "data": {"scalarField": "slow", "nestedFriendList": []}, @@ -2166,11 +2211,19 @@ async def get_friends(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"path": ["friendList", 0], "label": "DeferName"}, + {"path": ["friendList"], "label": "stream-label"}, + ], + "hasNext": True, + } resolve_iterable.set() result2 = await anext(iterator) assert result2.formatted == { + "pending": [{"path": ["friendList", 1], "label": "DeferName"}], "incremental": [ { "data": {"name": "Luke"}, @@ -2251,11 +2304,19 @@ async def get_friends(_info): iterator = execute_result.subsequent_results result1 = 
execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"path": ["friendList", 0], "label": "DeferName"}, + {"path": ["friendList"], "label": "stream-label"}, + ], + "hasNext": True, + } resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { + "pending": [{"path": ["friendList", 1], "label": "DeferName"}], "incremental": [ { "data": {"name": "Luke"}, @@ -2322,7 +2383,11 @@ async def iterable(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "hasNext": True, + } await iterator.aclose() with pytest.raises(StopAsyncIteration): @@ -2369,6 +2434,7 @@ async def __anext__(self): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1", "name": "Luke"}]}, + "pending": [{"path": ["friendList"]}], "hasNext": True, } @@ -2408,7 +2474,11 @@ async def iterable(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "hasNext": True, + } with pytest.raises(RuntimeError, match="bad"): await iterator.athrow(RuntimeError("bad")) From 9445c0b8f1bc585e88cf3c221b435cf779007e09 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 14:18:32 +0100 Subject: [PATCH 209/230] fix(types): path is required within incremental results Replicates graphql/graphql-js@d1d66a34b697c24efd549150c3a5df9bc01be5af --- .../execution/incremental_publisher.py | 62 ++++++++++--------- 
tests/execution/test_defer.py | 14 ++--- tests/execution/test_stream.py | 8 +-- 3 files changed, 43 insertions(+), 41 deletions(-) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index 4ba1d553..dba04461 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -364,39 +364,39 @@ class ExperimentalIncrementalExecutionResults(NamedTuple): class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] + data: dict[str, Any] path: list[str | int] + errors: list[GraphQLFormattedError] extensions: dict[str, Any] class IncrementalDeferResult: """Incremental deferred execution result""" - data: dict[str, Any] | None + data: dict[str, Any] + path: list[str | int] errors: list[GraphQLError] | None - path: list[str | int] | None extensions: dict[str, Any] | None __slots__ = "data", "errors", "extensions", "path" def __init__( self, - data: dict[str, Any] | None = None, + data: dict[str, Any], + path: list[str | int], errors: list[GraphQLError] | None = None, - path: list[str | int] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.data = data - self.errors = errors self.path = path + self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.path: - args.append(f"path={self.path!r}") + args: list[str] = [f"data={self.data!r}, path={self.path!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -404,11 +404,12 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedIncrementalDeferResult: """Get execution result formatted according to the specification.""" - 
formatted: FormattedIncrementalDeferResult = {"data": self.data} + formatted: FormattedIncrementalDeferResult = { + "data": self.data, + "path": self.path, + } if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] - if self.path is not None: - formatted["path"] = self.path if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -442,39 +443,39 @@ def __ne__(self, other: object) -> bool: class FormattedIncrementalStreamResult(TypedDict, total=False): """Formatted incremental stream execution result""" - items: list[Any] | None - errors: list[GraphQLFormattedError] + items: list[Any] path: list[str | int] + errors: list[GraphQLFormattedError] extensions: dict[str, Any] class IncrementalStreamResult: """Incremental streamed execution result""" - items: list[Any] | None + items: list[Any] + path: list[str | int] errors: list[GraphQLError] | None - path: list[str | int] | None extensions: dict[str, Any] | None __slots__ = "errors", "extensions", "items", "label", "path" def __init__( self, - items: list[Any] | None = None, + items: list[Any], + path: list[str | int], errors: list[GraphQLError] | None = None, - path: list[str | int] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.items = items - self.errors = errors self.path = path + self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"] - if self.path: - args.append(f"path={self.path!r}") + args: list[str] = [f"items={self.items!r}, path={self.path!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -482,11 +483,12 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedIncrementalStreamResult: """Get execution result formatted according to the specification.""" - 
formatted: FormattedIncrementalStreamResult = {"items": self.items} - if self.errors is not None: + formatted: FormattedIncrementalStreamResult = { + "items": self.items, + "path": self.path, + } + if self.errors: formatted["errors"] = [error.formatted for error in self.errors] - if self.path is not None: - formatted["path"] = self.path if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -982,8 +984,8 @@ def _process_pending( continue incremental_result = IncrementalStreamResult( subsequent_result_record.items, - subsequent_result_record.errors or None, subsequent_result_record.stream_record.path, + subsequent_result_record.errors or None, ) incremental_results.append(incremental_result) else: @@ -997,9 +999,9 @@ def _process_pending( if not deferred_grouped_field_set_record.sent: deferred_grouped_field_set_record.sent = True incremental_result = IncrementalDeferResult( - deferred_grouped_field_set_record.data, - deferred_grouped_field_set_record.errors or None, + deferred_grouped_field_set_record.data, # type: ignore deferred_grouped_field_set_record.path, + deferred_grouped_field_set_record.errors or None, ) incremental_results.append(incremental_result) return IncrementalUpdate( diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 2de10173..7f7c0c01 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -256,9 +256,9 @@ def can_compare_completed_result(): assert result != {**args, "errors": [{"message": "oops"}]} def can_format_and_print_incremental_defer_result(): - result = IncrementalDeferResult() - assert result.formatted == {"data": None} - assert str(result) == "IncrementalDeferResult(data=None, errors=None)" + result = IncrementalDeferResult(data={}, path=[]) + assert result.formatted == {"data": {}, "path": []} + assert str(result) == "IncrementalDeferResult(data={}, path=[])" result = IncrementalDeferResult( data={"hello": "world"}, @@ -274,7 +274,7 @@ def 
can_format_and_print_incremental_defer_result(): } assert ( str(result) == "IncrementalDeferResult(data={'hello': 'world'}," - " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" + " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" ) # noinspection PyTypeChecker @@ -422,7 +422,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult())] + incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] completed = [CompletedResult(["foo", 1])] result = SubsequentIncrementalExecutionResult( has_next=True, @@ -434,7 +434,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == { "hasNext": True, "pending": [{"path": ["bar"]}], - "incremental": [{"data": None}], + "incremental": [{"data": {"one": 1}, "path": [1]}], "completed": [{"path": ["foo", 1]}], "extensions": {"baz": 2}, } @@ -445,7 +445,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): def can_compare_subsequent_incremental_execution_result(): pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult())] + incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] completed = [CompletedResult(path=["foo", 1])] args: dict[str, Any] = { "has_next": True, diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 4331eaa4..487817b4 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -148,9 +148,9 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_stream_directive(): def can_format_and_print_incremental_stream_result(): - result = IncrementalStreamResult() - assert result.formatted == {"items": None} - assert str(result) == 
"IncrementalStreamResult(items=None, errors=None)" + result = IncrementalStreamResult(items=[], path=[]) + assert result.formatted == {"items": [], "path": []} + assert str(result) == "IncrementalStreamResult(items=[], path=[])" result = IncrementalStreamResult( items=["hello", "world"], @@ -166,7 +166,7 @@ def can_format_and_print_incremental_stream_result(): } assert ( str(result) == "IncrementalStreamResult(items=['hello', 'world']," - " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" + " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" ) def can_print_stream_record(): From f9b19b088615cd0531830883447f207b3b222c51 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 19:32:12 +0100 Subject: [PATCH 210/230] incremental: utilize id and subPath rather than path and label Replicates graphql/graphql-js@d2e280ac3eaa90adf2b6118cf35687a21bef8e15 --- .../execution/incremental_publisher.py | 215 +++-- tests/execution/test_defer.py | 734 ++++++------------ tests/execution/test_mutations.py | 26 +- tests/execution/test_stream.py | 641 +++++---------- 4 files changed, 607 insertions(+), 1009 deletions(-) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index dba04461..d112651e 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -59,6 +59,7 @@ class FormattedPendingResult(TypedDict, total=False): """Formatted pending execution result""" + id: str path: list[str | int] label: str @@ -66,22 +67,25 @@ class FormattedPendingResult(TypedDict, total=False): class PendingResult: """Pending execution result""" + id: str path: list[str | int] label: str | None - __slots__ = "label", "path" + __slots__ = "id", "label", "path" def __init__( self, + id: str, # noqa: A002 path: list[str | int], label: str | None = None, ) -> None: + self.id = id self.path = path self.label = label def __repr__(self) -> 
str: name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] + args: list[str] = [f"id={self.id!r}, path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") return f"{name}({', '.join(args)})" @@ -89,22 +93,25 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedPendingResult: """Get pending result formatted according to the specification.""" - formatted: FormattedPendingResult = {"path": self.path} + formatted: FormattedPendingResult = {"id": self.id, "path": self.path} if self.label is not None: formatted["label"] = self.label return formatted def __eq__(self, other: object) -> bool: if isinstance(other, dict): - return (other.get("path") or None) == (self.path or None) and ( - other.get("label") or None - ) == (self.label or None) + return ( + other.get("id") == self.id + and (other.get("path") or None) == (self.path or None) + and (other.get("label") or None) == (self.label or None) + ) if isinstance(other, tuple): size = len(other) - return 1 < size < 3 and (self.path, self.label)[:size] == other + return 1 < size < 4 and (self.id, self.path, self.label)[:size] == other return ( isinstance(other, self.__class__) + and other.id == self.id and other.path == self.path and other.label == self.label ) @@ -116,35 +123,29 @@ def __ne__(self, other: object) -> bool: class FormattedCompletedResult(TypedDict, total=False): """Formatted completed execution result""" - path: list[str | int] - label: str + id: str errors: list[GraphQLFormattedError] class CompletedResult: """Completed execution result""" - path: list[str | int] - label: str | None + id: str errors: list[GraphQLError] | None - __slots__ = "errors", "label", "path" + __slots__ = "errors", "id" def __init__( self, - path: list[str | int], - label: str | None = None, + id: str, # noqa: A002 errors: list[GraphQLError] | None = None, ) -> None: - self.path = path - self.label = label + self.id = id self.errors = errors def __repr__(self) -> str: name = 
self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] - if self.label: - args.append(f"label={self.label!r}") + args: list[str] = [f"id={self.id!r}"] if self.errors: args.append(f"errors={self.errors!r}") return f"{name}({', '.join(args)})" @@ -152,27 +153,22 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedCompletedResult: """Get completed result formatted according to the specification.""" - formatted: FormattedCompletedResult = {"path": self.path} - if self.label is not None: - formatted["label"] = self.label + formatted: FormattedCompletedResult = {"id": self.id} if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] return formatted def __eq__(self, other: object) -> bool: if isinstance(other, dict): - return ( - (other.get("path") or None) == (self.path or None) - and (other.get("label") or None) == (self.label or None) - and (other.get("errors") or None) == (self.errors or None) + return other.get("id") == self.id and (other.get("errors") or None) == ( + self.errors or None ) if isinstance(other, tuple): size = len(other) - return 1 < size < 4 and (self.path, self.label, self.errors)[:size] == other + return 1 < size < 3 and (self.id, self.errors)[:size] == other return ( isinstance(other, self.__class__) - and other.path == self.path - and other.label == self.label + and other.id == self.id and other.errors == self.errors ) @@ -222,7 +218,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - ext = "" if self.extensions is None else f", extensions={self.extensions}" + ext = "" if self.extensions is None else f", extensions={self.extensions!r}" return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" def __iter__(self) -> Iterator[Any]: @@ -298,13 +294,15 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + args: list[str] = [f"data={self.data!r}"] + if 
self.errors: + args.append(f"errors={self.errors!r}") if self.pending: args.append(f"pending={self.pending!r}") if self.has_next: args.append("has_next") if self.extensions: - args.append(f"extensions={self.extensions}") + args.append(f"extensions={self.extensions!r}") return f"{name}({', '.join(args)})" @property @@ -365,7 +363,8 @@ class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" data: dict[str, Any] - path: list[str | int] + id: str + subPath: list[str | int] errors: list[GraphQLFormattedError] extensions: dict[str, Any] @@ -374,31 +373,36 @@ class IncrementalDeferResult: """Incremental deferred execution result""" data: dict[str, Any] - path: list[str | int] + id: str + sub_path: list[str | int] | None errors: list[GraphQLError] | None extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "path" + __slots__ = "data", "errors", "extensions", "id", "sub_path" def __init__( self, data: dict[str, Any], - path: list[str | int], + id: str, # noqa: A002 + sub_path: list[str | int] | None = None, errors: list[GraphQLError] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.data = data - self.path = path + self.id = id + self.sub_path = sub_path self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, path={self.path!r}"] - if self.errors: + args: list[str] = [f"data={self.data!r}, id={self.id!r}"] + if self.sub_path is not None: + args.append(f"sub_path={self.sub_path!r}") + if self.errors is not None: args.append(f"errors={self.errors!r}") - if self.extensions: - args.append(f"extensions={self.extensions}") + if self.extensions is not None: + args.append(f"extensions={self.extensions!r}") return f"{name}({', '.join(args)})" @property @@ -406,8 +410,10 @@ def formatted(self) -> FormattedIncrementalDeferResult: """Get execution result formatted according to the 
specification.""" formatted: FormattedIncrementalDeferResult = { "data": self.data, - "path": self.path, + "id": self.id, } + if self.sub_path is not None: + formatted["subPath"] = self.sub_path if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] if self.extensions is not None: @@ -418,21 +424,26 @@ def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data + and other.get("id") == self.id + and (other.get("subPath") or None) == (self.sub_path or None) and (other.get("errors") or None) == (self.errors or None) - and (other.get("path") or None) == (self.path or None) and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 - and (self.data, self.errors, self.path, self.extensions)[:size] == other + 1 < size < 6 + and (self.data, self.id, self.sub_path, self.errors, self.extensions)[ + :size + ] + == other ) return ( isinstance(other, self.__class__) and other.data == self.data + and other.id == self.id + and other.sub_path == self.sub_path and other.errors == self.errors - and other.path == self.path and other.extensions == self.extensions ) @@ -444,7 +455,8 @@ class FormattedIncrementalStreamResult(TypedDict, total=False): """Formatted incremental stream execution result""" items: list[Any] - path: list[str | int] + id: str + subPath: list[str | int] errors: list[GraphQLFormattedError] extensions: dict[str, Any] @@ -453,31 +465,36 @@ class IncrementalStreamResult: """Incremental streamed execution result""" items: list[Any] - path: list[str | int] + id: str + sub_path: list[str | int] | None errors: list[GraphQLError] | None extensions: dict[str, Any] | None - __slots__ = "errors", "extensions", "items", "label", "path" + __slots__ = "errors", "extensions", "id", "items", "label", "sub_path" def __init__( self, items: list[Any], - path: list[str | int], + id: str, # noqa: A002 + sub_path: 
list[str | int] | None = None, errors: list[GraphQLError] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.items = items - self.path = path + self.id = id + self.sub_path = sub_path self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"items={self.items!r}, path={self.path!r}"] - if self.errors: + args: list[str] = [f"items={self.items!r}, id={self.id!r}"] + if self.sub_path is not None: + args.append(f"sub_path={self.sub_path!r}") + if self.errors is not None: args.append(f"errors={self.errors!r}") - if self.extensions: - args.append(f"extensions={self.extensions}") + if self.extensions is not None: + args.append(f"extensions={self.extensions!r}") return f"{name}({', '.join(args)})" @property @@ -485,9 +502,11 @@ def formatted(self) -> FormattedIncrementalStreamResult: """Get execution result formatted according to the specification.""" formatted: FormattedIncrementalStreamResult = { "items": self.items, - "path": self.path, + "id": self.id, } - if self.errors: + if self.sub_path is not None: + formatted["subPath"] = self.sub_path + if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] if self.extensions is not None: formatted["extensions"] = self.extensions @@ -496,23 +515,27 @@ def formatted(self) -> FormattedIncrementalStreamResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - (other.get("items") or None) == (self.items or None) + other.get("items") == self.items + and other.get("id") == self.id + and (other.get("subPath", None) == (self.sub_path or None)) and (other.get("errors") or None) == (self.errors or None) - and (other.get("path", None) == (self.path or None)) and (other.get("extensions", None) == (self.extensions or None)) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 - and (self.items, self.errors, self.path, self.extensions)[:size] + 1 < size < 6 + and 
(self.items, self.id, self.sub_path, self.errors, self.extensions)[ + :size + ] == other ) return ( isinstance(other, self.__class__) and other.items == self.items + and other.id == self.id + and other.sub_path == self.sub_path and other.errors == self.errors - and other.path == self.path and other.extensions == self.extensions ) @@ -574,7 +597,7 @@ def __repr__(self) -> str: if self.completed: args.append(f"completed[{len(self.completed)}]") if self.extensions: - args.append(f"extensions={self.extensions}") + args.append(f"extensions={self.extensions!r}") return f"{name}({', '.join(args)})" @property @@ -654,15 +677,18 @@ class IncrementalPublisher: and thereby achieve more deterministic results. """ + _next_id: int _released: dict[SubsequentResultRecord, None] _pending: dict[SubsequentResultRecord, None] _resolve: Event | None + _tasks: set[Awaitable] def __init__(self) -> None: + self._next_id = 0 self._released = {} self._pending = {} self._resolve = None # lazy initialization - self._tasks: set[Awaitable] = set() + self._tasks = set() @staticmethod def report_new_defer_fragment_record( @@ -860,11 +886,19 @@ def _pending_sources_to_results( pending_results: list[PendingResult] = [] for pending_source in pending_sources: pending_source.pending_sent = True + id_ = self._get_next_id() + pending_source.id = id_ pending_results.append( - PendingResult(pending_source.path, pending_source.label) + PendingResult(id_, pending_source.path, pending_source.label) ) return pending_results + def _get_next_id(self) -> str: + """Get the next ID for pending results.""" + id_ = self._next_id + self._next_id += 1 + return str(id_) + async def _subscribe( self, ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: @@ -984,9 +1018,12 @@ def _process_pending( continue incremental_result = IncrementalStreamResult( subsequent_result_record.items, - subsequent_result_record.stream_record.path, - subsequent_result_record.errors or None, + # safe because `id` is defined + # 
once the stream has been released as pending + subsequent_result_record.stream_record.id, # type: ignore ) + if subsequent_result_record.errors: + incremental_result.errors = subsequent_result_record.errors incremental_results.append(incremental_result) else: new_pending_sources.discard(subsequent_result_record) @@ -998,11 +1035,13 @@ def _process_pending( ) in subsequent_result_record.deferred_grouped_field_set_records: if not deferred_grouped_field_set_record.sent: deferred_grouped_field_set_record.sent = True - incremental_result = IncrementalDeferResult( - deferred_grouped_field_set_record.data, # type: ignore - deferred_grouped_field_set_record.path, - deferred_grouped_field_set_record.errors or None, + incremental_result = self._get_incremental_defer_result( + deferred_grouped_field_set_record ) + if deferred_grouped_field_set_record.errors: + incremental_result.errors = ( + deferred_grouped_field_set_record.errors + ) incremental_results.append(incremental_result) return IncrementalUpdate( self._pending_sources_to_results(new_pending_sources), @@ -1010,14 +1049,40 @@ def _process_pending( completed_results, ) + def _get_incremental_defer_result( + self, deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord + ) -> IncrementalDeferResult: + """Get the incremental defer result from the grouped field set record.""" + data = deferred_grouped_field_set_record.data + fragment_records = deferred_grouped_field_set_record.deferred_fragment_records + max_length = len(fragment_records[0].path) + max_index = 0 + for i in range(1, len(fragment_records)): + fragment_record = fragment_records[i] + length = len(fragment_record.path) + if length > max_length: + max_length = length + max_index = i + record_with_longest_path = fragment_records[max_index] + longest_path = record_with_longest_path.path + sub_path = deferred_grouped_field_set_record.path[len(longest_path) :] + id_ = record_with_longest_path.id + return IncrementalDeferResult( + data, # type: ignore + # 
safe because `id` is defined + # once the fragment has been released as pending + id_, # type: ignore + sub_path or None, + ) + @staticmethod def _completed_record_to_result( completed_record: DeferredFragmentRecord | StreamRecord, ) -> CompletedResult: """Convert the completed record to a result.""" return CompletedResult( - completed_record.path, - completed_record.label or None, + # safe because `id` is defined once the stream has been released as pending + completed_record.id, # type: ignore completed_record.errors or None, ) @@ -1147,6 +1212,7 @@ class DeferredFragmentRecord: path: list[str | int] label: str | None + id: str | None children: dict[SubsequentResultRecord, None] deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None] errors: list[GraphQLError] @@ -1157,6 +1223,7 @@ class DeferredFragmentRecord: def __init__(self, path: Path | None = None, label: str | None = None) -> None: self.path = path.as_list() if path else [] self.label = label + self.id = None self.children = {} self.filtered = False self.pending_sent = False @@ -1179,6 +1246,7 @@ class StreamRecord: label: str | None path: list[str | int] + id: str | None errors: list[GraphQLError] early_return: Callable[[], Awaitable[Any]] | None pending_sent: bool @@ -1191,6 +1259,7 @@ def __init__( ) -> None: self.path = path.as_list() self.label = label + self.id = None self.errors = [] self.early_return = early_return self.pending_sent = False diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 7f7c0c01..62dc88bb 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -196,93 +196,86 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_defer_directive(): def can_format_and_print_pending_result(): - result = PendingResult([]) - assert result.formatted == {"path": []} - assert str(result) == "PendingResult(path=[])" + result = PendingResult("foo", []) + assert 
result.formatted == {"id": "foo", "path": []} + assert str(result) == "PendingResult(id='foo', path=[])" - result = PendingResult(path=["foo", 1], label="bar") - assert result.formatted == { - "path": ["foo", 1], - "label": "bar", - } - assert str(result) == "PendingResult(path=['foo', 1], label='bar')" + result = PendingResult(id="foo", path=["bar", 1], label="baz") + assert result.formatted == {"id": "foo", "path": ["bar", 1], "label": "baz"} + assert str(result) == "PendingResult(id='foo', path=['bar', 1], label='baz')" def can_compare_pending_result(): - args: dict[str, Any] = {"path": ["foo", 1], "label": "bar"} + args: dict[str, Any] = {"id": "foo", "path": ["bar", 1], "label": "baz"} result = PendingResult(**args) assert result == PendingResult(**args) - assert result != CompletedResult(**modified_args(args, path=["foo", 2])) - assert result != CompletedResult(**modified_args(args, label="baz")) + assert result != PendingResult(**modified_args(args, id="bar")) + assert result != PendingResult(**modified_args(args, path=["bar", 2])) + assert result != PendingResult(**modified_args(args, label="bar")) assert result == tuple(args.values()) + assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] - assert result != tuple(args.values())[:1] + ("baz",) + assert result != tuple(args.values())[:1] + (["bar", 2],) assert result == args - assert result != {**args, "path": ["foo", 2]} - assert result != {**args, "label": "baz"} + assert result != {**args, "id": "bar"} + assert result != {**args, "path": ["bar", 2]} + assert result != {**args, "label": "bar"} def can_format_and_print_completed_result(): - result = CompletedResult([]) - assert result.formatted == {"path": []} - assert str(result) == "CompletedResult(path=[])" + result = CompletedResult("foo") + assert result.formatted == {"id": "foo"} + assert str(result) == "CompletedResult(id='foo')" - result = CompletedResult( - path=["foo", 1], label="bar", errors=[GraphQLError("oops")] - 
) - assert result.formatted == { - "path": ["foo", 1], - "label": "bar", - "errors": [{"message": "oops"}], - } - assert ( - str(result) == "CompletedResult(path=['foo', 1], label='bar'," - " errors=[GraphQLError('oops')])" - ) + result = CompletedResult(id="foo", errors=[GraphQLError("oops")]) + assert result.formatted == {"id": "foo", "errors": [{"message": "oops"}]} + assert str(result) == "CompletedResult(id='foo', errors=[GraphQLError('oops')])" def can_compare_completed_result(): - args: dict[str, Any] = {"path": ["foo", 1], "label": "bar", "errors": []} + args: dict[str, Any] = {"id": "foo", "errors": []} result = CompletedResult(**args) assert result == CompletedResult(**args) - assert result != CompletedResult(**modified_args(args, path=["foo", 2])) - assert result != CompletedResult(**modified_args(args, label="baz")) + assert result != CompletedResult(**modified_args(args, id="bar")) assert result != CompletedResult( **modified_args(args, errors=[GraphQLError("oops")]) ) assert result == tuple(args.values()) - assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] + assert result != tuple(args.values())[:1] + ([GraphQLError("oops")],) assert result == args - assert result != {**args, "path": ["foo", 2]} - assert result != {**args, "label": "baz"} + assert result != {**args, "id": "bar"} assert result != {**args, "errors": [{"message": "oops"}]} def can_format_and_print_incremental_defer_result(): - result = IncrementalDeferResult(data={}, path=[]) - assert result.formatted == {"data": {}, "path": []} - assert str(result) == "IncrementalDeferResult(data={}, path=[])" + result = IncrementalDeferResult(data={}, id="foo") + assert result.formatted == {"data": {}, "id": "foo"} + assert str(result) == "IncrementalDeferResult(data={}, id='foo')" result = IncrementalDeferResult( data={"hello": "world"}, - errors=[GraphQLError("msg")], - path=["foo", 1], + id="foo", + sub_path=["bar", 1], + errors=[GraphQLError("oops")], 
extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, - "errors": [{"message": "msg"}], + "id": "foo", + "subPath": ["bar", 1], + "errors": [{"message": "oops"}], "extensions": {"baz": 2}, - "path": ["foo", 1], } assert ( str(result) == "IncrementalDeferResult(data={'hello': 'world'}," - " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" + " id='foo', sub_path=['bar', 1], errors=[GraphQLError('oops')]," + " extensions={'baz': 2})" ) # noinspection PyTypeChecker def can_compare_incremental_defer_result(): args: dict[str, Any] = { "data": {"hello": "world"}, - "errors": [GraphQLError("msg")], - "path": ["foo", 1], + "id": "foo", + "sub_path": ["bar", 1], + "errors": [GraphQLError("oops")], "extensions": {"baz": 2}, } result = IncrementalDeferResult(**args) @@ -290,8 +283,11 @@ def can_compare_incremental_defer_result(): assert result != IncrementalDeferResult( **modified_args(args, data={"hello": "foo"}) ) + assert result != IncrementalDeferResult(**modified_args(args, id="bar")) + assert result != IncrementalDeferResult( + **modified_args(args, sub_path=["bar", 2]) + ) assert result != IncrementalDeferResult(**modified_args(args, errors=[])) - assert result != IncrementalDeferResult(**modified_args(args, path=["foo", 2])) assert result != IncrementalDeferResult( **modified_args(args, extensions={"baz": 1}) ) @@ -300,52 +296,50 @@ def can_compare_incremental_defer_result(): assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] - assert result != ({"hello": "world"}, []) + assert result != ({"hello": "world"}, "bar") + args["subPath"] = args.pop("sub_path") assert result == args assert result != {**args, "data": {"hello": "foo"}} + assert result != {**args, "id": "bar"} + assert result != {**args, "subPath": ["bar", 2]} assert result != {**args, "errors": []} - assert result != {**args, "path": ["foo", 2]} assert result != {**args, "extensions": 
{"baz": 1}} def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult() assert result.formatted == {"data": None, "hasNext": False, "pending": []} - assert ( - str(result) == "InitialIncrementalExecutionResult(data=None, errors=None)" - ) + assert str(result) == "InitialIncrementalExecutionResult(data=None)" result = InitialIncrementalExecutionResult(has_next=True) assert result.formatted == {"data": None, "hasNext": True, "pending": []} - assert ( - str(result) - == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" - ) + assert str(result) == "InitialIncrementalExecutionResult(data=None, has_next)" result = InitialIncrementalExecutionResult( data={"hello": "world"}, errors=[GraphQLError("msg")], - pending=[PendingResult(["bar"])], + pending=[PendingResult("foo", ["bar"])], has_next=True, extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, "errors": [{"message": "msg"}], - "pending": [{"path": ["bar"]}], + "pending": [{"id": "foo", "path": ["bar"]}], "hasNext": True, "extensions": {"baz": 2}, } assert ( str(result) == "InitialIncrementalExecutionResult(" "data={'hello': 'world'}, errors=[GraphQLError('msg')]," - " pending=[PendingResult(path=['bar'])], has_next, extensions={'baz': 2})" + " pending=[PendingResult(id='foo', path=['bar'])], has_next," + " extensions={'baz': 2})" ) def can_compare_initial_incremental_execution_result(): args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "has_next": True, "extensions": {"baz": 2}, } @@ -377,19 +371,19 @@ def can_compare_initial_incremental_execution_result(): assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, "extensions": {"baz": 2}, } assert result != { "errors": 
[GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, "extensions": {"baz": 2}, } assert result != { "data": {"hello": "world"}, - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, "extensions": {"baz": 2}, } @@ -402,13 +396,13 @@ def can_compare_initial_incremental_execution_result(): assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "extensions": {"baz": 2}, } assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, } @@ -421,9 +415,11 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == {"hasNext": True} assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" - pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] - completed = [CompletedResult(["foo", 1])] + pending = [PendingResult("foo", ["bar"])] + incremental = [ + cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + ] + completed = [CompletedResult("foo")] result = SubsequentIncrementalExecutionResult( has_next=True, pending=pending, @@ -433,9 +429,9 @@ def can_format_and_print_subsequent_incremental_execution_result(): ) assert result.formatted == { "hasNext": True, - "pending": [{"path": ["bar"]}], - "incremental": [{"data": {"one": 1}, "path": [1]}], - "completed": [{"path": ["foo", 1]}], + "pending": [{"id": "foo", "path": ["bar"]}], + "incremental": [{"data": {"foo": 1}, "id": "bar"}], + "completed": [{"id": "foo"}], "extensions": {"baz": 2}, } assert ( @@ -444,9 +440,11 @@ def can_format_and_print_subsequent_incremental_execution_result(): ) def can_compare_subsequent_incremental_execution_result(): - pending 
= [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] - completed = [CompletedResult(path=["foo", 1])] + pending = [PendingResult("foo", ["bar"])] + incremental = [ + cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + ] + completed = [CompletedResult("foo")] args: dict[str, Any] = { "has_next": True, "pending": pending, @@ -571,12 +569,12 @@ async def can_defer_fragments_containing_scalar_types(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], - "completed": [{"path": ["hero"]}], + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -598,14 +596,7 @@ async def can_disable_defer_using_if_argument(): ) result = await complete(document) - assert result == { - "data": { - "hero": { - "id": "1", - "name": "Luke", - }, - }, - } + assert result == {"data": {"hero": {"id": "1", "name": "Luke"}}} @pytest.mark.asyncio async def does_not_disable_defer_with_null_if_argument(): @@ -627,12 +618,12 @@ async def does_not_disable_defer_with_null_if_argument(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], - "completed": [{"path": ["hero"]}], + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -677,12 +668,12 @@ async def can_defer_fragments_on_the_top_level_query_field(): assert result == [ { "data": {}, - "pending": [{"path": [], "label": "DeferQuery"}], + "pending": [{"id": "0", "path": [], "label": "DeferQuery"}], "hasNext": True, }, { - "incremental": [{"data": {"hero": {"id": "1"}}, "path": []}], - "completed": [{"path": [], "label": 
"DeferQuery"}], + "incremental": [{"data": {"hero": {"id": "1"}}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -706,7 +697,7 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): assert result == [ { "data": {}, - "pending": [{"path": [], "label": "DeferQuery"}], + "pending": [{"id": "0", "path": [], "label": "DeferQuery"}], "hasNext": True, }, { @@ -720,10 +711,10 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): "path": ["hero", "name"], } ], - "path": [], + "id": "0", } ], - "completed": [{"path": [], "label": "DeferQuery"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -754,17 +745,14 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): { "data": {"hero": {}}, "pending": [ - {"path": ["hero"], "label": "DeferTop"}, - {"path": ["hero"], "label": "DeferNested"}, + {"id": "0", "path": ["hero"], "label": "DeferTop"}, + {"id": "1", "path": ["hero"], "label": "DeferNested"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "1"}, - "path": ["hero"], - }, + {"data": {"id": "1"}, "id": "0"}, { "data": { "friends": [ @@ -773,13 +761,10 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): {"name": "C-3PO"}, ] }, - "path": ["hero"], + "id": "1", }, ], - "completed": [ - {"path": ["hero"], "label": "DeferTop"}, - {"path": ["hero"], "label": "DeferNested"}, - ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -804,13 +789,10 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): assert result == [ { "data": {"hero": {"name": "Luke"}}, - "pending": [{"path": ["hero"], "label": "DeferTop"}], + "pending": [{"id": "0", "path": ["hero"], "label": "DeferTop"}], "hasNext": True, }, - { - "completed": [{"path": ["hero"], "label": "DeferTop"}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -833,13 +815,10 @@ async def 
can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first assert result == [ { "data": {"hero": {"name": "Luke"}}, - "pending": [{"path": ["hero"], "label": "DeferTop"}], + "pending": [{"id": "0", "path": ["hero"], "label": "DeferTop"}], "hasNext": True, }, - { - "completed": [{"path": ["hero"], "label": "DeferTop"}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -861,12 +840,12 @@ async def can_defer_an_inline_fragment(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"], "label": "InlineDeferred"}], + "pending": [{"id": "0", "path": ["hero"], "label": "InlineDeferred"}], "hasNext": True, }, { - "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], - "completed": [{"path": ["hero"], "label": "InlineDeferred"}], + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -890,11 +869,12 @@ async def does_not_emit_empty_defer_fragments(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "pending": [{"path": ["hero"]}], "hasNext": True}, { - "completed": [{"path": ["hero"]}], - "hasNext": False, + "data": {"hero": {}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -919,26 +899,17 @@ async def separately_emits_defer_fragments_different_labels_varying_fields(): { "data": {"hero": {}}, "pending": [ - {"path": ["hero"], "label": "DeferID"}, - {"path": ["hero"], "label": "DeferName"}, + {"id": "0", "path": ["hero"], "label": "DeferID"}, + {"id": "1", "path": ["hero"], "label": "DeferName"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": ["hero"], "label": "DeferID"}, - {"path": ["hero"], "label": "DeferName"}, + {"data": {"id": "1"}, "id": "0"}, + 
{"data": {"name": "Luke"}, "id": "1"}, ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -967,30 +938,18 @@ async def separately_emits_defer_fragments_different_labels_varying_subfields(): { "data": {}, "pending": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"id": "0", "path": [], "label": "DeferID"}, + {"id": "1", "path": [], "label": "DeferName"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"hero": {}}, - "path": [], - }, - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"data": {"hero": {}}, "id": "0"}, + {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]}, + {"data": {"name": "Luke"}, "id": "1", "subPath": ["hero"]}, ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -1031,30 +990,18 @@ async def resolve(value): { "data": {}, "pending": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"id": "0", "path": [], "label": "DeferID"}, + {"id": "1", "path": [], "label": "DeferName"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"hero": {}}, - "path": [], - }, - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"data": {"hero": {}}, "id": "0"}, + {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]}, + {"data": {"name": "Luke"}, "id": "1", "subPath": ["hero"]}, ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -1083,26 +1030,17 @@ async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): { "data": {"hero": {}}, "pending": [ - {"path": [], "label": "DeferName"}, - {"path": ["hero"], "label": "DeferID"}, + {"id": "0", "path": [], "label": "DeferName"}, + {"id": "1", "path": ["hero"], 
"label": "DeferID"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": ["hero"], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"data": {"id": "1"}, "id": "1"}, + {"data": {"name": "Luke"}, "id": "0", "subPath": ["hero"]}, ], + "completed": [{"id": "1"}, {"id": "0"}], "hasNext": False, }, ] @@ -1128,36 +1066,18 @@ async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level assert result == [ { "data": {}, - "pending": [{"path": [], "label": "DeferName"}], + "pending": [{"id": "0", "path": [], "label": "DeferName"}], "hasNext": True, }, { - "pending": [{"path": ["hero"], "label": "DeferID"}], - "incremental": [ - { - "data": { - "hero": { - "name": "Luke", - }, - }, - "path": [], - }, - ], - "completed": [ - {"path": [], "label": "DeferName"}, - ], + "pending": [{"id": "1", "path": ["hero"], "label": "DeferID"}], + "incremental": [{"data": {"hero": {"name": "Luke"}}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "data": { - "id": "1", - }, - "path": ["hero"], - }, - ], - "completed": [{"path": ["hero"], "label": "DeferID"}], + "incremental": [{"data": {"id": "1"}, "id": "1"}], + "completed": [{"id": "1"}], "hasNext": False, }, ] @@ -1197,49 +1117,40 @@ async def can_deduplicate_multiple_defers_on_the_same_object(): { "data": {"hero": {"friends": [{}, {}, {}]}}, "pending": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, + {"id": "0", "path": ["hero", "friends", 0]}, + {"id": 
"1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 0]}, + {"id": "3", "path": ["hero", "friends", 0]}, + {"id": "4", "path": ["hero", "friends", 1]}, + {"id": "5", "path": ["hero", "friends", 1]}, + {"id": "6", "path": ["hero", "friends", 1]}, + {"id": "7", "path": ["hero", "friends", 1]}, + {"id": "8", "path": ["hero", "friends", 2]}, + {"id": "9", "path": ["hero", "friends", 2]}, + {"id": "10", "path": ["hero", "friends", 2]}, + {"id": "11", "path": ["hero", "friends", 2]}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "2", "name": "Han"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"id": "3", "name": "Leia"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"id": "4", "name": "C-3PO"}, - "path": ["hero", "friends", 2], - }, + {"data": {"id": "2", "name": "Han"}, "id": "0"}, + {"data": {"id": "3", "name": "Leia"}, "id": "4"}, + {"data": {"id": "4", "name": "C-3PO"}, "id": "8"}, ], "completed": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"id": "1"}, + {"id": "2"}, + {"id": "3"}, + {"id": "5"}, + {"id": "6"}, + {"id": "7"}, + {"id": "9"}, + {"id": "10"}, + {"id": "11"}, + {"id": "0"}, + {"id": "4"}, + {"id": "8"}, ], "hasNext": False, }, @@ -1295,17 +1206,18 @@ async def deduplicates_fields_present_in_the_initial_payload(): "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, } }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "incremental": [ { "data": {"bar": "bar"}, - "path": ["hero", "nestedObject", "deeperObject"], + "id": "0", + "subPath": 
["nestedObject", "deeperObject"], }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1339,36 +1251,25 @@ async def deduplicates_fields_present_in_a_parent_defer_payload(): assert result == [ { "data": {"hero": {}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "pending": [ + {"id": "1", "path": ["hero", "nestedObject", "deeperObject"]} + ], "incremental": [ { - "data": { - "nestedObject": { - "deeperObject": { - "foo": "foo", - }, - } - }, - "path": ["hero"], + "data": {"nestedObject": {"deeperObject": {"foo": "foo"}}}, + "id": "0", }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "data": { - "bar": "bar", - }, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "incremental": [{"data": {"bar": "bar"}, "id": "1"}], + "completed": [{"id": "1"}], "hasNext": False, }, ] @@ -1436,39 +1337,34 @@ async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): }, }, }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject"]}], + "pending": [{"id": "1", "path": ["hero", "nestedObject"]}], "incremental": [ { "data": {"bar": "bar"}, - "path": ["hero", "nestedObject", "deeperObject"], + "id": "0", + "subPath": ["nestedObject", "deeperObject"], }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "pending": [ + {"id": "2", "path": ["hero", "nestedObject", "deeperObject"]} + ], "incremental": [ - { - "data": {"baz": "baz"}, - "path": ["hero", "nestedObject", "deeperObject"], - }, + {"data": {"baz": "baz"}, "id": "1", "subPath": 
["deeperObject"]}, ], "hasNext": True, - "completed": [{"path": ["hero", "nestedObject"]}], + "completed": [{"id": "1"}], }, { - "incremental": [ - { - "data": {"bak": "bak"}, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "incremental": [{"data": {"bak": "bak"}, "id": "2"}], + "completed": [{"id": "2"}], "hasNext": False, }, ] @@ -1509,37 +1405,22 @@ async def deduplicates_fields_from_deferred_fragments_branches_same_level(): { "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, "pending": [ - {"path": ["hero"]}, - {"path": ["hero", "nestedObject", "deeperObject"]}, + {"id": "0", "path": ["hero"]}, + {"id": "1", "path": ["hero", "nestedObject", "deeperObject"]}, ], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], - "incremental": [ - { - "data": { - "foo": "foo", - }, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [ - {"path": ["hero"]}, - {"path": ["hero", "nestedObject", "deeperObject"]}, + "pending": [ + {"id": "2", "path": ["hero", "nestedObject", "deeperObject"]} ], + "incremental": [{"data": {"foo": "foo"}, "id": "1"}], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": True, }, { - "incremental": [ - { - "data": { - "bar": "bar", - }, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "incremental": [{"data": {"bar": "bar"}, "id": "2"}], + "completed": [{"id": "2"}], "hasNext": False, }, ] @@ -1584,21 +1465,15 @@ async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): assert result == [ { "data": {"a": {"b": {"c": {"d": "d"}}}}, - "pending": [{"path": []}, {"path": ["a", "b"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a", "b"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"e": {"f": "f"}}, - "path": ["a", "b"], - }, - { - "data": {"g": {"h": "h"}}, - "path": 
[], - }, + {"data": {"e": {"f": "f"}}, "id": "1"}, + {"data": {"g": {"h": "h"}}, "id": "0"}, ], - "completed": [{"path": ["a", "b"]}, {"path": []}], + "completed": [{"id": "1"}, {"id": "0"}], "hasNext": False, }, ] @@ -1638,23 +1513,17 @@ async def nulls_cross_defer_boundaries_null_first(): assert result == [ { "data": {"a": {}}, - "pending": [{"path": []}, {"path": ["a"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"b": {"c": {}}}, - "path": ["a"], - }, - { - "data": {"d": "d"}, - "path": ["a", "b", "c"], - }, + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "1", "subPath": ["b", "c"]}, ], "completed": [ { - "path": [], + "id": "0", "errors": [ { "message": "Cannot return null" @@ -1664,7 +1533,7 @@ async def nulls_cross_defer_boundaries_null_first(): }, ], }, - {"path": ["a"]}, + {"id": "1"}, ], "hasNext": False, }, @@ -1709,23 +1578,17 @@ async def nulls_cross_defer_boundaries_value_first(): assert result == [ { "data": {"a": {}}, - "pending": [{"path": []}, {"path": ["a"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"b": {"c": {}}}, - "path": ["a"], - }, - { - "data": {"d": "d"}, - "path": ["a", "b", "c"], - }, + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "0", "subPath": ["a", "b", "c"]}, ], "completed": [ { - "path": ["a"], + "id": "1", "errors": [ { "message": "Cannot return null" @@ -1735,9 +1598,7 @@ async def nulls_cross_defer_boundaries_value_first(): }, ], }, - { - "path": [], - }, + {"id": "0"}, ], "hasNext": False, }, @@ -1783,27 +1644,21 @@ async def filters_a_payload_with_a_null_that_cannot_be_merged(): assert result == [ { "data": {"a": {}}, - "pending": [{"path": []}, {"path": ["a"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"b": {"c": {}}}, - "path": ["a"], - }, - 
{ - "data": {"d": "d"}, - "path": ["a", "b", "c"], - }, + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "1", "subPath": ["b", "c"]}, ], - "completed": [{"path": ["a"]}], + "completed": [{"id": "1"}], "hasNext": True, }, { "completed": [ { - "path": [], + "id": "0", "errors": [ { "message": "Cannot return null" @@ -1834,10 +1689,7 @@ async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): """ ) result = await complete( - document, - { - "hero": {**hero, "nonNullName": lambda _info: None}, - }, + document, {"hero": {**hero, "nonNullName": lambda _info: None}} ) assert result == { @@ -1866,23 +1718,20 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): """ ) result = await complete( - document, - { - "hero": {**hero, "nonNullName": lambda _info: None}, - }, + document, {"hero": {**hero, "nonNullName": lambda _info: None}} ) assert result == [ { "data": {}, - "pending": [{"path": []}], + "pending": [{"id": "0", "path": []}], "hasNext": True, }, { "incremental": [ { "data": {"hero": None}, - "path": [], + "id": "0", "errors": [ { "message": "Cannot return null" @@ -1893,7 +1742,7 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): ], }, ], - "completed": [{"path": []}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1929,13 +1778,10 @@ async def deduplicates_list_fields(): ] } }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_async_iterable_list_fields(): @@ -1957,22 +1803,16 @@ async def deduplicates_async_iterable_list_fields(): ) result = await complete( - document, - { - "hero": {**hero, "friends": Resolvers.first_friend}, - }, + document, {"hero": {**hero, "friends": Resolvers.first_friend}} ) assert result == [ { "data": {"hero": {"friends": [{"name": 
"Han"}]}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_empty_async_iterable_list_fields(): @@ -1999,22 +1839,16 @@ async def resolve_friends(_info): yield friend # pragma: no cover result = await complete( - document, - { - "hero": {**hero, "friends": resolve_friends}, - }, + document, {"hero": {**hero, "friends": resolve_friends}} ) assert result == [ { "data": {"hero": {"friends": []}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): @@ -2047,25 +1881,16 @@ async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): ] } }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "2"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"id": "3"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"id": "4"}, - "path": ["hero", "friends", 2], - }, + {"data": {"id": "2"}, "id": "0", "subPath": ["friends", 0]}, + {"data": {"id": "3"}, "id": "0", "subPath": ["friends", 1]}, + {"data": {"id": "4"}, "id": "0", "subPath": ["friends", 2]}, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -2094,13 +1919,10 @@ async def deduplicates_list_fields_that_return_empty_lists(): assert result == [ { "data": {"hero": {"friends": []}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_null_object_fields(): @@ 
-2127,13 +1949,10 @@ async def deduplicates_null_object_fields(): assert result == [ { "data": {"hero": {"nestedObject": None}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_async_object_fields(): @@ -2164,13 +1983,10 @@ async def resolve_nested_object(_info): assert result == [ { "data": {"hero": {"nestedObject": {"name": "foo"}}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -2193,14 +2009,14 @@ async def handles_errors_thrown_in_deferred_fragments(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "incremental": [ { "data": {"name": None}, - "path": ["hero"], + "id": "0", "errors": [ { "message": "bad", @@ -2210,7 +2026,7 @@ async def handles_errors_thrown_in_deferred_fragments(): ], }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -2237,13 +2053,13 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "completed": [ { - "path": ["hero"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -2311,13 +2127,13 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "completed": [ { - "path": ["hero"], + "id": "0", "errors": [ { "message": "Cannot 
return null for non-nullable field" @@ -2358,44 +2174,28 @@ async def returns_payloads_in_correct_order(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "pending": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 1]}, + {"id": "3", "path": ["hero", "friends", 2]}, ], "incremental": [ - { - "data": {"name": "slow", "friends": [{}, {}, {}]}, - "path": ["hero"], - } + {"data": {"name": "slow", "friends": [{}, {}, {}]}, "id": "0"} ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { "incremental": [ - { - "data": {"name": "Han"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"name": "Leia"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"name": "C-3PO"}, - "path": ["hero", "friends", 2], - }, - ], - "completed": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"data": {"name": "Han"}, "id": "1"}, + {"data": {"name": "Leia"}, "id": "2"}, + {"data": {"name": "C-3PO"}, "id": "3"}, ], + "completed": [{"id": "1"}, {"id": "2"}, {"id": "3"}], "hasNext": False, }, ] @@ -2426,44 +2226,28 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "pending": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 1]}, + {"id": "3", "path": ["hero", "friends", 2]}, ], "incremental": [ - { - "data": {"name": "Luke", "friends": [{}, {}, {}]}, - "path": ["hero"], - } + {"data": {"name": "Luke", "friends": 
[{}, {}, {}]}, "id": "0"} ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { "incremental": [ - { - "data": {"name": "Han"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"name": "Leia"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"name": "C-3PO"}, - "path": ["hero", "friends", 2], - }, - ], - "completed": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"data": {"name": "Han"}, "id": "1"}, + {"data": {"name": "Leia"}, "id": "2"}, + {"data": {"name": "C-3PO"}, "id": "3"}, ], + "completed": [{"id": "1"}, {"id": "2"}, {"id": "3"}], "hasNext": False, }, ] diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 987eba45..b03004de 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -244,19 +244,12 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): assert patches == [ { "data": {"first": {}, "second": {"theNumber": 2}}, - "pending": [{"path": ["first"], "label": "defer-label"}], + "pending": [{"id": "0", "path": ["first"], "label": "defer-label"}], "hasNext": True, }, { - "incremental": [ - { - "path": ["first"], - "data": { - "promiseToGetTheNumber": 2, - }, - }, - ], - "completed": [{"path": ["first"], "label": "defer-label"}], + "incremental": [{"id": "0", "data": {"promiseToGetTheNumber": 2}}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -319,19 +312,12 @@ async def mutation_with_defer_is_not_executed_serially(): assert patches == [ { "data": {"second": {"theNumber": 2}}, - "pending": [{"path": [], "label": "defer-label"}], + "pending": [{"id": "0", "path": [], "label": "defer-label"}], "hasNext": True, }, { - "incremental": [ - { - "path": [], - "data": { - "first": {"theNumber": 1}, - }, - }, - ], - "completed": [{"path": [], "label": "defer-label"}], + "incremental": [{"id": "0", "data": {"first": {"theNumber": 1}}}], + "completed": [{"id": 
"0"}], "hasNext": False, }, ] diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 487817b4..46237fc1 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -148,39 +148,39 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_stream_directive(): def can_format_and_print_incremental_stream_result(): - result = IncrementalStreamResult(items=[], path=[]) - assert result.formatted == {"items": [], "path": []} - assert str(result) == "IncrementalStreamResult(items=[], path=[])" + result = IncrementalStreamResult(items=["hello", "world"], id="foo") + assert result.formatted == {"items": ["hello", "world"], "id": "foo"} + assert ( + str(result) == "IncrementalStreamResult(items=['hello', 'world'], id='foo')" + ) result = IncrementalStreamResult( items=["hello", "world"], - errors=[GraphQLError("msg")], - path=["foo", 1], + id="foo", + sub_path=["bar", 1], + errors=[GraphQLError("oops")], extensions={"baz": 2}, ) assert result.formatted == { "items": ["hello", "world"], - "errors": [{"message": "msg"}], + "id": "foo", + "subPath": ["bar", 1], + "errors": [{"message": "oops"}], "extensions": {"baz": 2}, - "path": ["foo", 1], } assert ( str(result) == "IncrementalStreamResult(items=['hello', 'world']," - " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" + " id='foo', sub_path=['bar', 1], errors=[GraphQLError('oops')]," + " extensions={'baz': 2})" ) - def can_print_stream_record(): - record = StreamRecord(Path(None, 0, None)) - assert str(record) == "StreamRecord(path=[0])" - record = StreamRecord(Path(None, "bar", "Bar"), "foo") - assert str(record) == "StreamRecord(path=['bar'], label='foo')" - # noinspection PyTypeChecker def can_compare_incremental_stream_result(): args: dict[str, Any] = { "items": ["hello", "world"], - "errors": [GraphQLError("msg")], - "path": ["foo", 1], + "id": "foo", + "sub_path": ["bar", 1], + "errors": 
[GraphQLError("oops")], "extensions": {"baz": 2}, } result = IncrementalStreamResult(**args) @@ -188,22 +188,34 @@ def can_compare_incremental_stream_result(): assert result != IncrementalStreamResult( **modified_args(args, items=["hello", "foo"]) ) + assert result != IncrementalStreamResult(**modified_args(args, id="bar")) + assert result != IncrementalStreamResult( + **modified_args(args, sub_path=["bar", 2]) + ) assert result != IncrementalStreamResult(**modified_args(args, errors=[])) - assert result != IncrementalStreamResult(**modified_args(args, path=["foo", 2])) assert result != IncrementalStreamResult( **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] - assert result != (["hello", "world"], []) + assert result != (["hello", "world"], "bar") + args["subPath"] = args.pop("sub_path") assert result == args assert result != {**args, "items": ["hello", "foo"]} + assert result != {**args, "id": "bar"} + assert result != {**args, "subPath": ["bar", 2]} assert result != {**args, "errors": []} - assert result != {**args, "path": ["foo", 2]} assert result != {**args, "extensions": {"baz": 1}} + def can_print_stream_record(): + record = StreamRecord(Path(None, 0, None)) + assert str(record) == "StreamRecord(path=[0])" + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" + @pytest.mark.asyncio async def can_stream_a_list_field(): document = parse("{ scalarList @stream(initialCount: 1) }") @@ -212,19 +224,14 @@ async def can_stream_a_list_field(): ) assert result == [ { - "data": { - "scalarList": ["apple"], - }, - "pending": [{"path": ["scalarList"]}], - "hasNext": True, - }, - { - "incremental": [{"items": ["banana"], "path": ["scalarList"]}], + "data": {"scalarList": ["apple"]}, + "pending": 
[{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, { - "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], - "completed": [{"path": ["scalarList"]}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -237,23 +244,15 @@ async def can_use_default_value_of_initial_count(): ) assert result == [ { - "data": { - "scalarList": [], - }, - "pending": [{"path": ["scalarList"]}], + "data": {"scalarList": []}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, + {"incremental": [{"items": ["apple"], "id": "0"}], "hasNext": True}, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, { - "incremental": [{"items": ["apple"], "path": ["scalarList"]}], - "hasNext": True, - }, - { - "incremental": [{"items": ["banana"], "path": ["scalarList"]}], - "hasNext": True, - }, - { - "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], - "completed": [{"path": ["scalarList"]}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -265,9 +264,7 @@ async def negative_values_of_initial_count_throw_field_errors(): document, {"scalarList": ["apple", "banana", "coconut"]} ) assert result == { - "data": { - "scalarList": None, - }, + "data": {"scalarList": None}, "errors": [ { "message": "initialCount must be a positive integer", @@ -282,9 +279,7 @@ async def non_integer_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: 1.5) }") result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) assert result == { - "data": { - "scalarList": None, - }, + "data": {"scalarList": None}, "errors": [ { "message": "Argument 'initialCount' has invalid value 1.5.", @@ -304,29 +299,16 @@ async def returns_label_from_stream_directive(): ) assert result == [ { - "data": { - "scalarList": 
["apple"], - }, - "pending": [{"path": ["scalarList"], "label": "scalar-stream"}], - "hasNext": True, - }, - { - "incremental": [ - { - "items": ["banana"], - "path": ["scalarList"], - } + "data": {"scalarList": ["apple"]}, + "pending": [ + {"id": "0", "path": ["scalarList"], "label": "scalar-stream"} ], "hasNext": True, }, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, { - "incremental": [ - { - "items": ["coconut"], - "path": ["scalarList"], - } - ], - "completed": [{"path": ["scalarList"], "label": "scalar-stream"}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -339,12 +321,7 @@ async def throws_an_error_for_stream_directive_with_non_string_label(): "data": {"scalarList": None}, "errors": [ { - "locations": [ - { - "line": 1, - "column": 46, - } - ], + "locations": [{"line": 1, "column": 46}], "message": "Argument 'label' has invalid value 42.", "path": ["scalarList"], } @@ -357,11 +334,7 @@ async def can_disable_stream_using_if_argument(): result = await complete( document, {"scalarList": ["apple", "banana", "coconut"]} ) - assert result == { - "data": { - "scalarList": ["apple", "banana", "coconut"], - }, - } + assert result == {"data": {"scalarList": ["apple", "banana", "coconut"]}} @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -375,20 +348,13 @@ async def does_not_disable_stream_with_null_if_argument(): ) assert result == [ { - "data": { - "scalarList": ["apple", "banana"], - }, - "pending": [{"path": ["scalarList"]}], + "data": {"scalarList": ["apple", "banana"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": ["coconut"], - "path": ["scalarList"], - } - ], - "completed": [{"path": ["scalarList"]}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -408,29 +374,19 @@ async def 
can_stream_multi_dimensional_lists(): ) assert result == [ { - "data": { - "scalarListList": [["apple", "apple", "apple"]], - }, - "pending": [{"path": ["scalarListList"]}], + "data": {"scalarListList": [["apple", "apple", "apple"]]}, + "pending": [{"id": "0", "path": ["scalarListList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [["banana", "banana", "banana"]], - "path": ["scalarListList"], - } - ], + "incremental": [{"items": [["banana", "banana", "banana"]], "id": "0"}], "hasNext": True, }, { "incremental": [ - { - "items": [["coconut", "coconut", "coconut"]], - "path": ["scalarListList"], - } + {"items": [["coconut", "coconut", "coconut"]], "id": "0"} ], - "completed": [{"path": ["scalarListList"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -463,17 +419,12 @@ async def await_friend(f): {"name": "Han", "id": "2"}, ], }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -501,35 +452,20 @@ async def await_friend(f): assert result == [ { "data": {"friendList": []}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Luke", "id": "1"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Han", "id": "2"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Han", "id": "2"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + 
"incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -569,17 +505,12 @@ async def get_id(f): {"name": "Han", "id": "2"}, ] }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -620,17 +551,12 @@ async def await_friend(f, i): "path": ["friendList", 1], } ], - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -664,14 +590,14 @@ async def await_friend(f, i): assert result == [ { "data": {"friendList": [{"name": "Luke", "id": "1"}]}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "bad", @@ -684,13 +610,8 @@ async def await_friend(f, i): "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -716,40 +637,22 @@ async def friend_list(_info): assert result == [ { "data": {"friendList": []}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": 
"Luke", "id": "1"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Luke", "id": "1"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Han", "id": "2"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Han", "id": "2"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -778,22 +681,14 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -852,7 +747,7 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, }, @@ -860,17 +755,14 @@ async def friend_list(_info): "done": False, "value": { "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } + {"items": [{"name": "Leia", "id": "3"}], "id": "0"} ], "hasNext": True, }, }, { "done": False, - "value": {"completed": [{"path": ["friendList"]}], "hasNext": False}, + "value": {"completed": [{"id": "0"}], "hasNext": False}, }, {"done": True, "value": None}, ] @@ -924,16 +816,14 @@ async def friend_list(_info): result = await complete(document, {"friendList": friend_list}) assert result == [ { - "data": { - 
"friendList": [{"name": "Luke", "id": "1"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["friendList"], + "id": "0", "errors": [ { "message": "bad", @@ -964,16 +854,14 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): ) assert result == [ { - "data": { - "nonNullFriendList": [{"name": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"name": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1010,16 +898,14 @@ async def friend_list(_info): result = await complete(document, {"nonNullFriendList": friend_list}) assert result == [ { - "data": { - "nonNullFriendList": [{"name": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"name": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1050,17 +936,15 @@ async def scalar_list(_info): result = await complete(document, {"scalarList": scalar_list}) assert result == [ { - "data": { - "scalarList": ["Luke"], - }, - "pending": [{"path": ["scalarList"]}], + "data": {"scalarList": ["Luke"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["scalarList"], + "id": "0", "errors": [ { "message": "String cannot represent value: {}", @@ -1070,7 +954,7 @@ async def scalar_list(_info): ], }, ], - "completed": [{"path": ["scalarList"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1104,17 +988,15 @@ def get_friends(_info): 
) assert result == [ { - "data": { - "friendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1127,13 +1009,8 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["friendList"], - }, - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1166,17 +1043,15 @@ def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1189,13 +1064,8 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1229,16 +1099,14 @@ def get_friends(_info): ) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1283,13 +1151,13 @@ def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, - "pending": [{"path": 
["nonNullFriendList"]}], + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1333,17 +1201,15 @@ async def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1356,18 +1222,10 @@ async def get_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["friendList"], - }, - ], + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -1394,22 +1252,18 @@ async def get_friends(_info): result = await complete( document, - { - "nonNullFriendList": get_friends, - }, + {"nonNullFriendList": get_friends}, ) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1463,16 +1317,14 @@ async def __anext__(self): result = await complete(document, {"nonNullFriendList": async_iterable}) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": 
["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1533,16 +1385,14 @@ async def aclose(self): result = await complete(document, {"nonNullFriendList": async_iterable}) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1593,18 +1443,11 @@ async def friend_list(_info): { "message": "Cannot return null for non-nullable field" " NestedObject.nonNullScalarField.", - "locations": [ - { - "line": 4, - "column": 17, - } - ], + "locations": [{"line": 4, "column": 17}], "path": ["nestedObject", "nonNullScalarField"], }, ], - "data": { - "nestedObject": None, - }, + "data": {"nestedObject": None}, } @pytest.mark.asyncio @@ -1644,9 +1487,7 @@ async def friend_list(_info): "path": ["nestedObject", "nonNullScalarField"], }, ], - "data": { - "nestedObject": None, - }, + "data": {"nestedObject": None}, } @pytest.mark.asyncio @@ -1692,8 +1533,8 @@ async def friend_list(_info): "nestedObject": {"nestedFriendList": []}, }, "pending": [ - {"path": ["otherNestedObject"]}, - {"path": ["nestedObject", "nestedFriendList"]}, + {"id": "0", "path": ["otherNestedObject"]}, + {"id": "1", "path": ["nestedObject", "nestedFriendList"]}, ], "hasNext": True, }, @@ -1701,7 +1542,7 @@ async def friend_list(_info): "incremental": [ { "data": {"scalarField": None}, - "path": ["otherNestedObject"], + "id": "0", "errors": [ { "message": "Oops", @@ -1710,18 +1551,12 @@ async def friend_list(_info): }, ], }, - { - "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList"], - }, + {"items": [{"name": "Luke"}], "id": "1"}, ], - "completed": [{"path": ["otherNestedObject"]}], + "completed": [{"id": "0"}], "hasNext": True, }, - { - "completed": 
[{"path": ["nestedObject", "nestedFriendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "1"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -1764,19 +1599,15 @@ async def friend_list(_info): assert result == [ { - "data": { - "nestedObject": {}, - }, - "pending": [{"path": ["nestedObject"]}], + "data": {"nestedObject": {}}, + "pending": [{"id": "0", "path": ["nestedObject"]}], "hasNext": True, }, { "incremental": [ { - "data": { - "deeperNestedObject": None, - }, - "path": ["nestedObject"], + "data": {"deeperNestedObject": None}, + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1791,7 +1622,7 @@ async def friend_list(_info): ], }, ], - "completed": [{"path": ["nestedObject"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1828,17 +1659,15 @@ async def friend_list(_info): assert result == [ { - "data": { - "friendList": [], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": []}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1851,10 +1680,7 @@ async def friend_list(_info): ], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.timeout(1) @@ -1908,7 +1734,7 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == { "data": {"nestedObject": {}}, - "pending": [{"path": ["nestedObject"]}], + "pending": [{"id": "0", "path": ["nestedObject"]}], "hasNext": True, } @@ -1919,7 +1745,7 @@ async def iterable(_info): "incremental": [ { "data": {"deeperNestedObject": None}, - "path": ["nestedObject"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1934,7 +1760,7 @@ async def iterable(_info): ], }, ], - "completed": [{"path": ["nestedObject"]}], + 
"completed": [{"id": "0"}], "hasNext": False, } @@ -1976,34 +1802,19 @@ async def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"id": "1", "name": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"id": "1", "name": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"id": "2", "name": "Han"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "3", "name": "Leia"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"id": "3", "name": "Leia"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -2043,40 +1854,23 @@ async def get_nested_friend_list(_info): assert result == [ { - "data": { - "nestedObject": { - "nestedFriendList": [], - }, - }, + "data": {"nestedObject": {"nestedFriendList": []}}, "pending": [ - {"path": ["nestedObject"]}, - {"path": ["nestedObject", "nestedFriendList"]}, + {"id": "0", "path": ["nestedObject"]}, + {"id": "1", "path": ["nestedObject", "nestedFriendList"]}, ], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], - "completed": [{"path": ["nestedObject"]}], + "incremental": [{"items": [{"id": "1", "name": "Luke"}], "id": "1"}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], + "incremental": [{"items": [{"id": "2", "name": "Han"}], "id": "1"}], "hasNext": True, }, - { - "completed": [{"path": ["nestedObject", "nestedFriendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "1"}], "hasNext": False}, ] @pytest.mark.asyncio @@ 
-2124,48 +1918,32 @@ async def get_friends(_info): result1 = execute_result.initial_result assert result1 == { "data": {"nestedObject": {}}, - "pending": [{"path": ["nestedObject"]}], + "pending": [{"id": "0", "path": ["nestedObject"]}], "hasNext": True, } resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { - "pending": [{"path": ["nestedObject", "nestedFriendList"]}], + "pending": [{"id": "1", "path": ["nestedObject", "nestedFriendList"]}], "incremental": [ - { - "data": {"scalarField": "slow", "nestedFriendList": []}, - "path": ["nestedObject"], - }, + {"data": {"scalarField": "slow", "nestedFriendList": []}, "id": "0"}, ], - "completed": [{"path": ["nestedObject"]}], + "completed": [{"id": "0"}], "hasNext": True, } result3 = await anext(iterator) assert result3.formatted == { - "incremental": [ - { - "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], + "incremental": [{"items": [{"name": "Luke"}], "id": "1"}], "hasNext": True, } result4 = await anext(iterator) assert result4.formatted == { - "incremental": [ - { - "items": [{"name": "Han"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], + "incremental": [{"items": [{"name": "Han"}], "id": "1"}], "hasNext": True, } result5 = await anext(iterator) - assert result5.formatted == { - "completed": [{"path": ["nestedObject", "nestedFriendList"]}], - "hasNext": False, - } + assert result5.formatted == {"completed": [{"id": "1"}], "hasNext": False} with pytest.raises(StopAsyncIteration): await anext(iterator) @@ -2214,8 +1992,8 @@ async def get_friends(_info): assert result1 == { "data": {"friendList": [{"id": "1"}]}, "pending": [ - {"path": ["friendList", 0], "label": "DeferName"}, - {"path": ["friendList"], "label": "stream-label"}, + {"id": "0", "path": ["friendList", 0], "label": "DeferName"}, + {"id": "1", "path": ["friendList"], "label": "stream-label"}, ], "hasNext": True, } @@ -2223,41 +2001,25 @@ async def get_friends(_info): 
resolve_iterable.set() result2 = await anext(iterator) assert result2.formatted == { - "pending": [{"path": ["friendList", 1], "label": "DeferName"}], + "pending": [{"id": "2", "path": ["friendList", 1], "label": "DeferName"}], "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["friendList", 0], - }, - { - "items": [{"id": "2"}], - "path": ["friendList"], - }, + {"data": {"name": "Luke"}, "id": "0"}, + {"items": [{"id": "2"}], "id": "1"}, ], - "completed": [{"path": ["friendList", 0], "label": "DeferName"}], + "completed": [{"id": "0"}], "hasNext": True, } resolve_slow_field.set() result3 = await anext(iterator) assert result3.formatted == { - "completed": [ - { - "path": ["friendList"], - "label": "stream-label", - }, - ], + "completed": [{"id": "1"}], "hasNext": True, } result4 = await anext(iterator) assert result4.formatted == { - "incremental": [ - { - "data": {"name": "Han"}, - "path": ["friendList", 1], - }, - ], - "completed": [{"path": ["friendList", 1], "label": "DeferName"}], + "incremental": [{"data": {"name": "Han"}, "id": "2"}], + "completed": [{"id": "2"}], "hasNext": False, } @@ -2307,8 +2069,8 @@ async def get_friends(_info): assert result1 == { "data": {"friendList": [{"id": "1"}]}, "pending": [ - {"path": ["friendList", 0], "label": "DeferName"}, - {"path": ["friendList"], "label": "stream-label"}, + {"id": "0", "path": ["friendList", 0], "label": "DeferName"}, + {"id": "1", "path": ["friendList"], "label": "stream-label"}, ], "hasNext": True, } @@ -2316,37 +2078,28 @@ async def get_friends(_info): resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { - "pending": [{"path": ["friendList", 1], "label": "DeferName"}], + "pending": [{"id": "2", "path": ["friendList", 1], "label": "DeferName"}], "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["friendList", 0], - }, - { - "items": [{"id": "2"}], - "path": ["friendList"], - }, + {"data": {"name": "Luke"}, "id": "0"}, + {"items": [{"id": "2"}], "id": 
"1"}, ], - "completed": [{"path": ["friendList", 0], "label": "DeferName"}], + "completed": [{"id": "0"}], "hasNext": True, } result3 = await anext(iterator) assert result3.formatted == { "incremental": [ - { - "data": {"name": "Han"}, - "path": ["friendList", 1], - }, + {"data": {"name": "Han"}, "id": "2"}, ], - "completed": [{"path": ["friendList", 1], "label": "DeferName"}], + "completed": [{"id": "2"}], "hasNext": True, } resolve_iterable.set() result4 = await anext(iterator) assert result4.formatted == { - "completed": [{"path": ["friendList"], "label": "stream-label"}], + "completed": [{"id": "1"}], "hasNext": False, } @@ -2385,7 +2138,10 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1"}]}, - "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "pending": [ + {"id": "0", "path": ["friendList", 0]}, + {"id": "1", "path": ["friendList"]}, + ], "hasNext": True, } @@ -2434,7 +2190,7 @@ async def __anext__(self): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1", "name": "Luke"}]}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, } @@ -2476,7 +2232,10 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1"}]}, - "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "pending": [ + {"id": "0", "path": ["friendList", 0]}, + {"id": "1", "path": ["friendList"]}, + ], "hasNext": True, } From 1285cd4baca467d721cdc637b1088c66f112e6e6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 19:48:38 +0100 Subject: [PATCH 211/230] skip unnecessary initialization of empty items array (#3962) Replicates graphql/graphql-js@b12dcffe83098922dcc6c0ec94eb6fc032bd9772 --- src/graphql/execution/incremental_publisher.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index d112651e..839f62d8 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -1017,6 +1017,8 @@ def _process_pending( if subsequent_result_record.stream_record.errors: continue incremental_result = IncrementalStreamResult( + # safe because `items` is always defined + # when the record is completed subsequent_result_record.items, # safe because `id` is defined # once the stream has been released as pending @@ -1068,6 +1070,7 @@ def _get_incremental_defer_result( sub_path = deferred_grouped_field_set_record.path[len(longest_path) :] id_ = record_with_longest_path.id return IncrementalDeferResult( + # safe because `data` is always defined when the record is completed data, # type: ignore # safe because `id` is defined # once the fragment has been released as pending @@ -1298,7 +1301,6 @@ def __init__( self.errors = [] self.is_completed_async_iterator = self.is_completed = False self.is_final_record = self.filtered = False - self.items = [] def __repr__(self) -> str: name = self.__class__.__name__ From a070e4154bc0f1f68086b5a160eadac0b4d26f8d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 19:55:41 +0100 Subject: [PATCH 212/230] Improve description for @oneOf directive Replicates graphql/graphql-js@acf05e365dc30b718712261b886cf7d1462ca28a --- src/graphql/type/directives.py | 5 +++-- tests/utilities/test_print_schema.py | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 5fe48b94..b73d938f 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -261,12 +261,13 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Exposes a URL that specifies the behavior of this scalar.", ) -# Used to declare an Input Object as a OneOf Input Objects. 
+# Used to indicate an Input Object is a OneOf Input Object. GraphQLOneOfDirective = GraphQLDirective( name="oneOf", locations=[DirectiveLocation.INPUT_OBJECT], args={}, - description="Indicates an Input Object is a OneOf Input Object.", + description="Indicates exactly one field must be supplied" + " and this field must not be `null`.", ) specified_directives: tuple[GraphQLDirective, ...] = ( diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 0e96bbbc..b12d30dc 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -771,7 +771,9 @@ def prints_introspection_schema(): url: String! ) on SCALAR - """Indicates an Input Object is a OneOf Input Object.""" + """ + Indicates exactly one field must be supplied and this field must not be `null`. + """ directive @oneOf on INPUT_OBJECT """ From facce8736b40eea4eda6fa827d80951c3e88a333 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 20:04:15 +0100 Subject: [PATCH 213/230] polish: improve add_deferred_fragments readability Replicates graphql/graphql-js@d32b99d003f8560cf0f878443fab1446f1adf20c --- src/graphql/execution/execute.py | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ac041392..174acadd 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1925,15 +1925,38 @@ def add_new_deferred_fragments( defer_map: RefMap[DeferUsage, DeferredFragmentRecord] | None = None, path: Path | None = None, ) -> RefMap[DeferUsage, DeferredFragmentRecord]: - """Add new deferred fragments to the defer map.""" + """Add new deferred fragments to the defer map. + + Instantiates new DeferredFragmentRecords for the given path within an + incremental data record, returning an updated map of DeferUsage + objects to DeferredFragmentRecords. 
+ + Note: As defer directives may be used with operations returning lists, + a DeferUsage object may correspond to many DeferredFragmentRecords. + + DeferredFragmentRecord creation includes the following steps: + 1. The new DeferredFragmentRecord is instantiated at the given path. + 2. The parent result record is calculated from the given incremental data record. + 3. The IncrementalPublisher is notified that a new DeferredFragmentRecord + with the calculated parent has been added; the record will be released only + after the parent has completed. + """ new_defer_map: RefMap[DeferUsage, DeferredFragmentRecord] if not new_defer_usages: + # Given no DeferUsages, return the existing map, creating one if necessary. return RefMap() if defer_map is None else defer_map new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + # For each new DeferUsage object: for defer_usage in new_defer_usages: ancestors = defer_usage.ancestors parent_defer_usage = ancestors[0] if ancestors else None + # If the parent target is defined, the parent target is a DeferUsage object + # and the parent result record is the DeferredFragmentRecord corresponding + # to that DeferUsage. + # If the parent target is not defined, the parent result record is either: + # - the InitialResultRecord, or + # - a StreamItemsRecord, as `@defer` may be nested under `@stream`. parent = ( cast(Union[InitialResultRecord, StreamItemsRecord], incremental_data_record) if parent_defer_usage is None @@ -1942,12 +1965,15 @@ def add_new_deferred_fragments( ) ) + # Instantiate the new record. deferred_fragment_record = DeferredFragmentRecord(path, defer_usage.label) + # Report the new record to the Incremental Publisher. incremental_publisher.report_new_defer_fragment_record( deferred_fragment_record, parent ) + # Update the map. 
new_defer_map[defer_usage] = deferred_fragment_record return new_defer_map From 965502c61ef312f4228ff4a9468e20cf65a42fe7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 20:14:44 +0100 Subject: [PATCH 214/230] Remove an unnecessary declaration --- src/graphql/execution/execute.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 174acadd..d52eac33 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1941,11 +1941,13 @@ def add_new_deferred_fragments( with the calculated parent has been added; the record will be released only after the parent has completed. """ - new_defer_map: RefMap[DeferUsage, DeferredFragmentRecord] if not new_defer_usages: # Given no DeferUsages, return the existing map, creating one if necessary. return RefMap() if defer_map is None else defer_map + + # Create a copy of the old map. new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + # For each new DeferUsage object: for defer_usage in new_defer_usages: ancestors = defer_usage.ancestors From c685d84f15b01cd5594cc7b962631a24f14a2793 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 20:24:56 +0100 Subject: [PATCH 215/230] Update dependencies --- poetry.lock | 81 ++++++++++++++++++++++++++++++++------------------ pyproject.toml | 2 +- 2 files changed, 53 insertions(+), 30 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2208d903..20a0ff50 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,13 +58,13 @@ files = [ [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = 
"sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, + {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, ] [[package]] @@ -741,6 +741,29 @@ perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] +[[package]] +name = "importlib-metadata" +version = "8.6.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1520,29 +1543,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.9.2" +version = "0.9.3" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"}, - {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"}, - {file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"}, - {file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"}, - {file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"}, - {file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"}, - {file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"}, + {file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"}, + {file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"}, + {file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"}, + {file = 
"ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"}, + {file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"}, + {file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"}, + {file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"}, + {file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"}, ] [[package]] @@ -1869,13 +1892,13 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.23.2" +version = "4.24.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, - {file = "tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, + {file = "tox-4.24.1-py3-none-any.whl", hash = 
"sha256:57ba7df7d199002c6df8c2db9e6484f3de6ca8f42013c083ea2d4d1e5c6bdc75"}, + {file = "tox-4.24.1.tar.gz", hash = "sha256:083a720adbc6166fff0b7d1df9d154f9d00bfccb9403b8abf6bc0ee435d6a62e"}, ] [package.dependencies] @@ -1883,13 +1906,13 @@ cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.16.1" -packaging = ">=24.1" +packaging = ">=24.2" platformdirs = ">=4.3.6" pluggy = ">=1.5" pyproject-api = ">=1.8" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +tomli = {version = ">=2.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.6" +virtualenv = ">=20.27.1" [package.extras] test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] @@ -2097,4 +2120,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "37c41caf594570c2c84273ca5abc41ab2ec53d4e05a7bf6440b3e10e6de122d7" +content-hash = "97f4c031d7769c7bad6adc5b4dfee58549dd3a445f991960527ec5e1212449b6" diff --git a/pyproject.toml b/pyproject.toml index bc191f97..0b0fcf5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest-codspeed = [ { version = "^2.2.1", python = "<3.8" } ] tox = [ - { version = "^4.16", python = ">=3.8" }, + { version = "^4.24", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] From 38186538e406cc588af34696b62da9ed9152d28b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 21:16:02 +0100 Subject: [PATCH 216/230] Allow injecting custom data to custom execution context (#226) --- src/graphql/execution/execute.py | 28 ++++++++++++++-------------- tests/execution/test_customize.py | 16 +++++++++++++++- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index d52eac33..90d3d73b 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ 
-4,6 +4,7 @@ from asyncio import ensure_future, gather, shield, wait_for from contextlib import suppress +from copy import copy from typing import ( Any, AsyncGenerator, @@ -219,6 +220,7 @@ def build( subscribe_field_resolver: GraphQLFieldResolver | None = None, middleware: Middleware | None = None, is_awaitable: Callable[[Any], bool] | None = None, + **custom_args: Any, ) -> list[GraphQLError] | ExecutionContext: """Build an execution context @@ -292,24 +294,14 @@ def build( IncrementalPublisher(), middleware_manager, is_awaitable, + **custom_args, ) def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: """Create a copy of the execution context for usage with subscribe events.""" - return self.__class__( - self.schema, - self.fragments, - payload, - self.context_value, - self.operation, - self.variable_values, - self.field_resolver, - self.type_resolver, - self.subscribe_field_resolver, - self.incremental_publisher, - self.middleware_manager, - self.is_awaitable, - ) + context = copy(self) + context.root_value = payload + return context def execute_operation( self, initial_result_record: InitialResultRecord @@ -1709,6 +1701,7 @@ def execute( middleware: Middleware | None = None, execution_context_class: type[ExecutionContext] | None = None, is_awaitable: Callable[[Any], bool] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[ExecutionResult]: """Execute a GraphQL operation. 
@@ -1741,6 +1734,7 @@ def execute( middleware, execution_context_class, is_awaitable, + **custom_context_args, ) if isinstance(result, ExecutionResult): return result @@ -1769,6 +1763,7 @@ def experimental_execute_incrementally( middleware: Middleware | None = None, execution_context_class: type[ExecutionContext] | None = None, is_awaitable: Callable[[Any], bool] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[ExecutionResult | ExperimentalIncrementalExecutionResults]: """Execute GraphQL operation incrementally (internal implementation). @@ -1797,6 +1792,7 @@ def experimental_execute_incrementally( subscribe_field_resolver, middleware, is_awaitable, + **custom_context_args, ) # Return early errors if execution context failed. @@ -2127,6 +2123,7 @@ def subscribe( subscribe_field_resolver: GraphQLFieldResolver | None = None, execution_context_class: type[ExecutionContext] | None = None, middleware: MiddlewareManager | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[AsyncIterator[ExecutionResult] | ExecutionResult]: """Create a GraphQL subscription. @@ -2167,6 +2164,7 @@ def subscribe( type_resolver, subscribe_field_resolver, middleware=middleware, + **custom_context_args, ) # Return early errors if execution context failed. @@ -2202,6 +2200,7 @@ def create_source_event_stream( type_resolver: GraphQLTypeResolver | None = None, subscribe_field_resolver: GraphQLFieldResolver | None = None, execution_context_class: type[ExecutionContext] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[AsyncIterable[Any] | ExecutionResult]: """Create source event stream @@ -2238,6 +2237,7 @@ def create_source_event_stream( field_resolver, type_resolver, subscribe_field_resolver, + **custom_context_args, ) # Return early errors if execution context failed. 
diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index ac8b9ae1..bf1859a2 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -43,6 +43,10 @@ def uses_a_custom_execution_context_class(): ) class TestExecutionContext(ExecutionContext): + def __init__(self, *args, **kwargs): + assert kwargs.pop("custom_arg", None) == "baz" + super().__init__(*args, **kwargs) + def execute_field( self, parent_type, @@ -62,7 +66,12 @@ def execute_field( ) return result * 2 # type: ignore - assert execute(schema, query, execution_context_class=TestExecutionContext) == ( + assert execute( + schema, + query, + execution_context_class=TestExecutionContext, + custom_arg="baz", + ) == ( {"foo": "barbar"}, None, ) @@ -101,6 +110,10 @@ async def custom_foo(): @pytest.mark.asyncio async def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): + def __init__(self, *args, **kwargs): + assert kwargs.pop("custom_arg", None) == "baz" + super().__init__(*args, **kwargs) + def build_resolve_info(self, *args, **kwargs): resolve_info = super().build_resolve_info(*args, **kwargs) resolve_info.context["foo"] = "bar" @@ -132,6 +145,7 @@ def resolve_foo(message, _info): document, context_value={}, execution_context_class=TestExecutionContext, + custom_arg="baz", ) assert isasyncgen(subscription) From 41058d7fb26bc3231e718a1d0a411ebca53e1004 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 21:35:08 +0100 Subject: [PATCH 217/230] Improve and update badges --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fa10c81c..00927a42 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,10 @@ a query language for APIs created by Facebook. 
[![PyPI version](https://badge.fury.io/py/graphql-core.svg)](https://badge.fury.io/py/graphql-core) [![Documentation Status](https://readthedocs.org/projects/graphql-core-3/badge/)](https://graphql-core-3.readthedocs.io) -![Test Status](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml/badge.svg) -![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) -[![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) +[![Test Status](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml/badge.svg)](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml) +[![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg)](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml) +[![CodSpeed](https://img.shields.io/endpoint?url=https://codspeed.io/badge.json)](https://codspeed.io/graphql-python/graphql-core) +[![Code style](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) An extensive test suite with over 2200 unit tests and 100% coverage replicates the complete test suite of GraphQL.js, ensuring that this port is reliable and compatible From 41da78af788b6fad6f6c46f9ca495f4091474106 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 23:27:32 +0100 Subject: [PATCH 218/230] Deep copy schema with directive with arg of custom type (#210) --- src/graphql/type/schema.py | 22 +++++++++++++++++----- tests/utilities/test_build_ast_schema.py | 19 +++++++++++++++++++ 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 3099991d..befefabd 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -21,6 +21,7 @@ GraphQLAbstractType, GraphQLCompositeType, GraphQLField, + GraphQLInputType, GraphQLInterfaceType, GraphQLNamedType, 
GraphQLObjectType, @@ -293,6 +294,8 @@ def __deepcopy__(self, memo_: dict) -> GraphQLSchema: directive if is_specified_directive(directive) else copy(directive) for directive in self.directives ] + for directive in directives: + remap_directive(directive, type_map) return self.__class__( self.query_type and cast(GraphQLObjectType, type_map[self.query_type.name]), self.mutation_type @@ -458,11 +461,7 @@ def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: """Change all references in the given named type to use this type map.""" - if is_union_type(type_): - type_.types = [ - type_map.get(member_type.name, member_type) for member_type in type_.types - ] - elif is_object_type(type_) or is_interface_type(type_): + if is_object_type(type_) or is_interface_type(type_): type_.interfaces = [ type_map.get(interface_type.name, interface_type) for interface_type in type_.interfaces @@ -477,9 +476,22 @@ def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: arg.type = remapped_type(arg.type, type_map) args[arg_name] = arg fields[field_name] = field + elif is_union_type(type_): + type_.types = [ + type_map.get(member_type.name, member_type) for member_type in type_.types + ] elif is_input_object_type(type_): fields = type_.fields for field_name, field in fields.items(): field = copy(field) # noqa: PLW2901 field.type = remapped_type(field.type, type_map) fields[field_name] = field + + +def remap_directive(directive: GraphQLDirective, type_map: TypeMap) -> None: + """Change all references in the given directive to use this type map.""" + args = directive.args + for arg_name, arg in args.items(): + arg = copy(arg) # noqa: PLW2901 + arg.type = cast(GraphQLInputType, remapped_type(arg.type, type_map)) + args[arg_name] = arg diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index d4c2dff9..d0196bd7 100644 --- 
a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -1222,6 +1222,25 @@ def can_deep_copy_schema(): # check that printing the copied schema gives the same SDL assert print_schema(copied) == sdl + def can_deep_copy_schema_with_directive_using_args_of_custom_type(): + sdl = dedent(""" + directive @someDirective(someArg: SomeEnum) on FIELD_DEFINITION + + enum SomeEnum { + ONE + TWO + } + + type Query { + someField: String @someDirective(someArg: ONE) + } + """) + schema = build_schema(sdl) + copied = deepcopy(schema) + # custom directives on field definitions cannot be reproduced + expected_sdl = sdl.replace(" @someDirective(someArg: ONE)", "") + assert print_schema(copied) == expected_sdl + def can_pickle_and_unpickle_star_wars_schema(): # create a schema from the star wars SDL schema = build_schema(sdl, assume_valid_sdl=True) From ab78551fb6084bb64df4bbeeef1d7df00974857b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 23:34:09 +0100 Subject: [PATCH 219/230] Update year of copyright --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 1d7afde0..aa88b282 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ # General information about the project. 
project = "GraphQL-core 3" -copyright = "2024, Christoph Zwerschke" +copyright = "2025, Christoph Zwerschke" author = "Christoph Zwerschke" # The version info for the project you're documenting, acts as replacement for From 1c11f15328ff8425a7bb63054c3b379c0d7739bc Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 15:07:39 +0100 Subject: [PATCH 220/230] Fix docstrings --- src/graphql/type/definition.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 480c1879..5b48c8b4 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1284,7 +1284,7 @@ class GraphQLInputObjectType(GraphQLNamedType): Example:: - NonNullFloat = GraphQLNonNull(GraphQLFloat()) + NonNullFloat = GraphQLNonNull(GraphQLFloat) class GeoPoint(GraphQLInputObjectType): name = 'GeoPoint' @@ -1292,7 +1292,7 @@ class GeoPoint(GraphQLInputObjectType): 'lat': GraphQLInputField(NonNullFloat), 'lon': GraphQLInputField(NonNullFloat), 'alt': GraphQLInputField( - GraphQLFloat(), default_value=0) + GraphQLFloat, default_value=0) } The outbound values will be Python dictionaries by default, but you can have them @@ -1511,7 +1511,7 @@ class GraphQLNonNull(GraphQLWrappingType[GNT_co]): class RowType(GraphQLObjectType): name = 'Row' fields = { - 'id': GraphQLField(GraphQLNonNull(GraphQLString())) + 'id': GraphQLField(GraphQLNonNull(GraphQLString)) } Note: the enforcement of non-nullability occurs within the executor. 
From d3c03e638487984e2d7fff67473bc123a51f6ee4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 15:10:58 +0100 Subject: [PATCH 221/230] Newer Python version should use newer tox versions --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 77f15bf1..581528cc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -22,7 +22,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install "tox>=3.28,<5" "tox-gh-actions>=3.2,<4" + pip install "tox>=4.24,<5" "tox-gh-actions>=3.2,<4" - name: Run unit tests with tox run: tox From d4f8b32410c8e56fd76eb10c50105ab3a9fa5a60 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 15:51:54 +0100 Subject: [PATCH 222/230] Fix issue with older tox versions --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 7f2e07d4..b7ee6b8c 100644 --- a/tox.ini +++ b/tox.ini @@ -47,9 +47,9 @@ deps = pytest-cov>=4.1,<7 pytest-describe>=2.2,<3 pytest-timeout>=2.3,<3 - py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 + py3{7,8,9},pypy39: typing-extensions>=4.7.1,<5 commands = # to also run the time-consuming tests: tox -e py312 -- --run-slow # to run the benchmarks: tox -e py312 -- -k benchmarks --benchmark-enable - py3{7,8,9,10,11,13}, pypy3{9,10}: pytest tests {posargs} + py3{7,8,9,10,11,13},pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 651ca5ceca8bb7c7cc7d8bb4fa0a545399e03854 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 16:29:36 +0100 Subject: [PATCH 223/230] Transform input objects used as default values (#206) --- src/graphql/execution/values.py | 17 +++++-- tests/execution/test_resolve.py | 87 +++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 4 
deletions(-) diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index fda472de..5309996a 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -26,6 +26,7 @@ GraphQLDirective, GraphQLField, GraphQLSchema, + is_input_object_type, is_input_type, is_non_null_type, ) @@ -171,8 +172,12 @@ def get_argument_values( argument_node = arg_node_map.get(name) if argument_node is None: - if arg_def.default_value is not Undefined: - coerced_values[arg_def.out_name or name] = arg_def.default_value + value = arg_def.default_value + if value is not Undefined: + if is_input_object_type(arg_def.type): + # coerce input value so that out_names are used + value = coerce_input_value(value, arg_def.type) + coerced_values[arg_def.out_name or name] = value elif is_non_null_type(arg_type): # pragma: no cover else msg = ( f"Argument '{name}' of required type '{arg_type}' was not provided." @@ -186,8 +191,12 @@ def get_argument_values( if isinstance(value_node, VariableNode): variable_name = value_node.name.value if variable_values is None or variable_name not in variable_values: - if arg_def.default_value is not Undefined: - coerced_values[arg_def.out_name or name] = arg_def.default_value + value = arg_def.default_value + if value is not Undefined: + if is_input_object_type(arg_def.type): + # coerce input value so that out_names are used + value = coerce_input_value(value, arg_def.type) + coerced_values[arg_def.out_name or name] = value elif is_non_null_type(arg_type): # pragma: no cover else msg = ( f"Argument '{name}' of required type '{arg_type}'" diff --git a/tests/execution/test_resolve.py b/tests/execution/test_resolve.py index 1c77af8b..db52d638 100644 --- a/tests/execution/test_resolve.py +++ b/tests/execution/test_resolve.py @@ -7,9 +7,11 @@ from graphql.type import ( GraphQLArgument, GraphQLField, + GraphQLID, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, + GraphQLList, GraphQLObjectType, GraphQLSchema, 
GraphQLString, @@ -213,6 +215,91 @@ def execute_query(query: str, root_value: Any = None) -> ExecutionResult: None, ) + def transforms_default_values_using_out_names(): + # This is an extension of GraphQL.js. + resolver_kwargs: Any + + def search_resolver(_obj: None, _info, **kwargs): + nonlocal resolver_kwargs + resolver_kwargs = kwargs + return [{"id": "42"}] + + filters_type = GraphQLInputObjectType( + "SearchFilters", + {"pageSize": GraphQLInputField(GraphQLInt, out_name="page_size")}, + ) + result_type = GraphQLObjectType("SearchResult", {"id": GraphQLField(GraphQLID)}) + query = GraphQLObjectType( + "Query", + { + "search": GraphQLField( + GraphQLList(result_type), + { + "searchFilters": GraphQLArgument( + filters_type, {"pageSize": 10}, out_name="search_filters" + ) + }, + resolve=search_resolver, + ) + }, + ) + schema = GraphQLSchema(query) + + resolver_kwargs = None + result = execute_sync(schema, parse("{ search { id } }")) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 10}} + + resolver_kwargs = None + result = execute_sync( + schema, parse("{ search(searchFilters:{pageSize: 25}) { id } }") + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 10}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + variable_values={"searchFilters": {"pageSize": 25}}, + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + + resolver_kwargs = 
None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters = {pageSize: 25}) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + def pass_error_from_resolver_wrapped_as_located_graphql_error(): def resolve(_obj, _info): raise ValueError("Some error") From 44334f30f5e0cd9ecb7995a38548f2dc2a728f9d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 18:02:41 +0100 Subject: [PATCH 224/230] Bump patch version and update README --- .bumpversion.cfg | 2 +- README.md | 8 ++++---- docs/conf.py | 2 +- pyproject.toml | 2 +- src/graphql/version.py | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e2aa0e98..e8560a6a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a6 +current_version = 3.3.0a7 commit = False tag = False diff --git a/README.md b/README.md index 00927a42..58b57b1f 100644 --- a/README.md +++ b/README.md @@ -11,15 +11,15 @@ a query language for APIs created by Facebook. [![CodSpeed](https://img.shields.io/endpoint?url=https://codspeed.io/badge.json)](https://codspeed.io/graphql-python/graphql-core) [![Code style](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) -An extensive test suite with over 2200 unit tests and 100% coverage replicates the +An extensive test suite with over 2500 unit tests and 100% coverage replicates the complete test suite of GraphQL.js, ensuring that this port is reliable and compatible with GraphQL.js. -The current stable version 3.2.5 of GraphQL-core is up-to-date with GraphQL.js +The current stable version 3.2.6 of GraphQL-core is up-to-date with GraphQL.js version 16.8.2 and supports Python versions 3.6 to 3.13. 
-You can also try out the latest alpha version 3.3.0a6 of GraphQL-core, -which is up-to-date with GraphQL.js version 17.0.0a2. +You can also try out the latest alpha version 3.3.0a7 of GraphQL-core, +which is up-to-date with GraphQL.js version 17.0.0a3. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. diff --git a/docs/conf.py b/docs/conf.py index aa88b282..e78359fe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = "3.3.0a6" +version = release = "3.3.0a7" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/pyproject.toml b/pyproject.toml index 0b0fcf5d..1dbd6636 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a6" +version = "3.3.0a7" description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 7b08ac67..311c74a0 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -8,9 +8,9 @@ __all__ = ["version", "version_info", "version_info_js", "version_js"] -version = "3.3.0a6" +version = "3.3.0a7" -version_js = "17.0.0a2" +version_js = "17.0.0a3" _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)") From dae8e8f4b71c02bc17eec1df9311ddf256ed342c Mon Sep 17 00:00:00 2001 From: sobolevn Date: Mon, 17 Feb 2025 23:00:32 +0300 Subject: [PATCH 225/230] Fix IntrospectionQuery type definition (#234) --- src/graphql/utilities/get_introspection_query.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index c23a1533..7b8c33bb 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ 
b/src/graphql/utilities/get_introspection_query.py @@ -302,7 +302,8 @@ class IntrospectionSchema(MaybeWithDescription): directives: list[IntrospectionDirective] -class IntrospectionQuery(TypedDict): - """The root typed dictionary for schema introspections.""" - - __schema: IntrospectionSchema +# The root typed dictionary for schema introspections. +IntrospectionQuery = TypedDict( # noqa: UP013 + "IntrospectionQuery", + {"__schema": IntrospectionSchema}, +) From 416247c1d511350445c23096f9491fbef424b69b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 17 Feb 2025 21:09:48 +0100 Subject: [PATCH 226/230] Fix mypy issues --- poetry.lock | 320 +++++++++--------- pyproject.toml | 3 +- src/graphql/type/definition.py | 6 +- .../utilities/get_introspection_query.py | 1 + tox.ini | 2 +- 5 files changed, 173 insertions(+), 159 deletions(-) diff --git a/poetry.lock b/poetry.lock index 20a0ff50..167d9627 100644 --- a/poetry.lock +++ b/poetry.lock @@ -30,20 +30,20 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "babel" -version = "2.16.0" +version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", 
"tzdata"] [[package]] name = "bump2version" @@ -69,13 +69,13 @@ files = [ [[package]] name = "certifi" -version = "2024.12.14" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, - {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -520,73 +520,74 @@ toml = ["tomli"] [[package]] name = "coverage" -version = "7.6.10" +version = "7.6.12" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, - {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, - {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, - {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, - {file = 
"coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, - {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, - {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, - {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, - {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, - {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, - 
{file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, - {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, - {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, - {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, - {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, - {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, - {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = 
"coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + 
{file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] @@ -741,29 +742,6 @@ perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] -[[package]] -name = "importlib-metadata" -version = "8.6.1" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] 
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1002,6 +980,59 @@ install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] +[[package]] +name = "mypy" +version = "1.15.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = 
"mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools 
(>=50)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1278,13 +1309,13 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-asyncio" -version = "0.25.2" +version = "0.25.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, - {file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, ] [package.dependencies] @@ -1357,23 +1388,23 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] name = "pytest-codspeed" -version = "3.1.2" +version = "3.2.0" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, - {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, - {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84e591a7a0f67d45e2dc9fd05b276971a3aabcab7478fe43363ebefec1358f4"}, - {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6ae6d094247156407770e6b517af70b98862dd59a3c31034aede11d5f71c32c"}, - {file = 
"pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0f264991de5b5cdc118b96fc671386cca3f0f34e411482939bf2459dc599097"}, - {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0695a4bcd5ff04e8379124dba5d9795ea5e0cadf38be7a0406432fc1467b555"}, - {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc356c8dcaaa883af83310f397ac06c96fac9b8a1146e303d4b374b2cb46a18"}, - {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc8a5d0366322a75cf562f7d8d672d28c1cf6948695c4dddca50331e08f6b3d5"}, - {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c5fe7a19b72f54f217480b3b527102579547b1de9fe3acd9e66cb4629ff46c8"}, - {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b67205755a665593f6521a98317d02a9d07d6fdc593f6634de2c94dea47a3055"}, - {file = "pytest_codspeed-3.1.2-py3-none-any.whl", hash = "sha256:5e7ed0315e33496c5c07dba262b50303b8d0bc4c3d10bf1d422a41e70783f1cb"}, - {file = "pytest_codspeed-3.1.2.tar.gz", hash = "sha256:09c1733af3aab35e94a621aa510f2d2114f65591e6f644c42ca3f67547edad4b"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5165774424c7ab8db7e7acdb539763a0e5657996effefdf0664d7fd95158d34"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bd55f92d772592c04a55209950c50880413ae46876e66bd349ef157075ca26c"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:4cf6f56067538f4892baa8d7ab5ef4e45bb59033be1ef18759a2c7fc55b32035"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39a687b05c3d145642061b45ea78e47e12f13ce510104d1a2cda00eee0e36f58"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46a1afaaa1ac4c2ca5b0700d31ac46d80a27612961d031067d73c6ccbd8d3c2b"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48ce3af3dfa78413ed3d69d1924043aa1519048dbff46edccf8f35a25dab3c2"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66692506d33453df48b36a84703448cb8b22953eea51f03fbb2eb758dc2bdc4f"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:479774f80d0bdfafa16112700df4dbd31bf2a6757fac74795fd79c0a7b3c389b"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:109f9f4dd1088019c3b3f887d003b7d65f98a7736ca1d457884f5aa293e8e81c"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2f69a03b52c9bb041aec1b8ee54b7b6c37a6d0a948786effa4c71157765b6da"}, + {file = "pytest_codspeed-3.2.0-py3-none-any.whl", hash = "sha256:54b5c2e986d6a28e7b0af11d610ea57bd5531cec8326abe486f1b55b09d91c39"}, + {file = "pytest_codspeed-3.2.0.tar.gz", hash = "sha256:f9d1b1a3b2c69cdc0490a1e8b1ced44bffbd0e8e21d81a7160cfdd923f6e8155"}, ] [package.dependencies] @@ -1471,13 +1502,13 @@ pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2024.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = 
"pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] @@ -1543,29 +1574,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.9.3" +version = "0.9.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"}, - {file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"}, - {file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"}, - {file = 
"ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"}, - {file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"}, - {file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"}, - {file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"}, - {file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"}, + {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, + {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, + {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, + {file = 
"ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, + {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, + {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, + {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, + {file = "ruff-0.9.6.tar.gz", hash = 
"sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, ] [[package]] @@ -2046,13 +2077,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.29.1" +version = "20.29.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, - {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, + {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, + {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, ] [package.dependencies] @@ -2098,26 +2129,7 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", 
"pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "97f4c031d7769c7bad6adc5b4dfee58549dd3a445f991960527ec5e1212449b6" +content-hash = "c5a8f50292a01acddd1ce62c872344c676ef173170c50fdc668114f5f787afe6" diff --git a/pyproject.toml b/pyproject.toml index 1dbd6636..2ef24f7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,8 @@ optional = true [tool.poetry.group.lint.dependencies] ruff = ">=0.9,<0.10" mypy = [ - { version = "^1.14", python = ">=3.8" }, + { version = "^1.15", python = ">=3.9" }, + { version = "~1.14", python = ">=3.8,<3.9" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1,<2" diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 5b48c8b4..2e557390 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -789,7 +789,7 @@ def fields(self) -> GraphQLFieldMap: return { assert_name(name): value if isinstance(value, GraphQLField) - else GraphQLField(value) # type: ignore + else GraphQLField(value) for name, value in fields.items() } @@ -894,7 +894,7 @@ def fields(self) -> GraphQLFieldMap: return { assert_name(name): value if isinstance(value, GraphQLField) - else GraphQLField(value) # type: ignore + else GraphQLField(value) for name, value in fields.items() } @@ -1361,7 +1361,7 @@ def fields(self) -> GraphQLInputFieldMap: return { assert_name(name): value if isinstance(value, GraphQLInputField) - else GraphQLInputField(value) # type: ignore + else GraphQLInputField(value) for name, value in fields.items() } diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 7b8c33bb..d9cb160f 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -303,6 +303,7 @@ class IntrospectionSchema(MaybeWithDescription): # The root typed dictionary for schema introspections. 
+# Note: We don't use class syntax here since the key looks like a private attribute. IntrospectionQuery = TypedDict( # noqa: UP013 "IntrospectionQuery", {"__schema": IntrospectionSchema}, diff --git a/tox.ini b/tox.ini index b7ee6b8c..b8601559 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.14,<2 + mypy>=1.15,<2 pytest>=8.3,<9 commands = mypy src tests From 2c94f03026932e929c329456da5403a773e9dab1 Mon Sep 17 00:00:00 2001 From: Willem Date: Sat, 3 May 2025 22:02:40 +1200 Subject: [PATCH 227/230] Update README to include Typed GraphQL (#237) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 58b57b1f..aa36c84d 100644 --- a/README.md +++ b/README.md @@ -232,6 +232,8 @@ in addition to using [mypy](https://mypy-lang.org/) as type checker. Arminio, is a new GraphQL library for Python 3, inspired by dataclasses, that is also using GraphQL-core 3 as underpinning. +* [Typed GraphQL](https://github.com/willemt/typed-graphql), thin layer over GraphQL-core that uses native Python types for creating GraphQL schemas. 
+ ## Changelog From 41799bb98589c4029bbc09901f39c7a4e752e610 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 3 May 2025 12:07:26 +0200 Subject: [PATCH 228/230] Update ruff --- poetry.lock | 450 ++++++++++++++++++++++++++----------------------- pyproject.toml | 2 +- tox.ini | 4 +- 3 files changed, 239 insertions(+), 217 deletions(-) diff --git a/poetry.lock b/poetry.lock index 167d9627..6af5b224 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,24 +58,24 @@ files = [ [[package]] name = "cachetools" -version = "5.5.1" +version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, - {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] @@ -246,103 +246,103 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.1" +version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = 
"charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = 
"charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = 
"charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, 
- {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = 
"charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = 
"charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = 
"charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = 
"charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, ] [[package]] @@ -520,74 +520,74 @@ toml = ["tomli"] [[package]] name = "coverage" -version = "7.6.12" +version = "7.8.0" description = "Code coverage measurement for Python" optional = false 
python-versions = ">=3.9" files = [ - {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, - {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, - {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, - {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, - {file = 
"coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, - {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, - {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, - {file = 
"coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, - {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, - {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, - {file = 
"coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, - {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, - {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, - {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = 
"sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, - {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, - {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, - {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, - {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, - {file = "coverage-7.6.12-py3-none-any.whl", hash = 
"sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, - {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = 
"coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = 
"coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, ] [package.dependencies] @@ -753,15 +753,26 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1044,6 +1055,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + [[package]] name = "packaging" version = "24.0" @@ -1057,13 +1079,13 @@ files = [ [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -1250,13 +1272,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -1456,13 +1478,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = 
"pytest-cov" -version = "6.0.0" +version = "6.1.1" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.9" files = [ - {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, - {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, ] [package.dependencies] @@ -1502,13 +1524,13 @@ pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2025.1" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] @@ -1555,13 +1577,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.9.4" +version = "14.0.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, + {file = "rich-14.0.0-py3-none-any.whl", hash 
= "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, ] [package.dependencies] @@ -1574,29 +1596,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.9.6" +version = "0.11.8" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, - {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, - {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, - {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, - {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, - {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, - {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, + {file = "ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3"}, + {file = "ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835"}, + {file = "ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304"}, + {file = "ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2"}, + {file = "ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4"}, + {file = "ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2"}, + {file = "ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8"}, ] [[package]] @@ -1923,17 +1945,17 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.24.1" +version = "4.25.0" description = 
"tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.24.1-py3-none-any.whl", hash = "sha256:57ba7df7d199002c6df8c2db9e6484f3de6ca8f42013c083ea2d4d1e5c6bdc75"}, - {file = "tox-4.24.1.tar.gz", hash = "sha256:083a720adbc6166fff0b7d1df9d154f9d00bfccb9403b8abf6bc0ee435d6a62e"}, + {file = "tox-4.25.0-py3-none-any.whl", hash = "sha256:4dfdc7ba2cc6fdc6688dde1b21e7b46ff6c41795fb54586c91a3533317b5255c"}, + {file = "tox-4.25.0.tar.gz", hash = "sha256:dd67f030317b80722cf52b246ff42aafd3ed27ddf331c415612d084304cf5e52"}, ] [package.dependencies] -cachetools = ">=5.5" +cachetools = ">=5.5.1" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.16.1" @@ -1941,12 +1963,12 @@ packaging = ">=24.2" platformdirs = ">=4.3.6" pluggy = ">=1.5" pyproject-api = ">=1.8" -tomli = {version = ">=2.1", markers = "python_version < \"3.11\""} +tomli = {version = ">=2.2.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} -virtualenv = ">=20.27.1" +virtualenv = ">=20.29.1" [package.extras] -test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.4)", "pytest-mock (>=3.14)"] [[package]] name = "typed-ast" @@ -2011,13 +2033,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = 
"typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] @@ -2077,13 +2099,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.29.2" +version = "20.30.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, - {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, + {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, + {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, ] [package.dependencies] @@ -2132,4 +2154,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "c5a8f50292a01acddd1ce62c872344c676ef173170c50fdc668114f5f787afe6" +content-hash = "73cdf582288c9a4f22ebca27df8a40982b23954061d23e7d2301dfe9877cdb8d" diff --git a/pyproject.toml b/pyproject.toml index 2ef24f7d..e8d2ec6d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,7 +85,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.9,<0.10" +ruff = ">=0.11,<0.12" mypy = [ { version = "^1.15", python = ">=3.9" }, { version = "~1.14", python = ">=3.8,<3.9" }, diff --git a/tox.ini b/tox.ini index b8601559..d7dc47bc 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ isolated_build = true [gh-actions] python = - 3: py311 + 3: py313 3.7: py37 3.8: py38 3.9: py39 @@ -18,7 +18,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.9,<0.10 +deps = ruff>=0.11,<0.12 commands = ruff check src tests ruff format --check src tests From 7b9e9226c8f7ecf7dffdd4364591c695c11c0480 Mon Sep 17 
00:00:00 2001 From: Christoph Zwerschke Date: Sat, 3 May 2025 12:50:26 +0200 Subject: [PATCH 229/230] Fix ruff issues --- src/graphql/execution/collect_fields.py | 2 +- src/graphql/execution/execute.py | 24 ++++---- src/graphql/graphql.py | 8 +-- src/graphql/language/parser.py | 16 ++--- src/graphql/language/print_location.py | 2 +- src/graphql/pyutils/async_reduce.py | 4 +- src/graphql/pyutils/identity_func.py | 2 +- src/graphql/pyutils/merge_kwargs.py | 2 +- src/graphql/type/definition.py | 35 ++++++----- src/graphql/type/directives.py | 4 +- src/graphql/type/schema.py | 17 +++--- src/graphql/type/validate.py | 6 +- src/graphql/utilities/build_ast_schema.py | 6 +- src/graphql/utilities/build_client_schema.py | 61 +++++++++++-------- src/graphql/utilities/coerce_input_value.py | 2 +- src/graphql/utilities/extend_schema.py | 20 +++--- .../utilities/get_introspection_query.py | 2 +- .../utilities/introspection_from_schema.py | 2 +- .../utilities/lexicographic_sort_schema.py | 17 +++--- .../utilities/strip_ignored_characters.py | 2 +- src/graphql/utilities/type_from_ast.py | 2 +- src/graphql/utilities/value_from_ast.py | 2 +- .../defer_stream_directive_on_root_field.py | 2 +- .../rules/executable_definitions.py | 2 +- .../validation/rules/known_argument_names.py | 2 +- .../validation/rules/known_directives.py | 4 +- .../validation/rules/known_type_names.py | 4 +- .../rules/overlapping_fields_can_be_merged.py | 6 +- .../rules/provided_required_arguments.py | 4 +- .../rules/stream_directive_on_list_field.py | 2 +- .../rules/unique_directives_per_location.py | 4 +- .../rules/values_of_correct_type.py | 4 +- src/graphql/validation/validation_context.py | 2 +- tests/error/test_graphql_error.py | 6 +- tests/error/test_located_error.py | 4 +- tests/execution/test_defer.py | 4 +- tests/execution/test_executor.py | 6 +- tests/execution/test_middleware.py | 2 +- tests/execution/test_nonnull.py | 10 +-- tests/language/test_block_string.py | 6 +- tests/language/test_parser.py | 
18 +++--- tests/language/test_source.py | 2 +- tests/language/test_visitor.py | 4 +- tests/pyutils/test_description.py | 10 +-- tests/test_user_registry.py | 2 +- tests/utilities/test_build_client_schema.py | 46 +++++++------- tests/utilities/test_print_schema.py | 2 +- .../assert_equal_awaitables_or_values.py | 2 +- 48 files changed, 211 insertions(+), 187 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 613a55c2..c3fc99cc 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -394,7 +394,7 @@ def build_grouped_field_sets( # All TargetSets that causes new grouped field sets consist only of DeferUsages # and have should_initiate_defer defined - new_grouped_field_set_details[cast(DeferUsageSet, masking_targets)] = ( + new_grouped_field_set_details[cast("DeferUsageSet", masking_targets)] = ( GroupedFieldSetDetails(new_grouped_field_set, should_initiate_defer) ) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 90d3d73b..1097e80f 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -975,7 +975,7 @@ def complete_list_value( if stream_record is not None: self.incremental_publisher.set_is_final_record( - cast(StreamItemsRecord, current_parents) + cast("StreamItemsRecord", current_parents) ) if not awaitable_indices: @@ -1113,7 +1113,7 @@ def complete_abstract_value( runtime_type = resolve_type_fn(result, info, return_type) if self.is_awaitable(runtime_type): - runtime_type = cast(Awaitable, runtime_type) + runtime_type = cast("Awaitable", runtime_type) async def await_complete_object_value() -> Any: value = self.complete_object_value( @@ -1136,7 +1136,7 @@ async def await_complete_object_value() -> Any: return value # pragma: no cover return await_complete_object_value() - runtime_type = cast(Optional[str], runtime_type) + runtime_type = cast("Optional[str]", runtime_type) return 
self.complete_object_value( self.ensure_valid_runtime_type( @@ -1358,9 +1358,9 @@ async def callback(payload: Any) -> ExecutionResult: # typecast to ExecutionResult, not possible to return # ExperimentalIncrementalExecutionResults when operation is 'subscription'. return ( - await cast(Awaitable[ExecutionResult], result) + await cast("Awaitable[ExecutionResult]", result) if self.is_awaitable(result) - else cast(ExecutionResult, result) + else cast("ExecutionResult", result) ) return map_async_iterable(result_or_stream, callback) @@ -1424,7 +1424,7 @@ def execute_deferred_grouped_field_set( ) if self.is_awaitable(incremental_result): - incremental_result = cast(Awaitable, incremental_result) + incremental_result = cast("Awaitable", incremental_result) async def await_incremental_result() -> None: try: @@ -1897,11 +1897,11 @@ def execute_sync( result, ExperimentalIncrementalExecutionResults ): if default_is_awaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + ensure_future(cast("Awaitable[ExecutionResult]", result)).cancel() msg = "GraphQL execution failed to complete synchronously." raise RuntimeError(msg) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def invalid_return_type_error( @@ -1956,7 +1956,9 @@ def add_new_deferred_fragments( # - the InitialResultRecord, or # - a StreamItemsRecord, as `@defer` may be nested under `@stream`. 
parent = ( - cast(Union[InitialResultRecord, StreamItemsRecord], incremental_data_record) + cast( + "Union[InitialResultRecord, StreamItemsRecord]", incremental_data_record + ) if parent_defer_usage is None else deferred_fragment_record_from_defer_usage( parent_defer_usage, new_defer_map @@ -2069,7 +2071,7 @@ def default_type_resolver( is_type_of_result = type_.is_type_of(value, info) if is_awaitable(is_type_of_result): - append_awaitable_results(cast(Awaitable, is_type_of_result)) + append_awaitable_results(cast("Awaitable", is_type_of_result)) append_awaitable_types(type_) elif is_type_of_result: return type_.name @@ -2257,7 +2259,7 @@ def create_source_event_stream_impl( return ExecutionResult(None, errors=[error]) if context.is_awaitable(event_stream): - awaitable_event_stream = cast(Awaitable, event_stream) + awaitable_event_stream = cast("Awaitable", event_stream) # noinspection PyShadowingNames async def await_event_stream() -> AsyncIterable[Any] | ExecutionResult: diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index aacc7326..fe1dd5c7 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -96,9 +96,9 @@ async def graphql( ) if default_is_awaitable(result): - return await cast(Awaitable[ExecutionResult], result) + return await cast("Awaitable[ExecutionResult]", result) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def assume_not_awaitable(_value: Any) -> bool: @@ -149,11 +149,11 @@ def graphql_sync( # Assert that the execution was synchronous. if default_is_awaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + ensure_future(cast("Awaitable[ExecutionResult]", result)).cancel() msg = "GraphQL execution failed to complete synchronously." 
raise RuntimeError(msg) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def graphql_impl( diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 55c249ba..59299a1d 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -255,7 +255,7 @@ def __init__( experimental_client_controlled_nullability: bool = False, ) -> None: if not is_source(source): - source = Source(cast(str, source)) + source = Source(cast("str", source)) self._no_location = no_location self._max_tokens = max_tokens @@ -319,7 +319,7 @@ def parse_definition(self) -> DefinitionNode: ) if keyword_token.kind is TokenKind.NAME: - token_name = cast(str, keyword_token.value) + token_name = cast("str", keyword_token.value) method_name = self._parse_type_system_definition_method_names.get( token_name ) @@ -472,7 +472,9 @@ def parse_arguments(self, is_const: bool) -> list[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument return self.optional_many( - TokenKind.PAREN_L, cast(Callable[[], ArgumentNode], item), TokenKind.PAREN_R + TokenKind.PAREN_L, + cast("Callable[[], ArgumentNode]", item), + TokenKind.PAREN_R, ) def parse_argument(self, is_const: bool = False) -> ArgumentNode: @@ -487,7 +489,7 @@ def parse_argument(self, is_const: bool = False) -> ArgumentNode: def parse_const_argument(self) -> ConstArgumentNode: """Argument[Const]: Name : Value[Const]""" - return cast(ConstArgumentNode, self.parse_argument(True)) + return cast("ConstArgumentNode", self.parse_argument(True)) # Implement the parsing rules in the Fragments section. 
@@ -641,7 +643,7 @@ def parse_variable_value(self, is_const: bool) -> VariableNode: return self.parse_variable() def parse_const_value_literal(self) -> ConstValueNode: - return cast(ConstValueNode, self.parse_value_literal(True)) + return cast("ConstValueNode", self.parse_value_literal(True)) # Implement the parsing rules in the Directives section. @@ -654,7 +656,7 @@ def parse_directives(self, is_const: bool) -> list[DirectiveNode]: return directives def parse_const_directives(self) -> list[ConstDirectiveNode]: - return cast(List[ConstDirectiveNode], self.parse_directives(True)) + return cast("List[ConstDirectiveNode]", self.parse_directives(True)) def parse_directive(self, is_const: bool) -> DirectiveNode: """Directive[Const]: @ Name Arguments[?Const]?""" @@ -704,7 +706,7 @@ def parse_type_system_extension(self) -> TypeSystemExtensionNode: keyword_token = self._lexer.lookahead() if keyword_token.kind == TokenKind.NAME: method_name = self._parse_type_extension_method_names.get( - cast(str, keyword_token.value) + cast("str", keyword_token.value) ) if method_name: # pragma: no cover return getattr(self, f"parse_{method_name}")() diff --git a/src/graphql/language/print_location.py b/src/graphql/language/print_location.py index 03509732..21fb1b8a 100644 --- a/src/graphql/language/print_location.py +++ b/src/graphql/language/print_location.py @@ -73,7 +73,7 @@ def print_source_location(source: Source, source_location: SourceLocation) -> st def print_prefixed_lines(*lines: tuple[str, str | None]) -> str: """Print lines specified like this: ("prefix", "string")""" existing_lines = [ - cast(Tuple[str, str], line) for line in lines if line[1] is not None + cast("Tuple[str, str]", line) for line in lines if line[1] is not None ] pad_len = max(len(line[0]) for line in existing_lines) return "\n".join( diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py index 02fbf648..4eb79748 100644 --- a/src/graphql/pyutils/async_reduce.py +++ 
b/src/graphql/pyutils/async_reduce.py @@ -41,7 +41,7 @@ async def async_callback( ) return await result if is_awaitable(result) else result # type: ignore - accumulator = async_callback(cast(Awaitable[U], accumulator), value) + accumulator = async_callback(cast("Awaitable[U]", accumulator), value) else: - accumulator = callback(cast(U, accumulator), value) + accumulator = callback(cast("U", accumulator), value) return accumulator diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index 2876c570..1a13936b 100644 --- a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -11,7 +11,7 @@ T = TypeVar("T") -DEFAULT_VALUE = cast(Any, Undefined) +DEFAULT_VALUE = cast("Any", Undefined) def identity_func(x: T = DEFAULT_VALUE, *_args: Any) -> T: diff --git a/src/graphql/pyutils/merge_kwargs.py b/src/graphql/pyutils/merge_kwargs.py index c7cace3e..21144524 100644 --- a/src/graphql/pyutils/merge_kwargs.py +++ b/src/graphql/pyutils/merge_kwargs.py @@ -9,4 +9,4 @@ def merge_kwargs(base_dict: T, **kwargs: Any) -> T: """Return arbitrary typed dictionary with some keyword args merged in.""" - return cast(T, {**cast(Dict, base_dict), **kwargs}) + return cast("T", {**cast("Dict", base_dict), **kwargs}) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 2e557390..c334488d 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -2,7 +2,6 @@ from __future__ import annotations -from enum import Enum from typing import ( TYPE_CHECKING, Any, @@ -19,6 +18,18 @@ overload, ) +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict +try: + from typing import TypeAlias, TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias, TypeGuard + +if TYPE_CHECKING: + from enum import Enum + from ..error import GraphQLError from ..language import ( EnumTypeDefinitionNode, @@ -57,18 +68,10 
@@ from ..utilities.value_from_ast_untyped import value_from_ast_untyped from .assert_name import assert_enum_value_name, assert_name -try: - from typing import TypedDict -except ImportError: # Python < 3.8 - from typing_extensions import TypedDict -try: - from typing import TypeAlias, TypeGuard -except ImportError: # Python < 3.10 - from typing_extensions import TypeAlias, TypeGuard - if TYPE_CHECKING: from .schema import GraphQLSchema + __all__ = [ "GraphQLAbstractType", "GraphQLArgument", @@ -503,7 +506,7 @@ def __init__( args = { assert_name(name): value if isinstance(value, GraphQLArgument) - else GraphQLArgument(cast(GraphQLInputType, value)) + else GraphQLArgument(cast("GraphQLInputType", value)) for name, value in args.items() } else: @@ -1077,7 +1080,7 @@ def __init__( extension_ast_nodes=extension_ast_nodes, ) try: # check for enum - values = cast(Enum, values).__members__ # type: ignore + values = cast("Enum", values).__members__ # type: ignore except AttributeError: if not isinstance(values, Mapping) or not all( isinstance(name, str) for name in values @@ -1090,9 +1093,9 @@ def __init__( " with value names as keys." ) raise TypeError(msg) from error - values = cast(Dict[str, Any], values) + values = cast("Dict[str, Any]", values) else: - values = cast(Dict[str, Enum], values) + values = cast("Dict[str, Enum]", values) if names_as_values is False: values = {key: value.value for key, value in values.items()} elif names_as_values is True: @@ -1662,7 +1665,7 @@ def get_nullable_type( """Unwrap possible non-null type""" if is_non_null_type(type_): type_ = type_.of_type - return cast(Optional[GraphQLNullableType], type_) + return cast("Optional[GraphQLNullableType]", type_) # These named types do not include modifiers like List or NonNull. 
@@ -1707,7 +1710,7 @@ def get_named_type(type_: GraphQLType | None) -> GraphQLNamedType | None: unwrapped_type = type_ while is_wrapping_type(unwrapped_type): unwrapped_type = unwrapped_type.of_type - return cast(GraphQLNamedType, unwrapped_type) + return cast("GraphQLNamedType", unwrapped_type) return None diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index b73d938f..ecd201c2 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -79,7 +79,7 @@ def __init__( locations = tuple( value if isinstance(value, DirectiveLocation) - else DirectiveLocation[cast(str, value)] + else DirectiveLocation[cast("str", value)] for value in locations ) except (KeyError, TypeError) as error: @@ -92,7 +92,7 @@ def __init__( args = { assert_name(name): value if isinstance(value, GraphQLArgument) - else GraphQLArgument(cast(GraphQLInputType, value)) + else GraphQLArgument(cast("GraphQLInputType", value)) for name, value in args.items() } else: diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index befefabd..f8ab756b 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -297,11 +297,12 @@ def __deepcopy__(self, memo_: dict) -> GraphQLSchema: for directive in directives: remap_directive(directive, type_map) return self.__class__( - self.query_type and cast(GraphQLObjectType, type_map[self.query_type.name]), + self.query_type + and cast("GraphQLObjectType", type_map[self.query_type.name]), self.mutation_type - and cast(GraphQLObjectType, type_map[self.mutation_type.name]), + and cast("GraphQLObjectType", type_map[self.mutation_type.name]), self.subscription_type - and cast(GraphQLObjectType, type_map[self.subscription_type.name]), + and cast("GraphQLObjectType", type_map[self.subscription_type.name]), types, directives, self.description, @@ -327,7 +328,7 @@ def get_possible_types( abstract_type.types if is_union_type(abstract_type) else self.get_implementations( - cast(GraphQLInterfaceType, 
abstract_type) + cast("GraphQLInterfaceType", abstract_type) ).objects ) @@ -354,7 +355,7 @@ def is_sub_type( add(type_.name) else: implementations = self.get_implementations( - cast(GraphQLInterfaceType, abstract_type) + cast("GraphQLInterfaceType", abstract_type) ) for type_ in implementations.objects: add(type_.name) @@ -410,7 +411,7 @@ class TypeSet(Dict[GraphQLNamedType, None]): @classmethod def with_initial_types(cls, types: Collection[GraphQLType]) -> TypeSet: - return cast(TypeSet, super().fromkeys(types)) + return cast("TypeSet", super().fromkeys(types)) def collect_referenced_types(self, type_: GraphQLType) -> None: """Recursive function supplementing the type starting from an initial type.""" @@ -455,7 +456,7 @@ def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: """Get a copy of the given type that uses this type map.""" if is_wrapping_type(type_): return type_.__class__(remapped_type(type_.of_type, type_map)) - type_ = cast(GraphQLNamedType, type_) + type_ = cast("GraphQLNamedType", type_) return type_map.get(type_.name, type_) @@ -493,5 +494,5 @@ def remap_directive(directive: GraphQLDirective, type_map: TypeMap) -> None: args = directive.args for arg_name, arg in args.items(): arg = copy(arg) # noqa: PLW2901 - arg.type = cast(GraphQLInputType, remapped_type(arg.type, type_map)) + arg.type = cast("GraphQLInputType", remapped_type(arg.type, type_map)) args[arg_name] = arg diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index d5f8f8ce..9b22f44e 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -101,7 +101,7 @@ def report_error( ) -> None: if nodes and not isinstance(nodes, Node): nodes = [node for node in nodes if node] - nodes = cast(Optional[Collection[Node]], nodes) + nodes = cast("Optional[Collection[Node]]", nodes) self.errors.append(GraphQLError(message, nodes)) def validate_root_types(self) -> None: @@ -183,7 +183,7 @@ def validate_name(self, node: Any, name: str | None = 
None) -> None: try: if not name: name = node.name - name = cast(str, name) + name = cast("str", name) ast_node = node.ast_node except AttributeError: # pragma: no cover pass @@ -561,7 +561,7 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: " within itself through a series of non-null fields:" f" '{'.'.join(field_names)}'.", cast( - Collection[Node], + "Collection[Node]", map(attrgetter("ast_node"), map(itemgetter(1), cycle_path)), ), ) diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index 8736e979..26ccfea2 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -68,11 +68,11 @@ def build_ast_schema( # validation with validate_schema() will produce more actionable results. type_name = type_.name if type_name == "Query": - schema_kwargs["query"] = cast(GraphQLObjectType, type_) + schema_kwargs["query"] = cast("GraphQLObjectType", type_) elif type_name == "Mutation": - schema_kwargs["mutation"] = cast(GraphQLObjectType, type_) + schema_kwargs["mutation"] = cast("GraphQLObjectType", type_) elif type_name == "Subscription": - schema_kwargs["subscription"] = cast(GraphQLObjectType, type_) + schema_kwargs["subscription"] = cast("GraphQLObjectType", type_) # If specified directives were not explicitly declared, add them. 
directives = schema_kwargs["directives"] diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index c4d05ccc..0e2cbd0e 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -3,7 +3,7 @@ from __future__ import annotations from itertools import chain -from typing import Callable, Collection, cast +from typing import TYPE_CHECKING, Callable, Collection, cast from ..language import DirectiveLocation, parse_value from ..pyutils import Undefined, inspect @@ -33,22 +33,25 @@ is_output_type, specified_scalar_types, ) -from .get_introspection_query import ( - IntrospectionDirective, - IntrospectionEnumType, - IntrospectionField, - IntrospectionInputObjectType, - IntrospectionInputValue, - IntrospectionInterfaceType, - IntrospectionObjectType, - IntrospectionQuery, - IntrospectionScalarType, - IntrospectionType, - IntrospectionTypeRef, - IntrospectionUnionType, -) from .value_from_ast import value_from_ast +if TYPE_CHECKING: + from .get_introspection_query import ( + IntrospectionDirective, + IntrospectionEnumType, + IntrospectionField, + IntrospectionInputObjectType, + IntrospectionInputValue, + IntrospectionInterfaceType, + IntrospectionObjectType, + IntrospectionQuery, + IntrospectionScalarType, + IntrospectionType, + IntrospectionTypeRef, + IntrospectionUnionType, + ) + + __all__ = ["build_client_schema"] @@ -90,17 +93,17 @@ def get_type(type_ref: IntrospectionTypeRef) -> GraphQLType: if not item_ref: msg = "Decorated type deeper than introspection query." raise TypeError(msg) - item_ref = cast(IntrospectionTypeRef, item_ref) + item_ref = cast("IntrospectionTypeRef", item_ref) return GraphQLList(get_type(item_ref)) if kind == TypeKind.NON_NULL.name: nullable_ref = type_ref.get("ofType") if not nullable_ref: msg = "Decorated type deeper than introspection query." 
raise TypeError(msg) - nullable_ref = cast(IntrospectionTypeRef, nullable_ref) + nullable_ref = cast("IntrospectionTypeRef", nullable_ref) nullable_type = get_type(nullable_ref) return GraphQLNonNull(assert_nullable_type(nullable_type)) - type_ref = cast(IntrospectionType, type_ref) + type_ref = cast("IntrospectionType", type_ref) return get_named_type(type_ref) def get_named_type(type_ref: IntrospectionType) -> GraphQLNamedType: @@ -145,7 +148,7 @@ def build_scalar_def( ) -> GraphQLScalarType: name = scalar_introspection["name"] try: - return cast(GraphQLScalarType, GraphQLScalarType.reserved_types[name]) + return cast("GraphQLScalarType", GraphQLScalarType.reserved_types[name]) except KeyError: return GraphQLScalarType( name=name, @@ -168,7 +171,7 @@ def build_implementations_list( f" {inspect(implementing_introspection)}." ) raise TypeError(msg) - interfaces = cast(Collection[IntrospectionInterfaceType], maybe_interfaces) + interfaces = cast("Collection[IntrospectionInterfaceType]", maybe_interfaces) return [get_interface_type(interface) for interface in interfaces] def build_object_def( @@ -176,7 +179,7 @@ def build_object_def( ) -> GraphQLObjectType: name = object_introspection["name"] try: - return cast(GraphQLObjectType, GraphQLObjectType.reserved_types[name]) + return cast("GraphQLObjectType", GraphQLObjectType.reserved_types[name]) except KeyError: return GraphQLObjectType( name=name, @@ -205,7 +208,9 @@ def build_union_def( f" {inspect(union_introspection)}." 
) raise TypeError(msg) - possible_types = cast(Collection[IntrospectionObjectType], maybe_possible_types) + possible_types = cast( + "Collection[IntrospectionObjectType]", maybe_possible_types + ) return GraphQLUnionType( name=union_introspection["name"], description=union_introspection.get("description"), @@ -221,7 +226,7 @@ def build_enum_def(enum_introspection: IntrospectionEnumType) -> GraphQLEnumType raise TypeError(msg) name = enum_introspection["name"] try: - return cast(GraphQLEnumType, GraphQLEnumType.reserved_types[name]) + return cast("GraphQLEnumType", GraphQLEnumType.reserved_types[name]) except KeyError: return GraphQLEnumType( name=name, @@ -275,7 +280,7 @@ def build_field_def_map( } def build_field(field_introspection: IntrospectionField) -> GraphQLField: - type_introspection = cast(IntrospectionType, field_introspection["type"]) + type_introspection = cast("IntrospectionType", field_introspection["type"]) type_ = get_type(type_introspection) if not is_output_type(type_): msg = ( @@ -310,7 +315,7 @@ def build_argument_def_map( def build_argument( argument_introspection: IntrospectionInputValue, ) -> GraphQLArgument: - type_introspection = cast(IntrospectionType, argument_introspection["type"]) + type_introspection = cast("IntrospectionType", argument_introspection["type"]) type_ = get_type(type_introspection) if not is_input_type(type_): msg = ( @@ -345,7 +350,9 @@ def build_input_value_def_map( def build_input_value( input_value_introspection: IntrospectionInputValue, ) -> GraphQLInputField: - type_introspection = cast(IntrospectionType, input_value_introspection["type"]) + type_introspection = cast( + "IntrospectionType", input_value_introspection["type"] + ) type_ = get_type(type_introspection) if not is_input_type(type_): msg = ( @@ -388,7 +395,7 @@ def build_directive( is_repeatable=directive_introspection.get("isRepeatable", False), locations=list( cast( - Collection[DirectiveLocation], + "Collection[DirectiveLocation]", 
directive_introspection.get("locations"), ) ), diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index ab06caf1..b7452ec3 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -160,7 +160,7 @@ def coerce_input_value( # Scalars and Enums determine if an input value is valid via `parse_value()`, # which can throw to indicate failure. If it throws, maintain a reference # to the original error. - type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) try: parse_result = type_.parse_value(input_value) except GraphQLError as error: diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 14adc661..aebdd2b3 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -405,7 +405,7 @@ def extend_object_type_interfaces( ) -> list[GraphQLInterfaceType]: """Extend a GraphQL object type interface.""" return [ - cast(GraphQLInterfaceType, self.replace_named_type(interface)) + cast("GraphQLInterfaceType", self.replace_named_type(interface)) for interface in kwargs["interfaces"] ] + self.build_interfaces(extensions) @@ -443,7 +443,7 @@ def extend_interface_type_interfaces( ) -> list[GraphQLInterfaceType]: """Extend GraphQL interface type interfaces.""" return [ - cast(GraphQLInterfaceType, self.replace_named_type(interface)) + cast("GraphQLInterfaceType", self.replace_named_type(interface)) for interface in kwargs["interfaces"] ] + self.build_interfaces(extensions) @@ -483,7 +483,7 @@ def extend_union_type_types( ) -> list[GraphQLObjectType]: """Extend types of a GraphQL union type.""" return [ - cast(GraphQLObjectType, self.replace_named_type(member_type)) + cast("GraphQLObjectType", self.replace_named_type(member_type)) for member_type in kwargs["types"] ] + self.build_union_types(extensions) @@ -551,9 +551,9 @@ def get_wrapped_type(self, node: TypeNode) -> 
GraphQLType: return GraphQLList(self.get_wrapped_type(node.type)) if isinstance(node, NonNullTypeNode): return GraphQLNonNull( - cast(GraphQLNullableType, self.get_wrapped_type(node.type)) + cast("GraphQLNullableType", self.get_wrapped_type(node.type)) ) - return self.get_named_type(cast(NamedTypeNode, node)) + return self.get_named_type(cast("NamedTypeNode", node)) def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective: """Build a GraphQL directive for a given directive definition node.""" @@ -585,7 +585,7 @@ def build_field_map( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. field_map[field.name.value] = GraphQLField( - type_=cast(GraphQLOutputType, self.get_wrapped_type(field.type)), + type_=cast("GraphQLOutputType", self.get_wrapped_type(field.type)), description=field.description.value if field.description else None, args=self.build_argument_map(field.arguments), deprecation_reason=get_deprecation_reason(field), @@ -603,7 +603,7 @@ def build_argument_map( # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. - type_ = cast(GraphQLInputType, self.get_wrapped_type(arg.type)) + type_ = cast("GraphQLInputType", self.get_wrapped_type(arg.type)) arg_map[arg.name.value] = GraphQLArgument( type_=type_, description=arg.description.value if arg.description else None, @@ -624,7 +624,7 @@ def build_input_field_map( # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. 
- type_ = cast(GraphQLInputType, self.get_wrapped_type(field.type)) + type_ = cast("GraphQLInputType", self.get_wrapped_type(field.type)) input_field_map[field.name.value] = GraphQLInputField( type_=type_, description=field.description.value if field.description else None, @@ -668,7 +668,7 @@ def build_interfaces( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. return [ - cast(GraphQLInterfaceType, self.get_named_type(type_)) + cast("GraphQLInterfaceType", self.get_named_type(type_)) for node in nodes for type_ in node.interfaces or [] ] @@ -682,7 +682,7 @@ def build_union_types( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. return [ - cast(GraphQLObjectType, self.get_named_type(type_)) + cast("GraphQLObjectType", self.get_named_type(type_)) for node in nodes for type_ in node.types or [] ] diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index d9cb160f..adf038ac 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -304,7 +304,7 @@ class IntrospectionSchema(MaybeWithDescription): # The root typed dictionary for schema introspections. # Note: We don't use class syntax here since the key looks like a private attribute. 
-IntrospectionQuery = TypedDict( # noqa: UP013 +IntrospectionQuery = TypedDict( "IntrospectionQuery", {"__schema": IntrospectionSchema}, ) diff --git a/src/graphql/utilities/introspection_from_schema.py b/src/graphql/utilities/introspection_from_schema.py index cc1e60ce..a0440a32 100644 --- a/src/graphql/utilities/introspection_from_schema.py +++ b/src/graphql/utilities/introspection_from_schema.py @@ -51,4 +51,4 @@ def introspection_from_schema( if not result.data: # pragma: no cover msg = "Introspection did not return a result" raise GraphQLError(msg) - return cast(IntrospectionQuery, result.data) + return cast("IntrospectionQuery", result.data) diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index cf0c4959..de675a94 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -51,7 +51,7 @@ def replace_type( return GraphQLList(replace_type(type_.of_type)) if is_non_null_type(type_): return GraphQLNonNull(replace_type(type_.of_type)) - return replace_named_type(cast(GraphQLNamedType, type_)) + return replace_named_type(cast("GraphQLNamedType", type_)) def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: return type_map[type_.name] @@ -76,7 +76,7 @@ def sort_args(args_map: dict[str, GraphQLArgument]) -> dict[str, GraphQLArgument args[name] = GraphQLArgument( **merge_kwargs( arg.to_kwargs(), - type_=replace_type(cast(GraphQLNamedType, arg.type)), + type_=replace_type(cast("GraphQLNamedType", arg.type)), ) ) return args @@ -87,7 +87,7 @@ def sort_fields(fields_map: dict[str, GraphQLField]) -> dict[str, GraphQLField]: fields[name] = GraphQLField( **merge_kwargs( field.to_kwargs(), - type_=replace_type(cast(GraphQLNamedType, field.type)), + type_=replace_type(cast("GraphQLNamedType", field.type)), args=sort_args(field.args), ) ) @@ -99,7 +99,8 @@ def sort_input_fields( return { name: GraphQLInputField( cast( - 
GraphQLInputType, replace_type(cast(GraphQLNamedType, field.type)) + "GraphQLInputType", + replace_type(cast("GraphQLNamedType", field.type)), ), description=field.description, default_value=field.default_value, @@ -174,12 +175,14 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: sort_directive(directive) for directive in sorted(schema.directives, key=sort_by_name_key) ], - query=cast(Optional[GraphQLObjectType], replace_maybe_type(schema.query_type)), + query=cast( + "Optional[GraphQLObjectType]", replace_maybe_type(schema.query_type) + ), mutation=cast( - Optional[GraphQLObjectType], replace_maybe_type(schema.mutation_type) + "Optional[GraphQLObjectType]", replace_maybe_type(schema.mutation_type) ), subscription=cast( - Optional[GraphQLObjectType], replace_maybe_type(schema.subscription_type) + "Optional[GraphQLObjectType]", replace_maybe_type(schema.subscription_type) ), ast_node=schema.ast_node, ) diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 6521d10b..9ffe1e26 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -68,7 +68,7 @@ def strip_ignored_characters(source: str | Source) -> str: """Type description""" type Foo{"""Field description""" bar:String} ''' if not is_source(source): - source = Source(cast(str, source)) + source = Source(cast("str", source)) body = source.body lexer = Lexer(source) diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index c082ebc1..10acd68f 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -58,7 +58,7 @@ def type_from_ast( return GraphQLList(inner_type) if inner_type else None if isinstance(type_node, NonNullTypeNode): inner_type = type_from_ast(schema, type_node.type) - inner_type = cast(GraphQLNullableType, inner_type) + inner_type = cast("GraphQLNullableType", inner_type) 
return GraphQLNonNull(inner_type) if inner_type else None if isinstance(type_node, NamedTypeNode): return schema.get_type(type_node.name.value) diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index dfefb723..399cdcb4 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -131,7 +131,7 @@ def value_from_ast( if is_leaf_type(type_): # Scalars fulfill parsing a literal value via `parse_literal()`. Invalid values # represent a failure to parse correctly, in which case Undefined is returned. - type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) # noinspection PyBroadException try: if variables: diff --git a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py index 7a73a990..023fc2b2 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py @@ -29,7 +29,7 @@ def enter_directive( _path: Any, _ancestors: list[Node], ) -> None: - context = cast(ValidationContext, self.context) + context = cast("ValidationContext", self.context) parent_type = context.get_parent_type() if not parent_type: return diff --git a/src/graphql/validation/rules/executable_definitions.py b/src/graphql/validation/rules/executable_definitions.py index 1f702210..6ca01a9d 100644 --- a/src/graphql/validation/rules/executable_definitions.py +++ b/src/graphql/validation/rules/executable_definitions.py @@ -39,7 +39,7 @@ def enter_document(self, node: DocumentNode, *_args: Any) -> VisitorAction: ) else "'{}'".format( cast( - Union[DirectiveDefinitionNode, TypeDefinitionNode], + "Union[DirectiveDefinitionNode, TypeDefinitionNode]", definition, ).name.value ) diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index 46f9ef42..643300d0 
100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -35,7 +35,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = schema.directives if schema else specified_directives - for directive in cast(List, defined_directives): + for directive in cast("List", defined_directives): directive_args[directive.name] = list(directive.args) ast_definitions = context.document.definitions diff --git a/src/graphql/validation/rules/known_directives.py b/src/graphql/validation/rules/known_directives.py index 8a0c76c4..da31730b 100644 --- a/src/graphql/validation/rules/known_directives.py +++ b/src/graphql/validation/rules/known_directives.py @@ -35,7 +35,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = ( - schema.directives if schema else cast(List, specified_directives) + schema.directives if schema else cast("List", specified_directives) ) for directive in defined_directives: locations_map[directive.name] = directive.locations @@ -111,7 +111,7 @@ def get_directive_location_for_ast_path( raise TypeError(msg) kind = applied_to.kind if kind == "operation_definition": - applied_to = cast(OperationDefinitionNode, applied_to) + applied_to = cast("OperationDefinitionNode", applied_to) return _operation_location[applied_to.operation.value] if kind == "input_value_definition": parent_node = ancestors[-3] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index 118d7c0e..5dbac00b 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -94,7 +94,7 @@ def is_sdl_node( value is not None and not isinstance(value, list) and ( - is_type_system_definition_node(cast(Node, value)) - or is_type_system_extension_node(cast(Node, value)) + 
is_type_system_definition_node(cast("Node", value)) + or is_type_system_extension_node(cast("Node", value)) ) ) diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 58a7a3b7..97939e56 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -538,8 +538,8 @@ def find_conflict( ) # The return type for each field. - type1 = cast(Optional[GraphQLOutputType], def1 and def1.type) - type2 = cast(Optional[GraphQLOutputType], def2 and def2.type) + type1 = cast("Optional[GraphQLOutputType]", def1 and def1.type) + type2 = cast("Optional[GraphQLOutputType]", def2 and def2.type) if not are_mutually_exclusive: # Two aliases must refer to the same field. @@ -739,7 +739,7 @@ def collect_fields_and_fragment_names( if not node_and_defs.get(response_name): node_and_defs[response_name] = [] node_and_defs[response_name].append( - cast(NodeAndDef, (parent_type, selection, field_def)) + cast("NodeAndDef", (parent_type, selection, field_def)) ) elif isinstance(selection, FragmentSpreadNode): fragment_names[selection.name.value] = True diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index f94515fe..9c98065e 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -41,7 +41,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = schema.directives if schema else specified_directives - for directive in cast(List, defined_directives): + for directive in cast("List", defined_directives): required_args_map[directive.name] = { name: arg for name, arg in directive.args.items() @@ -71,7 +71,7 @@ def leave_directive(self, directive_node: DirectiveNode, *_args: Any) -> 
None: arg_type_str = ( str(arg_type) if is_type(arg_type) - else print_ast(cast(TypeNode, arg_type)) + else print_ast(cast("TypeNode", arg_type)) ) self.report_error( GraphQLError( diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py index 141984c2..03015cd0 100644 --- a/src/graphql/validation/rules/stream_directive_on_list_field.py +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py @@ -28,7 +28,7 @@ def enter_directive( _path: Any, _ancestors: list[Node], ) -> None: - context = cast(ValidationContext, self.context) + context = cast("ValidationContext", self.context) field_def = context.get_field_def() parent_type = context.get_parent_type() if ( diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index de9a05d0..daab2935 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -38,7 +38,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = ( - schema.directives if schema else cast(List, specified_directives) + schema.directives if schema else cast("List", specified_directives) ) for directive in defined_directives: unique_directive_map[directive.name] = not directive.is_repeatable @@ -60,7 +60,7 @@ def enter(self, node: Node, *_args: Any) -> None: directives = getattr(node, "directives", None) if not directives: return - directives = cast(List[DirectiveNode], directives) + directives = cast("List[DirectiveNode]", directives) if isinstance(node, (SchemaDefinitionNode, SchemaExtensionNode)): seen_directives = self.schema_directives diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 7df72c6e..ea4c4a3c 100644 --- 
a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -157,7 +157,7 @@ def is_valid_value_node(self, node: ValueNode) -> None: # Scalars determine if a literal value is valid via `parse_literal()` which may # throw or return an invalid value to indicate failure. - type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) try: parse_result = type_.parse_literal(node) if parse_result is Undefined: @@ -218,7 +218,7 @@ def validate_one_of_input_object( is_variable = value and isinstance(value, VariableNode) if is_variable: - variable_name = cast(VariableNode, value).name.value + variable_name = cast("VariableNode", value).name.value definition = variable_definitions[variable_name] is_nullable_variable = not isinstance(definition.type, NonNullTypeNode) diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index dec21042..055b4231 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -143,7 +143,7 @@ def get_fragment_spreads(self, node: SelectionSetNode) -> list[FragmentSpreadNod append_spread(selection) else: set_to_visit = cast( - NodeWithSelectionSet, selection + "NodeWithSelectionSet", selection ).selection_set if set_to_visit: append_set(set_to_visit) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index fbc8602e..03b85dcf 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -25,7 +25,7 @@ ast = parse(source) operation_node = ast.definitions[0] -operation_node = cast(OperationDefinitionNode, operation_node) +operation_node = cast("OperationDefinitionNode", operation_node) assert operation_node assert operation_node.kind == "operation_definition" field_node = operation_node.selection_set.selections[0] @@ -299,7 +299,7 @@ def prints_an_error_with_nodes_from_different_sources(): ) ) op_a = 
doc_a.definitions[0] - op_a = cast(ObjectTypeDefinitionNode, op_a) + op_a = cast("ObjectTypeDefinitionNode", op_a) assert op_a assert op_a.kind == "object_type_definition" assert op_a.fields @@ -317,7 +317,7 @@ def prints_an_error_with_nodes_from_different_sources(): ) ) op_b = doc_b.definitions[0] - op_b = cast(ObjectTypeDefinitionNode, op_b) + op_b = cast("ObjectTypeDefinitionNode", op_b) assert op_b assert op_b.kind == "object_type_definition" assert op_b.fields diff --git a/tests/error/test_located_error.py b/tests/error/test_located_error.py index 593b24ad..f22f6fd4 100644 --- a/tests/error/test_located_error.py +++ b/tests/error/test_located_error.py @@ -11,7 +11,7 @@ def throws_without_an_original_error(): def passes_graphql_error_through(): path = ["path", 3, "to", "field"] - e = GraphQLError("msg", None, None, None, cast(Any, path)) + e = GraphQLError("msg", None, None, None, cast("Any", path)) assert located_error(e, [], []) == e def passes_graphql_error_ish_through(): @@ -21,7 +21,7 @@ def passes_graphql_error_ish_through(): def does_not_pass_through_elasticsearch_like_errors(): e = Exception("I am from elasticsearch") - cast(Any, e).path = "/something/feed/_search" + cast("Any", e).path = "/something/feed/_search" assert located_error(e, [], []) is not e def handles_lazy_error_messages(): diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 62dc88bb..51133100 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -417,7 +417,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): pending = [PendingResult("foo", ["bar"])] incremental = [ - cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + cast("IncrementalResult", IncrementalDeferResult({"foo": 1}, "bar")) ] completed = [CompletedResult("foo")] result = SubsequentIncrementalExecutionResult( @@ -442,7 +442,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): def 
can_compare_subsequent_incremental_execution_result(): pending = [PendingResult("foo", ["bar"])] incremental = [ - cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + cast("IncrementalResult", IncrementalDeferResult({"foo": 1}, "bar")) ] completed = [CompletedResult("foo")] args: dict[str, Any] = { diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 792066f1..a11c6b5e 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -245,16 +245,16 @@ def resolve(_obj, info): execute_sync(schema, document, root_value, variable_values=variable_values) assert len(resolved_infos) == 1 - operation = cast(OperationDefinitionNode, document.definitions[0]) + operation = cast("OperationDefinitionNode", document.definitions[0]) assert operation assert operation.kind == "operation_definition" - field = cast(FieldNode, operation.selection_set.selections[0]) + field = cast("FieldNode", operation.selection_set.selections[0]) assert resolved_infos[0] == GraphQLResolveInfo( field_name="test", field_nodes=[field], return_type=GraphQLString, - parent_type=cast(GraphQLObjectType, schema.query_type), + parent_type=cast("GraphQLObjectType", schema.query_type), path=ResponsePath(None, "result", "Test"), schema=schema, fragments={}, diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 291f218c..50159995 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -323,7 +323,7 @@ def bad_middleware_object(): GraphQLSchema(test_type), doc, None, - middleware=cast(Middleware, {"bad": "value"}), + middleware=cast("Middleware", {"bad": "value"}), ) assert str(exc_info.value) == ( diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index 99810ed9..6c98eb67 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -111,7 +111,7 @@ def patch(data: str) -> str: async def execute_sync_and_async(query: 
str, root_value: Any) -> ExecutionResult: sync_result = execute_sync(schema, parse(query), root_value) async_result = await cast( - Awaitable[ExecutionResult], execute(schema, parse(patch(query)), root_value) + "Awaitable[ExecutionResult]", execute(schema, parse(patch(query)), root_value) ) assert repr(async_result) == patch(repr(sync_result)) @@ -218,14 +218,14 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): @pytest.mark.asyncio async def returns_null(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, NullingData()) + "Awaitable[ExecutionResult]", execute_query(query, NullingData()) ) assert result == (data, None) @pytest.mark.asyncio async def throws(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, ThrowingData()) + "Awaitable[ExecutionResult]", execute_query(query, ThrowingData()) ) assert result == ( data, @@ -352,7 +352,7 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): @pytest.mark.asyncio async def returns_null(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, NullingData()) + "Awaitable[ExecutionResult]", execute_query(query, NullingData()) ) assert result == ( data, @@ -415,7 +415,7 @@ async def returns_null(): @pytest.mark.asyncio async def throws(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, ThrowingData()) + "Awaitable[ExecutionResult]", execute_query(query, ThrowingData()) ) assert result == ( data, diff --git a/tests/language/test_block_string.py b/tests/language/test_block_string.py index 74f99734..d135dde9 100644 --- a/tests/language/test_block_string.py +++ b/tests/language/test_block_string.py @@ -148,8 +148,8 @@ def __init__(self, string: str) -> None: def __str__(self) -> str: return self.string - _assert_printable(cast(str, LazyString(""))) - _assert_non_printable(cast(str, LazyString(" "))) + _assert_printable(cast("str", LazyString(""))) + _assert_non_printable(cast("str", LazyString(" "))) def 
describe_print_block_string(): @@ -212,4 +212,4 @@ class LazyString: def __str__(self) -> str: return "lazy" - _assert_block_string(cast(str, LazyString()), '"""lazy"""') + _assert_block_string(cast("str", LazyString()), '"""lazy"""') diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index e6d33064..0121db23 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -181,11 +181,11 @@ def parses_multi_byte_characters(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - selection_set = cast(OperationDefinitionNode, definitions[0]).selection_set + selection_set = cast("OperationDefinitionNode", definitions[0]).selection_set selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 - arguments = cast(FieldNode, selections[0]).arguments + arguments = cast("FieldNode", selections[0]).arguments assert isinstance(arguments, tuple) assert len(arguments) == 1 value = arguments[0].value @@ -263,7 +263,7 @@ def parses_required_field(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -328,7 +328,7 @@ def parses_field_with_required_list_elements(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -352,7 +352,7 @@ def parses_field_with_optional_list_elements(): definitions = doc.definitions 
assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -376,7 +376,7 @@ def parses_field_with_required_list(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -400,7 +400,7 @@ def parses_field_with_optional_list(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -424,7 +424,7 @@ def parses_field_with_mixed_list_elements(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -483,7 +483,7 @@ def creates_ast(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) assert isinstance(definition, DefinitionNode) assert definition.loc == (0, 40) 
assert definition.operation == OperationType.QUERY diff --git a/tests/language/test_source.py b/tests/language/test_source.py index 24008605..b973410d 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -81,7 +81,7 @@ def can_create_custom_attribute(): def rejects_invalid_location_offset(): def create_source(location_offset: tuple[int, int]) -> Source: - return Source("", "", cast(SourceLocation, location_offset)) + return Source("", "", cast("SourceLocation", location_offset)) with pytest.raises(TypeError): create_source(None) # type: ignore diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index 00283fe1..f3fdb370 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -581,7 +581,9 @@ class CustomFieldNode(SelectionNode): name: NameNode selection_set: SelectionSetNode | None - custom_selection_set = cast(FieldNode, custom_ast.definitions[0]).selection_set + custom_selection_set = cast( + "FieldNode", custom_ast.definitions[0] + ).selection_set assert custom_selection_set is not None custom_selection_set.selections = ( *custom_selection_set.selections, diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index 3148520b..781ab14e 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -34,7 +34,7 @@ def __str__(self) -> str: return str(self.text) -lazy_string = cast(str, LazyString("Why am I so lazy?")) +lazy_string = cast("str", LazyString("Why am I so lazy?")) @contextmanager @@ -186,8 +186,8 @@ def __str__(self) -> str: with registered(Lazy): field = GraphQLField( GraphQLString, - description=cast(str, description), - deprecation_reason=cast(str, deprecation_reason), + description=cast("str", description), + deprecation_reason=cast("str", deprecation_reason), ) schema = GraphQLSchema(GraphQLObjectType("Query", {"lazyField": field})) @@ -222,8 +222,8 @@ def __str__(self) -> str: with registered(Lazy): field = 
GraphQLField( GraphQLString, - description=cast(str, description), - deprecation_reason=cast(str, deprecation_reason), + description=cast("str", description), + deprecation_reason=cast("str", deprecation_reason), ) schema = GraphQLSchema(GraphQLObjectType("Query", {"lazyField": field})) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 147e01bd..0cb2b5b9 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -262,7 +262,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub # noqa: SLF001s + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 8a4cecba..1455f473 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import TYPE_CHECKING, cast import pytest @@ -23,14 +23,16 @@ introspection_from_schema, print_schema, ) -from graphql.utilities.get_introspection_query import ( - IntrospectionEnumType, - IntrospectionInputObjectType, - IntrospectionInterfaceType, - IntrospectionObjectType, - IntrospectionType, - IntrospectionUnionType, -) + +if TYPE_CHECKING: + from graphql.utilities.get_introspection_query import ( + IntrospectionEnumType, + IntrospectionInputObjectType, + IntrospectionInterfaceType, + IntrospectionObjectType, + IntrospectionType, + IntrospectionUnionType, + ) from ..utils import dedent @@ -715,7 +717,9 @@ def throws_when_missing_definition_for_one_of_the_standard_scalars(): def throws_when_type_reference_is_missing_name(): introspection = introspection_from_schema(dummy_schema) - query_type = cast(IntrospectionType, introspection["__schema"]["queryType"]) + query_type = cast( + "IntrospectionType", introspection["__schema"]["queryType"] + ) 
assert query_type["name"] == "Query" del query_type["name"] # type: ignore @@ -745,7 +749,7 @@ def throws_when_missing_kind(): def throws_when_missing_interfaces(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -767,7 +771,7 @@ def throws_when_missing_interfaces(): def legacy_support_for_interfaces_with_null_as_interfaces_field(): introspection = introspection_from_schema(dummy_schema) some_interface_introspection = cast( - IntrospectionInterfaceType, + "IntrospectionInterfaceType", next( type_ for type_ in introspection["__schema"]["types"] @@ -784,7 +788,7 @@ def legacy_support_for_interfaces_with_null_as_interfaces_field(): def throws_when_missing_fields(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -806,7 +810,7 @@ def throws_when_missing_fields(): def throws_when_missing_field_args(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -828,7 +832,7 @@ def throws_when_missing_field_args(): def throws_when_output_type_is_used_as_an_arg_type(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -852,7 +856,7 @@ def throws_when_output_type_is_used_as_an_arg_type(): def throws_when_output_type_is_used_as_an_input_value_type(): introspection = introspection_from_schema(dummy_schema) input_object_type_introspection = cast( - IntrospectionInputObjectType, + "IntrospectionInputObjectType", next( type_ for type_ in 
introspection["__schema"]["types"] @@ -876,7 +880,7 @@ def throws_when_output_type_is_used_as_an_input_value_type(): def throws_when_input_type_is_used_as_a_field_type(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -900,7 +904,7 @@ def throws_when_input_type_is_used_as_a_field_type(): def throws_when_missing_possible_types(): introspection = introspection_from_schema(dummy_schema) some_union_introspection = cast( - IntrospectionUnionType, + "IntrospectionUnionType", next( type_ for type_ in introspection["__schema"]["types"] @@ -921,7 +925,7 @@ def throws_when_missing_possible_types(): def throws_when_missing_enum_values(): introspection = introspection_from_schema(dummy_schema) some_enum_introspection = cast( - IntrospectionEnumType, + "IntrospectionEnumType", next( type_ for type_ in introspection["__schema"]["types"] @@ -942,7 +946,7 @@ def throws_when_missing_enum_values(): def throws_when_missing_input_fields(): introspection = introspection_from_schema(dummy_schema) some_input_object_introspection = cast( - IntrospectionInputObjectType, + "IntrospectionInputObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -1055,7 +1059,7 @@ def recursive_interfaces(): schema = build_schema(sdl, assume_valid=True) introspection = introspection_from_schema(schema) foo_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index b12d30dc..ab997610 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -555,7 +555,7 @@ def prints_enum(): def prints_empty_types(): schema = GraphQLSchema( types=[ - GraphQLEnumType("SomeEnum", cast(Dict[str, Any], {})), + GraphQLEnumType("SomeEnum", 
cast("Dict[str, Any]", {})), GraphQLInputObjectType("SomeInputObject", {}), GraphQLInterfaceType("SomeInterface", {}), GraphQLObjectType("SomeObject", {}), diff --git a/tests/utils/assert_equal_awaitables_or_values.py b/tests/utils/assert_equal_awaitables_or_values.py index 8ed8d175..964db1a8 100644 --- a/tests/utils/assert_equal_awaitables_or_values.py +++ b/tests/utils/assert_equal_awaitables_or_values.py @@ -15,7 +15,7 @@ def assert_equal_awaitables_or_values(*items: T) -> T: """Check whether the items are the same and either all awaitables or all values.""" if all(is_awaitable(item) for item in items): - awaitable_items = cast(Tuple[Awaitable], items) + awaitable_items = cast("Tuple[Awaitable]", items) async def assert_matching_awaitables(): return assert_matching_values(*(await asyncio.gather(*awaitable_items))) From 0107e309cde181035cc806a5abd0358c37924251 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 3 May 2025 13:47:30 +0200 Subject: [PATCH 230/230] Fix Sphinx issues --- docs/conf.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index e78359fe..f70b6d03 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -142,7 +142,7 @@ """ GNT GT KT T VT TContext -enum.Enum +Enum traceback types.TracebackType TypeMap @@ -157,6 +157,7 @@ FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType +GraphQLEnumValueMap GraphQLErrorExtensions GraphQLFieldResolver GraphQLInputType @@ -175,6 +176,7 @@ asyncio.events.AbstractEventLoop collections.abc.MutableMapping collections.abc.MutableSet +enum.Enum graphql.execution.collect_fields.DeferUsage graphql.execution.collect_fields.CollectFieldsResult graphql.execution.collect_fields.FieldGroup