diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b7e42d6d..e8560a6a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.2.2 +current_version = 3.3.0a7 commit = False tag = False diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 8bdacb69..00000000 --- a/.coveragerc +++ /dev/null @@ -1,26 +0,0 @@ -[run] -branch = True -source = src -omit = - */conftest.py - */test_*_fuzz.py - */assert_valid_name.py - */cached_property.py - */character_classes.py - */is_iterable.py - */subscription/__init__.py - -[report] -exclude_lines = - pragma: no cover - except ImportError: - \# Python < - raise NotImplementedError - raise TypeError\(f?"Unexpected - assert False, - \s+next\($ - if MYPY: - if TYPE_CHECKING: - ^\s+\.\.\.$ - ^\s+pass$ -ignore_errors = True diff --git a/.flake8 b/.flake8 deleted file mode 100644 index ccded588..00000000 --- a/.flake8 +++ /dev/null @@ -1,4 +0,0 @@ -[flake8] -ignore = E203,W503 -exclude = .git,.mypy_cache,.pytest_cache,.tox,.venv,__pycache__,build,dist,docs -max-line-length = 88 diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 00000000..fce1037f --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,35 @@ +name: Performance + +on: + push: + branches: + - "main" + pull_request: + workflow_dispatch: + +jobs: + benchmarks: + name: ๐Ÿ“ˆ Benchmarks + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + id: setup-python + with: + python-version: "3.12" + architecture: x64 + + - name: Install with poetry + run: | + pipx install poetry + poetry env use 3.12 + poetry install --with test + + - name: Run benchmarks with CodSpeed + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: poetry run pytest tests --benchmark-enable --codspeed diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 626f94c2..703a56aa 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -3,16 +3,17 @@ name: Code quality on: [push, pull_request] jobs: - build: + lint: + name: ๐Ÿงน Lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.12' - name: Install dependencies run: | @@ -22,4 +23,4 @@ jobs: - name: Run code quality tests with tox run: tox env: - TOXENV: black,flake8,mypy,docs,manifest + TOXENV: ruff,mypy,docs diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index fc166745..8bd8c296 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -7,21 +7,21 @@ on: jobs: build: + name: ๐Ÿ—๏ธ Build runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - - name: Set up Python 3.9 - uses: actions/setup-python@v2 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.12' - name: Build wheel and source tarball run: | - pip install wheel - python setup.py sdist bdist_wheel - + pip install poetry + poetry build - name: Publish a Python distribution to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 09278c14..581528cc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -3,25 +3,50 @@ 
name: Tests on: [push, pull_request] jobs: - build: + tests: + name: ๐Ÿงช Tests runs-on: ubuntu-latest strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10', 'pypy3'] + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip - pip install tox tox-gh-actions + pip install "tox>=4.24,<5" "tox-gh-actions>=3.2,<4" + + - name: Run unit tests with tox + run: tox + + tests-old: + name: ๐Ÿงช Tests (older Python versions) + runs-on: ubuntu-22.04 + + strategy: + matrix: + python-version: ['3.7', '3.8'] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install "tox>=3.28,<5" "tox-gh-actions>=3.2,<4" - name: Run unit tests with tox run: tox diff --git a/.gitignore b/.gitignore index 6b51313b..a15cbec4 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,7 @@ .tox/ .venv*/ .vs/ +.vscode/ build/ dist/ diff --git a/.mypy.ini b/.mypy.ini deleted file mode 100644 index 4b539ae9..00000000 --- a/.mypy.ini +++ /dev/null @@ -1,20 +0,0 @@ -[mypy] -python_version = 3.9 -check_untyped_defs = True -no_implicit_optional = True -strict_optional = True -warn_redundant_casts = True -warn_unused_ignores = True -disallow_untyped_defs = True - -[mypy-graphql.pyutils.frozen_dict] -disallow_untyped_defs = False - -[mypy-graphql.pyutils.frozen_list] -disallow_untyped_defs = False - -[mypy-graphql.type.introspection] -disallow_untyped_defs = False - -[mypy-tests.*] -disallow_untyped_defs = False diff --git a/.readthedocs.yaml b/.readthedocs.yaml index bb8e1846..69c62c18 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -5,9 +5,9 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: - python: "3.9" + python: "3.10" sphinx: configuration: docs/conf.py diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index b59b72ad..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,25 +0,0 @@ -include MANIFEST.in - -include CODEOWNERS -include LICENSE -include README.md -include SECURITY.md - -include .bumpversion.cfg -include .coveragerc -include .editorconfig -include .flake8 -include .mypy.ini -include .readthedocs.yaml - -include tox.ini - -include poetry.lock -include pyproject.toml - -graft src/graphql -graft tests -recursive-include docs *.txt *.rst conf.py Makefile make.bat *.jpg *.png *.gif -prune docs/_build - -global-exclude *.py[co] __pycache__ diff --git a/README.md b/README.md index 910d0144..aa36c84d 100644 --- a/README.md +++ b/README.md @@ -6,22 +6,33 @@ a query language for APIs created by Facebook. 
[![PyPI version](https://badge.fury.io/py/graphql-core.svg)](https://badge.fury.io/py/graphql-core) [![Documentation Status](https://readthedocs.org/projects/graphql-core-3/badge/)](https://graphql-core-3.readthedocs.io) -![Test Status](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml/badge.svg) -![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) -[![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) - -The current version 3.2.2 of GraphQL-core is up-to-date with GraphQL.js version 16.4.0. - -An extensive test suite with over 2300 unit tests and 100% coverage comprises a -replication of the complete test suite of GraphQL.js, making sure this port is -reliable and compatible with GraphQL.js. - -Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Increases in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. Therefore, we recommend something like `=~ 3.2.0` as version specifier when including GraphQL-core as a dependency. - +[![Test Status](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml/badge.svg)](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml) +[![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg)](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml) +[![CodSpeed](https://img.shields.io/endpoint?url=https://codspeed.io/badge.json)](https://codspeed.io/graphql-python/graphql-core) +[![Code style](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) + +An extensive test suite with over 2500 unit tests and 100% coverage replicates the +complete test suite of GraphQL.js, ensuring that this port is reliable and compatible +with GraphQL.js. + +The current stable version 3.2.6 of GraphQL-core is up-to-date with GraphQL.js +version 16.8.2 and supports Python versions 3.6 to 3.13. + +You can also try out the latest alpha version 3.3.0a7 of GraphQL-core, +which is up-to-date with GraphQL.js version 17.0.0a3. +Please note that this new minor version of GraphQL-core does not support +Python 3.6 anymore. + +Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. +Changes in the major version of GraphQL.js are reflected in the minor version of +GraphQL-core instead. This means there can be breaking changes in the API +when the minor version changes, and only patch releases are fully backward compatible. +Therefore, we recommend using something like `~= 3.2.0` as the version specifier +when including GraphQL-core as a dependency. ## Documentation -A more detailed documentation for GraphQL-core 3 can be found at +More detailed documentation for GraphQL-core 3 can be found at [graphql-core-3.readthedocs.io](https://graphql-core-3.readthedocs.io/). The documentation for GraphQL.js can be found at [graphql.org/graphql-js/](https://graphql.org/graphql-js/). @@ -36,10 +47,10 @@ examples. A general overview of GraphQL is available in the [README](https://github.com/graphql/graphql-spec/blob/main/README.md) for the -[Specification for GraphQL](https://github.com/graphql/graphql-spec). That overview -describes a simple set of GraphQL examples that exist as [tests](tests) in this -repository. 
A good way to get started with this repository is to walk through that -README and the corresponding tests in parallel. +[Specification for GraphQL](https://github.com/graphql/graphql-spec). This overview +includes a simple set of GraphQL examples that are also available as [tests](tests) +in this repository. A good way to get started with this repository is to walk through +that README and the corresponding tests in parallel. ## Installation @@ -126,9 +137,9 @@ ExecutionResult(data=None, errors=[GraphQLError( locations=[SourceLocation(line=1, column=3)])]) ``` -The `graphql_sync` function assumes that all resolvers return values synchronously. By -using coroutines as resolvers, you can also create results in an asynchronous fashion -with the `graphql` function. +The `graphql_sync` function assumes that all resolvers return values synchronously. +By using coroutines as resolvers, you can also create results in an asynchronous +fashion with the `graphql` function. ```python import asyncio @@ -163,17 +174,17 @@ asyncio.run(main()) ## Goals and restrictions -GraphQL-core tries to reproduce the code of the reference implementation GraphQL.js -in Python as closely as possible and to stay up-to-date with the latest development of -GraphQL.js. +GraphQL-core aims to reproduce the code of the reference implementation GraphQL.js +in Python as closely as possible while staying up-to-date with the latest development +of GraphQL.js. -GraphQL-core 3 (formerly known as GraphQL-core-next) has been created as a modern +GraphQL-core 3 (formerly known as GraphQL-core-next) was created as a modern alternative to [GraphQL-core 2](https://github.com/graphql-python/graphql-core-legacy), -a prior work by Syrus Akbary, based on an older version of GraphQL.js and also -targeting older Python versions. Some parts of GraphQL-core 3 have been inspired by -GraphQL-core 2 or directly taken over with only slight modifications, but most of the -code has been re-implemented from scratch, replicating the latest code in GraphQL.js -very closely and adding type hints for Python. +a prior work by Syrus Akbary based on an older version of GraphQL.js that still +supported legacy Python versions. While some parts of GraphQL-core 3 were inspired by +GraphQL-core 2 or directly taken over with slight modifications, most of the code has +been re-implemented from scratch. This re-implementation closely replicates the latest +code in GraphQL.js and adds type hints for Python. 
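To make the version-specifier recommendation above concrete, here is a minimal sketch of how a dependent project could pin GraphQL-core with a compatible-release constraint in its `pyproject.toml` (the project name `my-api` and its metadata are hypothetical, not part of this patch):

```toml
[project]
# hypothetical consumer project, for illustration only
name = "my-api"
version = "0.1.0"
# "~= 3.2.0" allows patch releases (3.2.x) but excludes 3.3,
# since minor version bumps of GraphQL-core may break the API
dependencies = ["graphql-core~=3.2.0"]
```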
Design goals for the GraphQL-core 3 library were: @@ -185,17 +196,22 @@ Design goals for the GraphQL-core 3 library were: (and is now using TypeScript) * to use [black](https://github.com/ambv/black) to achieve a consistent code style while saving time and mental energy for more important matters + (we are now using [ruff](https://github.com/astral-sh/ruff) instead) * to replicate the complete Mocha-based test suite of GraphQL.js using [pytest](https://docs.pytest.org/) with [pytest-describe](https://pypi.org/project/pytest-describe/) Some restrictions (mostly in line with the design goals): -* requires Python 3.6 or newer +* requires Python 3.6 or newer (Python 3.7 and newer in latest version) * does not support some already deprecated methods and options of GraphQL.js * supports asynchronous operations only via async.io (does not support the additional executors in GraphQL-core) +Note that we now use the amazing [ruff](https://docs.astral.sh/ruff/) tool +to both format and check the code of GraphQL-core 3, +in addition to using [mypy](https://mypy-lang.org/) as a type checker. + ## Integration with other libraries and roadmap @@ -205,19 +221,19 @@ Some restrictions (mostly in line with the design goals): also been created by Syrus Akbary, who meanwhile has handed over the maintenance and future development to members of the GraphQL-Python community. - The current version 2 of Graphene is using Graphql-core 2 as core library for much of - the heavy lifting. Note that Graphene 2 is not compatible with GraphQL-core 3. - The new version 3 of Graphene will use GraphQL-core 3 instead of GraphQL-core 2. + Graphene 3 is now using GraphQL-core 3 as its core library for much of the heavy lifting. * [Ariadne](https://github.com/mirumee/ariadne) is a Python library for implementing GraphQL servers using schema-first approach created by Mirumee Software. - Ariadne is already using GraphQL-core 3 as its GraphQL implementation. + Ariadne is also using GraphQL-core 3 as its GraphQL implementation. * [Strawberry](https://github.com/strawberry-graphql/strawberry), created by Patrick Arminio, is a new GraphQL library for Python 3, inspired by dataclasses, that is also using GraphQL-core 3 as underpinning. +* [Typed GraphQL](https://github.com/willemt/typed-graphql), a thin layer over GraphQL-core that uses native Python types for creating GraphQL schemas. + ## Changelog @@ -228,6 +244,7 @@ Changes are tracked as ## Credits and history The GraphQL-core 3 library + * has been created and is maintained by Christoph Zwerschke * uses ideas and code from GraphQL-core 2, a prior work by Syrus Akbary * is a Python port of GraphQL.js which has been developed by Lee Byron and others diff --git a/docs/conf.py b/docs/conf.py index d5eca20e..f70b6d03 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # GraphQL-core 3 documentation build configuration file, created by # sphinx-quickstart on Thu Jun 21 16:28:30 2018. @@ -30,45 +29,45 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', + "sphinx.ext.autodoc", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files.
# # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'GraphQL-core 3' -copyright = '2022, Christoph Zwerschke' -author = 'Christoph Zwerschke' +project = "GraphQL-core 3" +copyright = "2025, Christoph Zwerschke" +author = "Christoph Zwerschke" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -# version = '3.2' +# version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = '3.2.2' +version = release = "3.3.0a7" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -82,23 +81,23 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # AutoDoc configuration autoclass_content = "class" autodoc_default_options = { - 'members': True, - 'inherited-members': True, - 'undoc-members': True, - 'show-inheritance': True + "members": True, + "inherited-members": True, + "undoc-members": True, + "show-inheritance": True, } autosummary_generate = True autodoc_type_aliases = { - 'AwaitableOrValue': 'graphql.pyutils.AwaitableOrValue', - 'FormattedSourceLocation': 'graphql.language.FormattedSourceLocation', - 'Middleware': 'graphql.execution.Middleware', - 'TypeMap': 'graphql.schema.TypeMap' + "AwaitableOrValue": "graphql.pyutils.AwaitableOrValue", + "FormattedSourceLocation": "graphql.language.FormattedSourceLocation", + "Middleware": "graphql.execution.Middleware", + "TypeMap": "graphql.schema.TypeMap", } # GraphQL-core top level modules with submodules that can be omitted. @@ -106,117 +105,179 @@ # qualified form, but the documentation has the shorter form. # We need to give autodoc a little help in this cases. graphql_modules = { - 'error': ['graphql_error'], - 'execution': ['execute', 'middleware'], - 'language': ['ast', 'directive_locations', 'location', - 'source', 'token_kind', 'visitor'], - 'pyutils': ['simple_pub_sub', 'frozen_list', 'path'], - 'type': ['definition', 'directives', 'schema'], - 'utilities': ['find_breaking_changes', 'type_info'], - 'validation': ['rules', 'validation_context']} + "error": ["graphql_error"], + "execution": ["execute", "middleware"], + "language": [ + "ast", + "directive_locations", + "location", + "source", + "token_kind", + "visitor", + ], + "pyutils": ["simple_pub_sub", "frozen_list", "path"], + "type": ["definition", "directives", "schema"], + "utilities": ["find_breaking_changes", "type_info"], + "validation": ["rules", "validation_context"], +} # GraphQL-core classes that autodoc sometimes cannot find # (e.g. where specified as string in type hints). 
# We need to give autodoc a little help in this cases, too: graphql_classes = { - 'GraphQLAbstractType': 'type', - 'GraphQLObjectType': 'type', - 'GraphQLOutputType': 'type', - 'GraphQLTypeResolver': 'type', - 'Node': 'language', - 'Source': 'language', - 'SourceLocation': 'language' + "GraphQLAbstractType": "type", + "GraphQLFieldResolver": "type", + "GraphQLObjectType": "type", + "GraphQLOutputType": "type", + "GraphQLTypeResolver": "type", + "AwaitableOrValue": "execution", + "Middleware": "execution", + "Node": "language", + "Source": "language", + "SourceLocation": "language", } # ignore the following undocumented or internal references: -ignore_references = set(''' +ignore_references = set( + """ GNT GT KT T VT -enum.Enum +TContext +Enum traceback types.TracebackType +TypeMap +AwaitableOrValue +DeferredFragmentRecord +DeferUsage EnterLeaveVisitor +ExperimentalIncrementalExecutionResults +FieldGroup +FormattedIncrementalResult +FormattedPendingResult FormattedSourceLocation GraphQLAbstractType +GraphQLCompositeType +GraphQLEnumValueMap +GraphQLErrorExtensions +GraphQLFieldResolver +GraphQLInputType +GraphQLNullableType GraphQLOutputType +GraphQLTypeResolver +GroupedFieldSet +IncrementalDataRecord +IncrementalResult +InitialResultRecord +Middleware +PendingResult +StreamItemsRecord +StreamRecord +SubsequentDataRecord asyncio.events.AbstractEventLoop -graphql.execution.map_async_iterator.MapAsyncIterator +collections.abc.MutableMapping +collections.abc.MutableSet +enum.Enum +graphql.execution.collect_fields.DeferUsage +graphql.execution.collect_fields.CollectFieldsResult +graphql.execution.collect_fields.FieldGroup +graphql.execution.execute.StreamArguments +graphql.execution.execute.StreamUsage +graphql.execution.map_async_iterable.map_async_iterable +graphql.execution.incremental_publisher.CompletedResult +graphql.execution.incremental_publisher.DeferredFragmentRecord +graphql.execution.incremental_publisher.DeferredGroupedFieldSetRecord +graphql.execution.incremental_publisher.FormattedCompletedResult +graphql.execution.incremental_publisher.FormattedPendingResult +graphql.execution.incremental_publisher.IncrementalPublisher +graphql.execution.incremental_publisher.InitialResultRecord +graphql.execution.incremental_publisher.PendingResult +graphql.execution.incremental_publisher.StreamItemsRecord +graphql.execution.incremental_publisher.StreamRecord +graphql.execution.Middleware graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor +graphql.pyutils.ref_map.K +graphql.pyutils.ref_map.V +graphql.type.definition.GT_co +graphql.type.definition.GNT_co +graphql.type.definition.TContext graphql.type.schema.InterfaceImplementations graphql.validation.validation_context.VariableUsage graphql.validation.rules.known_argument_names.KnownArgumentNamesOnDirectivesRule graphql.validation.rules.provided_required_arguments.ProvidedRequiredArgumentsOnDirectivesRule -'''.split()) +""".split() +) ignore_references.update(__builtins__.keys()) def on_missing_reference(app, env, node, contnode): """Fix or skip any missing references.""" - if node.get('refdomain') != 'py': + if node.get("refdomain") != "py": return None - target = node.get('reftarget') + target = node.get("reftarget") if not target: return None - if target in ignore_references or target.endswith('Kwargs'): + if target in ignore_references or target.endswith("Kwargs"): return contnode - typ = node.get('reftype') - name = target.rsplit('.', 1)[-1] - if name in ('GT', 'GNT', 'KT', 'T', 'VT'): + typ = 
node.get("reftype") + name = target.rsplit(".", 1)[-1] + if name in ("GT", "GNT", "KT", "T", "VT"): return contnode - if typ == 'obj': - if target.startswith('typing.'): - if name in ('Any', 'Optional', 'Union'): + if typ == "obj": + if target.startswith("typing."): + if name in ("Any", "Optional", "Union"): return contnode - if typ != 'class': + if typ != "class": return None - if '.' in target: # maybe too specific - base_module, target = target.split('.', 1) - if base_module == 'graphql': - if '.' not in target: + if "." in target: # maybe too specific + base_module, target = target.split(".", 1) + if base_module == "graphql": + if "." not in target: return None - base_module, target = target.split('.', 1) - if '.' not in target: + base_module, target = target.split(".", 1) + if "." not in target: return None sub_modules = graphql_modules.get(base_module) if not sub_modules: - return - sub_module = target.split('.', 1)[0] + return None + sub_module = target.split(".", 1)[0] if sub_module not in sub_modules: return None - target = 'graphql.' + base_module + '.' + target.rsplit('.', 1)[-1] + target = "graphql." + base_module + "." + target.rsplit(".", 1)[-1] else: # maybe not specific enough base_module = graphql_classes.get(target) if not base_module: return None - target = 'graphql.' + base_module + '.' + target + target = "graphql." + base_module + "." + target # replace target - if contnode.__class__.__name__ == 'Text': + if contnode.__class__.__name__ == "Text": contnode = contnode.__class__(target) - elif contnode.__class__.__name__ == 'literal': + elif contnode.__class__.__name__ == "literal": if len(contnode.children) != 1: return None textnode = contnode.children[0] contnode.children[0] = textnode.__class__(target) else: return None - node['reftarget'] = target - fromdoc = node.get('refdoc') + node["reftarget"] = target + fromdoc = node.get("refdoc") if not fromdoc: - doc_module = node.get('py:module') + doc_module = node.get("py:module") if doc_module: - if doc_module.startswith('graphql.'): - doc_module = doc_module.split('.', 1)[-1] - if doc_module not in graphql_modules and doc_module != 'graphql': + if doc_module.startswith("graphql."): + doc_module = doc_module.split(".", 1)[-1] + if doc_module not in graphql_modules and doc_module != "graphql": doc_module = None - fromdoc = 'modules/' + (doc_module or base_module) + fromdoc = "modules/" + (doc_module or base_module) # try resolving again with replaced target - return env.domains['py'].resolve_xref( - env, fromdoc, app.builder, typ, target, node, contnode) + return env.domains["py"].resolve_xref( + env, fromdoc, app.builder, typ, target, node, contnode + ) def on_skip_member(_app, what, name, _obj, skip, _options): - if what == 'class' and name == "__init__": + if what == "class" and name == "__init__": # we could set "special-members" to "__init__", # but this gives an error when documenting modules return False @@ -224,7 +285,7 @@ def on_skip_member(_app, what, name, _obj, skip, _options): def setup(app): - app.connect('missing-reference', on_missing_reference) + app.connect("missing-reference", on_missing_reference) app.connect("autodoc-skip-member", on_skip_member) @@ -252,7 +313,7 @@ def setup(app): # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -268,15 +329,13 @@ def setup(app): # The theme to use for HTML and HTML Help pages. 
See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -html_theme_options = { - 'navigation_depth': 5 -} +html_theme_options = {"navigation_depth": 5} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -384,34 +443,36 @@ def setup(app): # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'GraphQL-core-3-doc' +htmlhelp_basename = "GraphQL-core-3-doc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'GraphQL-core-3.tex', 'GraphQL-core 3 Documentation', - 'Christoph Zwerschke', 'manual'), + ( + master_doc, + "GraphQL-core-3.tex", + "GraphQL-core 3 Documentation", + "Christoph Zwerschke", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -445,10 +506,7 @@ def setup(app): # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'graphql-core', 'GraphQL-core 3 Documentation', - [author], 1) -] +man_pages = [(master_doc, "graphql-core", "GraphQL-core 3 Documentation", [author], 1)] # If true, show URL addresses after external links. # @@ -461,9 +519,15 @@ def setup(app): # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'GraphQL-core', 'GraphQL-core 3 Documentation', - author, 'GraphQL-core 3', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "GraphQL-core", + "GraphQL-core 3 Documentation", + author, + "GraphQL-core 3", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. diff --git a/docs/modules/execution.rst b/docs/modules/execution.rst index 958cfbe1..7509676c 100644 --- a/docs/modules/execution.rst +++ b/docs/modules/execution.rst @@ -9,6 +9,8 @@ Execution .. autofunction:: execute +.. autofunction:: experimental_execute_incrementally + .. autofunction:: execute_sync .. autofunction:: default_field_resolver @@ -22,12 +24,37 @@ Execution .. autoclass:: FormattedExecutionResult :no-inherited-members: +.. autoclass:: ExperimentalIncrementalExecutionResults + +.. autoclass:: InitialIncrementalExecutionResult + +.. autoclass:: FormattedInitialIncrementalExecutionResult + :no-inherited-members: + +.. autoclass:: SubsequentIncrementalExecutionResult + +.. autoclass:: FormattedSubsequentIncrementalExecutionResult + :no-inherited-members: + +.. 
autoclass:: IncrementalDeferResult + +.. autoclass:: FormattedIncrementalDeferResult + :no-inherited-members: + +.. autoclass:: IncrementalStreamResult + +.. autoclass:: FormattedIncrementalStreamResult + :no-inherited-members: + +.. autoclass:: IncrementalResult + +.. autoclass:: FormattedIncrementalResult + :no-inherited-members: + .. autofunction:: subscribe .. autofunction:: create_source_event_stream -.. autoclass:: MapAsyncIterator - .. autoclass:: Middleware .. autoclass:: MiddlewareManager diff --git a/docs/modules/language.rst b/docs/modules/language.rst index b447787e..be45fd26 100644 --- a/docs/modules/language.rst +++ b/docs/modules/language.rst @@ -31,6 +31,7 @@ Each kind of AST node has its own class: .. autoclass:: EnumTypeExtensionNode .. autoclass:: EnumValueDefinitionNode .. autoclass:: EnumValueNode +.. autoclass:: ErrorBoundaryNode .. autoclass:: ExecutableDefinitionNode .. autoclass:: FieldDefinitionNode .. autoclass:: FieldNode @@ -44,11 +45,14 @@ Each kind of AST node has its own class: .. autoclass:: IntValueNode .. autoclass:: InterfaceTypeDefinitionNode .. autoclass:: InterfaceTypeExtensionNode +.. autoclass:: ListNullabilityOperatorNode .. autoclass:: ListTypeNode .. autoclass:: ListValueNode .. autoclass:: NameNode .. autoclass:: NamedTypeNode +.. autoclass:: NonNullAssertionNode .. autoclass:: NonNullTypeNode +.. autoclass:: NullabilityAssertionNode .. autoclass:: NullValueNode .. autoclass:: ObjectFieldNode .. autoclass:: ObjectTypeDefinitionNode diff --git a/docs/modules/pyutils.rst b/docs/modules/pyutils.rst index cd178d65..e33b5d1f 100644 --- a/docs/modules/pyutils.rst +++ b/docs/modules/pyutils.rst @@ -30,3 +30,7 @@ PyUtils .. autoclass:: SimplePubSub .. autoclass:: SimplePubSubIterator .. autodata:: Undefined +.. autoclass:: RefMap + :no-inherited-members: +.. autoclass:: RefSet + :no-inherited-members: diff --git a/docs/modules/type.rst b/docs/modules/type.rst index 393cb362..d3c3b4b8 100644 --- a/docs/modules/type.rst +++ b/docs/modules/type.rst @@ -122,7 +122,10 @@ Definitions .. autoclass:: GraphQLDirective .. autoclass:: GraphQLIncludeDirective .. autoclass:: GraphQLSkipDirective +.. autoclass:: GraphQLDeferDirective +.. autoclass:: GraphQLStreamDirective .. autoclass:: GraphQLDeprecatedDirective +.. autoclass:: GraphQLSpecifiedByDirective .. data:: specified_directives diff --git a/docs/modules/utilities.rst b/docs/modules/utilities.rst index 21571404..65169b39 100644 --- a/docs/modules/utilities.rst +++ b/docs/modules/utilities.rst @@ -18,10 +18,6 @@ Get the target Operation from a Document: .. autofunction:: get_operation_ast -Get the Type for the target Operation AST: - -.. autofunction:: get_operation_root_type - Convert a GraphQLSchema to an IntrospectionQuery: .. autofunction:: introspection_from_schema @@ -45,9 +41,10 @@ Sort a GraphQLSchema: Print a GraphQLSchema to GraphQL Schema language: -.. autofunction:: print_introspection_schema .. autofunction:: print_schema .. autofunction:: print_type +.. autofunction:: print_directive +.. autofunction:: print_introspection_schema Create a GraphQLType from a GraphQL language AST: @@ -98,11 +95,6 @@ Comparators for types: .. autofunction:: is_type_sub_type_of .. autofunction:: do_types_overlap -Assert that a string is a valid GraphQL name: - -.. autofunction:: assert_valid_name -.. autofunction:: is_valid_name_error - Compare two GraphQLSchemas and detect breaking changes: .. 
autofunction:: find_breaking_changes diff --git a/docs/requirements.txt b/docs/requirements.txt index 0ad38822..9652132e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=4.3,<5 -sphinx_rtd_theme>=1,<2 +sphinx>=7,<8 +sphinx_rtd_theme>=2,<3 diff --git a/poetry.lock b/poetry.lock index 7c0e0394..6af5b224 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,335 +1,779 @@ +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. + [[package]] name = "alabaster" -version = "0.7.12" +version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "attrs" -version = "22.1.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] - -[[package]] -name = "Babel" -version = "2.10.3" -description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.6" - -[package.dependencies] -pytz = ">=2015.7" - -[[package]] -name = "black" -version = "20.8b1" -description = "The uncompromising code formatter." -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -appdirs = "*" -click = ">=7.1.2" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -mypy-extensions = ">=0.4.3" -pathspec = ">=0.6,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" -typed-ast = ">=1.4.0" -typing-extensions = ">=3.7.4" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] [[package]] -name = "black" -version = "22.8.0" -description = "The uncompromising code formatter." 
-category = "dev" +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" optional = false -python-versions = ">=3.6.2" +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] [package.dependencies] -click = ">=8.0.0" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -mypy-extensions = ">=0.4.3" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} -typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] -name = "build" -version = "0.8.0" -description = "A simple, correct PEP 517 build frontend" -category = "dev" +name = "babel" +version = "2.17.0" +description = "Internationalization utilities" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, +] [package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=0.22", markers = "python_version < \"3.8\""} -packaging = ">=19.0" -pep517 = ">=0.9.1" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] -docs = ["furo (>=2021.08.31)", "sphinx (>=4.0,<5.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)"] -test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "toml (>=0.10.0)", "wheel (>=0.36.0)"] -typing = ["importlib-metadata (>=4.6.4)", "mypy (==0.950)", "typing-extensions (>=3.7.4.3)"] -virtualenv = ["virtualenv (>=20.0.35)"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" 
-category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] + +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] [[package]] name = "certifi" -version = "2022.9.14" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = 
"cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = 
"cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = 
"cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + 
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = 
"cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = false +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] [[package]] name = "charset-normalizer" -version = "2.0.12" +version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = 
"charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = 
"charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, +] -[package.extras] -unicode_backport = ["unicodedata2"] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] -name = "check-manifest" -version = "0.48" -description = "Check MANIFEST.in in a Python source package for completeness" -category = "dev" +name = "coverage" +version = "7.2.7" +description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +] [package.dependencies] -build = ">=0.1" -setuptools = "*" -tomli = "*" +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -test = ["mock (>=3.0.0)", "pytest"] +toml = ["tomli"] [[package]] -name = "click" -version = "8.0.4" -description = "Composable command line interface toolkit" -category = "dev" +name = "coverage" +version = "7.6.1" +description = "Code coverage measurement 
for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +] [package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} -[[package]] -name = "colorama" -version = "0.4.5" -description = "Cross-platform colored terminal text." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[package.extras] +toml = ["tomli"] [[package]] name = "coverage" -version = "6.2" +version = "7.8.0" description = "Code coverage measurement for Python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +files = [ + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = 
"coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = 
"coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = 
"coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, +] [package.dependencies] -tomli = {version = "*", optional = true, markers = "extra == \"toml\""} +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] -[[package]] -name = "dataclasses" -version = "0.8" -description = "A backport of the dataclasses module for Python 3.6" -category = "dev" -optional = false -python-versions = ">=3.6, <3.7" - [[package]] name = "distlib" -version = "0.3.6" +version = "0.3.9" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] [[package]] name = "docutils" -version = "0.17.1" +version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] [[package]] -name = "filelock" -version = "3.4.1" -description = "A platform independent file lock." -category = "dev" +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.6" - -[package.extras] -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] [[package]] -name = "filelock" -version = "3.8.0" -description = "A platform independent file lock." 
-category = "dev" +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] [package.extras] -docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] +test = ["pytest (>=6)"] [[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" +name = "filelock" +version = "3.12.2" +description = "A platform independent file lock." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, +] -[package.dependencies] -importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" +[package.extras] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] -name = "flake8" -version = "5.0.4" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" +name = "filelock" +version = "3.16.1" +description = "A platform independent file lock." 
optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8" +files = [ + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, +] -[package.dependencies] -importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "idna" -version = "3.4" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] [[package]] name = "importlib-metadata" -version = "4.2.0" +version = "6.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, +] [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] -name = "importlib-resources" -version = "5.4.0" -description = "Read resources from Python packages" -category = "dev" +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" optional = 
false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] [package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} +zipp = ">=3.20" [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] [[package]] -name = "Jinja2" -version = "3.0.3" +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "jinja2" +version = "3.1.6" description = "A very fast and expressive template engine." -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] [package.dependencies] MarkupSafe = ">=2.0" @@ -338,107 +782,356 @@ MarkupSafe = ">=2.0" i18n = ["Babel (>=2.7)"] [[package]] -name = "MarkupSafe" -version = "2.0.1" +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = 
"Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] [[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" +name = "mypy" +version = "1.4.1" +description = "Optional static typing for Python" optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash 
= "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] [[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -category = "dev" +name = "mypy" +version = "1.14.1" +description = "Optional static typing for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + 
{file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] [[package]] name = "mypy" -version = "0.971" +version = "1.15.0" description = "Optional static typing for Python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy_extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} -typing-extensions = ">=3.10" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] -python2 = ["typed-ast (>=1.4.0,<2)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "dev" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = "*" +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] [[package]] name = "packaging" -version = "21.3" +version = "24.0" description = "Core utilities for Python packages" -category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] [[package]] -name = "pathspec" -version = "0.9.0" -description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.8" +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] [[package]] -name = "pep517" -version = "0.13.0" -description = "Wrappers to build Python packages using PEP 517 hooks" -category = "dev" +name = "platformdirs" +version = "4.0.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, + {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, +] [package.dependencies] -importlib_metadata = {version = "*", markers = "python_version < \"3.8\""} -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -zipp = {version = "*", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "platformdirs" -version = "2.4.0" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] [package.extras] -docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] -test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -447,263 +1140,567 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] [[package]] name = "py-cpuinfo" -version = "8.0.0" -description = "Get CPU info with pure Python 2 & 3" -category = "dev" +version = "9.0.0" +description = "Get CPU info with pure Python" optional = false python-versions = "*" +files = [ + {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, +] [[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -category = "dev" +name = "pycparser" +version = "2.21" +description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = 
"pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] [[package]] -name = "pycodestyle" -version = "2.9.1" -description = "Python style guide checker" -category = "dev" +name = "pycparser" +version = "2.22" +description = "C parser in Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] [[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -category = "dev" +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] -name = "pyflakes" -version = "2.5.0" -description = "passive checker of Python programs" -category = "dev" +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] -name = "Pygments" -version = "2.13.0" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" +name = "pyproject-api" +version = "1.8.0" +description = "API to interact with the python pyproject.toml based projects" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +files = [ + {file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"}, + {file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"}, +] + +[package.dependencies] +packaging = ">=24.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -plugins = ["importlib-metadata"] +docs = ["furo (>=2024.8.6)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=75.1)"] [[package]] -name = "pyparsing" -version = "3.0.7" -description = "Python parsing module" -category = "dev" +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.2" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, + {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, +] + +[package.dependencies] +pytest = ">=7.0.0" +typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = 
"pytest-asyncio" +version = "0.24.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-asyncio" +version = "0.25.3" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-benchmark" +version = "4.0.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, + {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=3.8" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs"] + +[[package]] +name = "pytest-benchmark" +version = "5.1.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105"}, + {file = "pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=8.1" [package.extras] -diagrams = ["jinja2", "railroad-diagrams"] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs", "setuptools"] [[package]] -name = "pytest" -version = "6.2.5" -description = "pytest: simple powerful testing with Python" -category = "dev" +name = "pytest-codspeed" +version = "2.2.1" +description = "Pytest plugin to create CodSpeed benchmarks" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "pytest_codspeed-2.2.1-py3-none-any.whl", hash = "sha256:aad08033015f3e6c8c14c8bf0eca475921a9b088e92c98b626bf8af8f516471e"}, + {file = "pytest_codspeed-2.2.1.tar.gz", hash = "sha256:0adc24baf01c64a6ca0a0b83b3cd704351708997e09ec086b7776c32227d4e0a"}, +] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +cffi = ">=1.15.1" +filelock = ">=3.12.2" +pytest = ">=3.8" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.16.0" -description = "Pytest support for asyncio." -category = "dev" -optional = false -python-versions = ">= 3.6" +compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"] +lint = ["mypy (>=1.3.0,<1.4.0)", "ruff (>=0.3.3,<0.4.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] + +[[package]] +name = "pytest-codspeed" +version = "3.2.0" +description = "Pytest plugin to create CodSpeed benchmarks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5165774424c7ab8db7e7acdb539763a0e5657996effefdf0664d7fd95158d34"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bd55f92d772592c04a55209950c50880413ae46876e66bd349ef157075ca26c"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf6f56067538f4892baa8d7ab5ef4e45bb59033be1ef18759a2c7fc55b32035"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39a687b05c3d145642061b45ea78e47e12f13ce510104d1a2cda00eee0e36f58"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46a1afaaa1ac4c2ca5b0700d31ac46d80a27612961d031067d73c6ccbd8d3c2b"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48ce3af3dfa78413ed3d69d1924043aa1519048dbff46edccf8f35a25dab3c2"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:66692506d33453df48b36a84703448cb8b22953eea51f03fbb2eb758dc2bdc4f"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:479774f80d0bdfafa16112700df4dbd31bf2a6757fac74795fd79c0a7b3c389b"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:109f9f4dd1088019c3b3f887d003b7d65f98a7736ca1d457884f5aa293e8e81c"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2f69a03b52c9bb041aec1b8ee54b7b6c37a6d0a948786effa4c71157765b6da"}, + {file = "pytest_codspeed-3.2.0-py3-none-any.whl", hash = "sha256:54b5c2e986d6a28e7b0af11d610ea57bd5531cec8326abe486f1b55b09d91c39"}, + {file = "pytest_codspeed-3.2.0.tar.gz", hash = "sha256:f9d1b1a3b2c69cdc0490a1e8b1ced44bffbd0e8e21d81a7160cfdd923f6e8155"}, +] [package.dependencies] -pytest = ">=5.4.0" +cffi = ">=1.17.1" +importlib-metadata = {version = ">=8.5.0", markers = "python_version < \"3.10\""} +pytest = ">=3.8" +rich = ">=13.8.1" [package.extras] -testing = ["coverage", "hypothesis (>=5.7.1)"] +compat = ["pytest-benchmark (>=5.0.0,<5.1.0)", "pytest-xdist (>=3.6.1,<3.7.0)"] +lint = ["mypy (>=1.11.2,<1.12.0)", "ruff (>=0.6.5,<0.7.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] -name = "pytest-asyncio" -version = "0.19.0" -description = "Pytest support for asyncio" -category = "dev" +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] [package.dependencies] -pytest = ">=6.1.0" -typing-extensions = {version = ">=3.7.2", markers = "python_version < \"3.8\""} +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" [package.extras] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] -name = "pytest-benchmark" -version = "3.4.1" -description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." -category = "dev" +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] [package.dependencies] -py-cpuinfo = "*" -pytest = ">=3.8" +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" [package.extras] -aspect = ["aspectlib"] -elasticsearch = ["elasticsearch"] -histogram = ["pygal", "pygaljs"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-cov" -version = "3.0.0" +version = "6.1.1" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +files = [ + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, +] [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} +coverage = {version = ">=7.5", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-describe" -version = "2.0.1" +version = "2.2.0" description = "Describe-style plugin for pytest" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" +files = [ + {file = "pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7"}, + {file = "pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d"}, +] [package.dependencies] -pytest = ">=4.0.0" +pytest = ">=4.6,<9" [[package]] name = "pytest-timeout" -version = "2.1.0" +version = "2.3.1" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] [package.dependencies] -pytest = ">=5.0.0" +pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2022.2.1" +version = "2025.2" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] [[package]] -name = "regex" -version = "2022.9.13" -description = "Alternative regular expression module, to replace re." -category = "dev" +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests" -version = "2.27.1" +version = "2.32.3" description = "Python HTTP for Humans." 
-category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} -urllib3 = ">=1.21.1,<1.27" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] -name = "setuptools" -version = "59.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" +name = "rich" +version = "14.0.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8.0" +files = [ + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-inline-tabs", "sphinxcontrib-towncrier"] -testing = ["flake8-2020", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "paver", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "pytest-virtualenv (>=1.2.7)", "pytest-xdist", "sphinx", "virtualenv (>=13.0.0)", "wheel"] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "ruff" +version = "0.11.8" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3"}, + {file = "ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835"}, + {file = "ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304"}, + {file = "ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2"}, + {file = "ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4"}, + {file = "ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2"}, + {file = "ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8"}, +] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" optional = false python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] [[package]] -name = "Sphinx" -version = "4.3.2" +name = "sphinx" +version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] [package.dependencies] alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.18" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" requests = ">=2.5.0" -setuptools = "*" -snowballstemmer = ">=1.1" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinx" +version = "7.1.2" +description = "Python documentation generator" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, + {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.21" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.13" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" @@ -713,31 +1710,53 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.920)", "types-pkg-resources", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-rtd-theme" -version = "1.0.0" +version = "2.0.0" description = "Read the Docs theme for Sphinx" -category = "dev" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = ">=3.6" +files = [ + {file = 
"sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, + {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, +] [package.dependencies] -docutils = "<0.18" -sphinx = ">=1.6" +docutils = "<0.21" +sphinx = ">=5,<8" +sphinxcontrib-jquery = ">=4,<5" [package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -747,9 +1766,12 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -759,21 +1781,56 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["html5lib", "pytest"] +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] [package.extras] test = ["flake8", "mypy", "pytest"] @@ -782,9 +1839,12 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -794,45 +1854,79 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" optional = false python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - [[package]] name = "tomli" -version = "1.2.3" +version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] [[package]] name = "tox" -version = "3.25.0" +version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "tox-3.28.0-py2.py3-none-any.whl", hash = "sha256:57b5ab7e8bb3074edc3c0c0b4b192a4f3799d3723b2c5b76f1fa9f2d40316eea"}, + {file = "tox-3.28.0.tar.gz", hash = "sha256:d0d28f3fe6d6d7195c27f8b054c3e99d5451952b54abdae673b71609a581f640"}, +] [package.dependencies] colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} @@ -842,7 +1936,7 @@ packaging = ">=14" pluggy = ">=0.12.0" py = ">=1.4.17" six = ">=1.14.0" -toml = ">=0.9.4" +tomli = {version = ">=2.0.1", markers = "python_version >= \"3.7\" and python_version < \"3.11\""} virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" [package.extras] @@ 
-851,649 +1945,213 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "3.26.0" +version = "4.25.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.8" +files = [ + {file = "tox-4.25.0-py3-none-any.whl", hash = "sha256:4dfdc7ba2cc6fdc6688dde1b21e7b46ff6c41795fb54586c91a3533317b5255c"}, + {file = "tox-4.25.0.tar.gz", hash = "sha256:dd67f030317b80722cf52b246ff42aafd3ed27ddf331c415612d084304cf5e52"}, +] [package.dependencies] -colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} -filelock = ">=3.0.0" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -packaging = ">=14" -pluggy = ">=0.12.0" -py = ">=1.4.17" -six = ">=1.14.0" -tomli = {version = ">=2.0.1", markers = "python_version >= \"3.7\" and python_version < \"3.11\""} -virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" +cachetools = ">=5.5.1" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.16.1" +packaging = ">=24.2" +platformdirs = ">=4.3.6" +pluggy = ">=1.5" +pyproject-api = ">=1.8" +tomli = {version = ">=2.2.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} +virtualenv = ">=20.29.1" [package.extras] -docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)"] +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.4)", "pytest-mock (>=3.14)"] [[package]] name = "typed-ast" -version = "1.5.4" +version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, + {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, 
+ {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, + {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, + {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, + {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, + {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, + {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, + 
{file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, + {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, + {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, +] [[package]] name = "typing-extensions" -version = "4.1.1" -description = "Backported and Experimental Type Hints for Python 3.6+" -category = "main" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] [[package]] name = "typing-extensions" -version = "4.3.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "urllib3" +version = "2.0.7" +description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, + {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "urllib3" -version = "1.26.12" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.16.2" +version = "20.26.6" description = "Virtual Python Environment builder" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, +] [package.dependencies] -distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} -platformdirs = ">=2,<3" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +importlib-metadata = {version = ">=6.6", markers = "python_version < \"3.8\""} +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "virtualenv" +version = "20.30.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +files = [ + {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, + {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", 
"pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "zipp" -version = "3.6.0" +version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] [package.extras] -docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] -[metadata] -lock-version = "1.1" -python-versions = "^3.6" -content-hash = "dc1f4a6e11fed7704f09dcd5e3b3af090398383b36458c966a52c840b7fc6c78" - -[metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] -attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, -] -Babel = [ - {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, - {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, -] -black = [ - {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, - {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, - {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, - {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, - {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, - {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, - {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, - {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, - {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, - {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, - {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, - {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, - {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, - {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, - {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, - {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, - {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, - {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, - {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, - {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, - {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, - {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, -] -build = [ - {file = "build-0.8.0-py3-none-any.whl", hash = "sha256:19b0ed489f92ace6947698c3ca8436cb0556a66e2aa2d34cd70e2a5d27cd0437"}, - {file = "build-0.8.0.tar.gz", hash = "sha256:887a6d471c901b1a6e6574ebaeeebb45e5269a79d095fe9a8f88d6614ed2e5f0"}, -] -bump2version = [ - {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, - {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, -] -certifi = [ - {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, - {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = 
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] -check-manifest = [ - {file = "check-manifest-0.48.tar.gz", hash = "sha256:3b575f1dade7beb3078ef4bf33a94519834457c7281dbc726b15c5466b55c657"}, - {file = "check_manifest-0.48-py3-none-any.whl", hash = "sha256:b1923685f98c1c2468601a1a7bed655db549a25d43c583caded3860ad8308f8c"}, -] -click = [ - {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, - {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, -] -colorama = [ - {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, - {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, -] -coverage = [ - {file = "coverage-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6dbc1536e105adda7a6312c778f15aaabe583b0e9a0b0a324990334fd458c94b"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:174cf9b4bef0db2e8244f82059a5a72bd47e1d40e71c68ab055425172b16b7d0"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:92b8c845527eae547a2a6617d336adc56394050c3ed8a6918683646328fbb6da"}, - {file = "coverage-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c7912d1526299cb04c88288e148c6c87c0df600eca76efd99d84396cfe00ef1d"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d2033d5db1d58ae2d62f095e1aefb6988af65b4b12cb8987af409587cc0739"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3feac4084291642165c3a0d9eaebedf19ffa505016c4d3db15bfe235718d4971"}, - {file = "coverage-6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:276651978c94a8c5672ea60a2656e95a3cce2a3f31e9fb2d5ebd4c215d095840"}, - {file = "coverage-6.2-cp310-cp310-win32.whl", hash = "sha256:f506af4f27def639ba45789fa6fde45f9a217da0be05f8910458e4557eed020c"}, - {file = "coverage-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:3f7c17209eef285c86f819ff04a6d4cbee9b33ef05cbcaae4c0b4e8e06b3ec8f"}, - {file = "coverage-6.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:13362889b2d46e8d9f97c421539c97c963e34031ab0cb89e8ca83a10cc71ac76"}, - {file = "coverage-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22e60a3ca5acba37d1d4a2ee66e051f5b0e1b9ac950b5b0cf4aa5366eda41d47"}, - {file = "coverage-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:b637c57fdb8be84e91fac60d9325a66a5981f8086c954ea2772efe28425eaf64"}, - {file = "coverage-6.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f467bbb837691ab5a8ca359199d3429a11a01e6dfb3d9dcc676dc035ca93c0a9"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2641f803ee9f95b1f387f3e8f3bf28d83d9b69a39e9911e5bfee832bea75240d"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1219d760ccfafc03c0822ae2e06e3b1248a8e6d1a70928966bafc6838d3c9e48"}, - {file = "coverage-6.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a2b5b52be0a8626fcbffd7e689781bf8c2ac01613e77feda93d96184949a98e"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:8e2c35a4c1f269704e90888e56f794e2d9c0262fb0c1b1c8c4ee44d9b9e77b5d"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5d6b09c972ce9200264c35a1d53d43ca55ef61836d9ec60f0d44273a31aa9f17"}, - {file = "coverage-6.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e3db840a4dee542e37e09f30859f1612da90e1c5239a6a2498c473183a50e781"}, - {file = "coverage-6.2-cp36-cp36m-win32.whl", hash = "sha256:4e547122ca2d244f7c090fe3f4b5a5861255ff66b7ab6d98f44a0222aaf8671a"}, - {file = "coverage-6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:01774a2c2c729619760320270e42cd9e797427ecfddd32c2a7b639cdc481f3c0"}, - {file = "coverage-6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb8b8ee99b3fffe4fd86f4c81b35a6bf7e4462cba019997af2fe679365db0c49"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:619346d57c7126ae49ac95b11b0dc8e36c1dd49d148477461bb66c8cf13bb521"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a7726f74ff63f41e95ed3a89fef002916c828bb5fcae83b505b49d81a066884"}, - {file = "coverage-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cfd9386c1d6f13b37e05a91a8583e802f8059bebfccde61a418c5808dea6bbfa"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:17e6c11038d4ed6e8af1407d9e89a2904d573be29d51515f14262d7f10ef0a64"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c254b03032d5a06de049ce8bca8338a5185f07fb76600afff3c161e053d88617"}, - {file = "coverage-6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dca38a21e4423f3edb821292e97cec7ad38086f84313462098568baedf4331f8"}, - {file = "coverage-6.2-cp37-cp37m-win32.whl", hash = "sha256:600617008aa82032ddeace2535626d1bc212dfff32b43989539deda63b3f36e4"}, - {file = "coverage-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:bf154ba7ee2fd613eb541c2bc03d3d9ac667080a737449d1a3fb342740eb1a74"}, - {file = "coverage-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9afb5b746781fc2abce26193d1c817b7eb0e11459510fba65d2bd77fe161d9e"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edcada2e24ed68f019175c2b2af2a8b481d3d084798b8c20d15d34f5c733fa58"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9c8c4283e17690ff1a7427123ffb428ad6a52ed720d550e299e8291e33184dc"}, - {file = "coverage-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f614fc9956d76d8a88a88bb41ddc12709caa755666f580af3a688899721efecd"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9365ed5cce5d0cf2c10afc6add145c5037d3148585b8ae0e77cc1efdd6aa2953"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8bdfe9ff3a4ea37d17f172ac0dff1e1c383aec17a636b9b35906babc9f0f5475"}, - {file = "coverage-6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:63c424e6f5b4ab1cf1e23a43b12f542b0ec2e54f99ec9f11b75382152981df57"}, - {file = "coverage-6.2-cp38-cp38-win32.whl", hash = "sha256:49dbff64961bc9bdd2289a2bda6a3a5a331964ba5497f694e2cbd540d656dc1c"}, - {file = "coverage-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:9a29311bd6429be317c1f3fe4bc06c4c5ee45e2fa61b2a19d4d1d6111cb94af2"}, - {file = "coverage-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:03b20e52b7d31be571c9c06b74746746d4eb82fc260e594dc662ed48145e9efd"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:215f8afcc02a24c2d9a10d3790b21054b58d71f4b3c6f055d4bb1b15cecce685"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a4bdeb0a52d1d04123b41d90a4390b096f3ef38eee35e11f0b22c2d031222c6c"}, - {file = "coverage-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c332d8f8d448ded473b97fefe4a0983265af21917d8b0cdcb8bb06b2afe632c3"}, - {file = "coverage-6.2-cp39-cp39-win32.whl", hash = "sha256:6e1394d24d5938e561fbeaa0cd3d356207579c28bd1792f25a068743f2d5b282"}, - {file = "coverage-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:86f2e78b1eff847609b1ca8050c9e1fa3bd44ce755b2ec30e70f2d3ba3844644"}, - {file = "coverage-6.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:5829192582c0ec8ca4a2532407bc14c2f338d9878a10442f5d03804a95fac9de"}, - {file = "coverage-6.2.tar.gz", hash = "sha256:e2cad8093172b7d1595b4ad66f24270808658e11acf43a8f95b41276162eb5b8"}, -] -dataclasses = [ - {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, - {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, -] -distlib = [ - {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, - {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, -] -docutils = [ - {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, - {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, -] -filelock = [ - {file = "filelock-3.4.1-py3-none-any.whl", hash = "sha256:a4bc51381e01502a30e9f06dd4fa19a1712eab852b6fb0f84fd7cce0793d8ca3"}, - {file = "filelock-3.4.1.tar.gz", hash = "sha256:0f12f552b42b5bf60dba233710bf71337d35494fc8bdd4fd6d9f6d082ad45e06"}, - {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, - {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, -] -flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, -] -idna = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] -imagesize = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] -importlib-metadata = [ - {file = 
"importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, -] -importlib-resources = [ - {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, - {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -Jinja2 = [ - {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, - {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, -] -MarkupSafe = [ - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, - {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] -mypy = [ - {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, - {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, - {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, - {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, - {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, - {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, - {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, - {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, - {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, - {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, - {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = 
"sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, - {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, - {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, - {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, - {file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, - {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, - {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, - {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, - {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, - {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, - {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -pep517 = [ - {file = "pep517-0.13.0-py3-none-any.whl", hash = "sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b"}, - {file = "pep517-0.13.0.tar.gz", hash = "sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59"}, -] -platformdirs = [ - {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, - {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = 
"sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -py-cpuinfo = [ - {file = "py-cpuinfo-8.0.0.tar.gz", hash = "sha256:5f269be0e08e33fd959de96b34cd4aeeeacac014dd8305f70eb28d06de2345c5"}, -] -pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, -] -pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, -] -Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, -] -pyparsing = [ - {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, - {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, -] -pytest = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.16.0.tar.gz", hash = "sha256:7496c5977ce88c34379df64a66459fe395cd05543f0a2f837016e7144391fcfb"}, - {file = "pytest_asyncio-0.16.0-py3-none-any.whl", hash = "sha256:5f2a21273c47b331ae6aa5b36087047b4899e40f03f18397c0e65fa5cca54e9b"}, - {file = "pytest-asyncio-0.19.0.tar.gz", hash = "sha256:ac4ebf3b6207259750bc32f4c1d8fcd7e79739edbc67ad0c58dd150b1d072fed"}, - {file = "pytest_asyncio-0.19.0-py3-none-any.whl", hash = "sha256:7a97e37cfe1ed296e2e84941384bdd37c376453912d397ed39293e0916f521fa"}, -] -pytest-benchmark = [ - {file = "pytest-benchmark-3.4.1.tar.gz", hash = "sha256:40e263f912de5a81d891619032983557d62a3d85843f9a9f30b98baea0cd7b47"}, - {file = "pytest_benchmark-3.4.1-py2.py3-none-any.whl", hash = "sha256:36d2b08c4882f6f997fd3126a3d6dfd70f3249cde178ed8bbc0b73db7c20f809"}, -] -pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, -] -pytest-describe = [ - {file = "pytest-describe-2.0.1.tar.gz", hash = "sha256:e5cbaa31169f0060348ad5ca0191027e5f1f41f3f27fdeef208365e09c55eb9a"}, - {file = "pytest_describe-2.0.1-py3-none-any.whl", hash = "sha256:ea347838bdf774b498ee7cb4a0b802a40be89e667a399fb63d860e3223bf4183"}, -] 
-pytest-timeout = [ - {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, - {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, -] -pytz = [ - {file = "pytz-2022.2.1-py2.py3-none-any.whl", hash = "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197"}, - {file = "pytz-2022.2.1.tar.gz", hash = "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5"}, -] -regex = [ - {file = "regex-2022.9.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0394265391a86e2bbaa7606e59ac71bd9f1edf8665a59e42771a9c9adbf6fd4f"}, - {file = "regex-2022.9.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86df2049b18745f3cd4b0f4c4ef672bfac4b80ca488e6ecfd2bbfe68d2423a2c"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce331b076b2b013e7d7f07157f957974ef0b0881a808e8a4a4b3b5105aee5d04"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:360ffbc9357794ae41336b681dff1c0463193199dfb91fcad3ec385ea4972f46"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18e503b1e515a10282b3f14f1b3d856194ecece4250e850fad230842ed31227f"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e167d1ccd41d27b7b6655bb7a2dcb1b1eb1e0d2d662043470bd3b4315d8b2b"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4146cb7ae6029fc83b5c905ec6d806b7e5568dc14297c423e66b86294bad6c39"}, - {file = "regex-2022.9.13-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a1aec4ae549fd7b3f52ceaf67e133010e2fba1538bf4d5fc5cd162a5e058d5df"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cab548d6d972e1de584161487b2ac1aa82edd8430d1bde69587ba61698ad1cfb"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d64e1a7e6d98a4cdc8b29cb8d8ed38f73f49e55fbaa737bdb5933db99b9de22"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:67a4c625361db04ae40ef7c49d3cbe2c1f5ff10b5a4491327ab20f19f2fb5d40"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:5d0dd8b06896423211ce18fba0c75dacc49182a1d6514c004b535be7163dca0f"}, - {file = "regex-2022.9.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4318f69b79f9f7d84a7420e97d4bfe872dc767c72f891d4fea5fa721c74685f7"}, - {file = "regex-2022.9.13-cp310-cp310-win32.whl", hash = "sha256:26df88c9636a0c3f3bd9189dd435850a0c49d0b7d6e932500db3f99a6dd604d1"}, - {file = "regex-2022.9.13-cp310-cp310-win_amd64.whl", hash = "sha256:6fe1dd1021e0f8f3f454ce2811f1b0b148f2d25bb38c712fec00316551e93650"}, - {file = "regex-2022.9.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83cc32a1a2fa5bac00f4abc0e6ce142e3c05d3a6d57e23bd0f187c59b4e1e43b"}, - {file = "regex-2022.9.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2effeaf50a6838f3dd4d3c5d265f06eabc748f476e8441892645ae3a697e273"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a786a55d00439d8fae4caaf71581f2aaef7297d04ee60345c3594efef5648a"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b7b701dbc124558fd2b1b08005eeca6c9160e209108fbcbd00091fcfac641ac7"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab81cc4d58026861445230cfba27f9825e9223557926e7ec22156a1a140d55c"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0c5cc3d1744a67c3b433dce91e5ef7c527d612354c1f1e8576d9e86bc5c5e2"}, - {file = "regex-2022.9.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:518272f25da93e02af4f1e94985f5042cec21557ef3591027d0716f2adda5d0a"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8418ee2cb857b83881b8f981e4c636bc50a0587b12d98cb9b947408a3c484fe7"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cfa4c956ff0a977c4823cb3b930b0a4e82543b060733628fec7ab3eb9b1abe37"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a1c4d17879dd4c4432c08a1ca1ab379f12ab54af569e945b6fc1c4cf6a74ca45"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:77c2879d3ba51e5ca6c2b47f2dcf3d04a976a623a8fc8236010a16c9e0b0a3c7"}, - {file = "regex-2022.9.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2885ec6eea629c648ecc9bde0837ec6b92208b7f36381689937fe5d64a517e8"}, - {file = "regex-2022.9.13-cp311-cp311-win32.whl", hash = "sha256:2dda4b096a6f630d6531728a45bd12c67ec3badf44342046dc77d4897277d4f2"}, - {file = "regex-2022.9.13-cp311-cp311-win_amd64.whl", hash = "sha256:592b9e2e1862168e71d9e612bfdc22c451261967dbd46681f14e76dfba7105fd"}, - {file = "regex-2022.9.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:df8fe00b60e4717662c7f80c810ba66dcc77309183c76b7754c0dff6f1d42054"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:995e70bb8c91d1b99ed2aaf8ec44863e06ad1dfbb45d7df95f76ef583ec323a9"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad75173349ad79f9d21e0d0896b27dcb37bfd233b09047bc0b4d226699cf5c87"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7681c49da1a2d4b905b4f53d86c9ba4506e79fba50c4a664d9516056e0f7dfcc"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bc8edc5f8ef0ebb46f3fa0d02bd825bbe9cc63d59e428ffb6981ff9672f6de1"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bee775ff05c9d519195bd9e8aaaccfe3971db60f89f89751ee0f234e8aeac5"}, - {file = "regex-2022.9.13-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1a901ce5cd42658ab8f8eade51b71a6d26ad4b68c7cfc86b87efc577dfa95602"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:14a7ab070fa3aec288076eed6ed828587b805ef83d37c9bfccc1a4a7cfbd8111"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d23ac6b4bf9e32fcde5fcdb2e1fd5e7370d6693fcac51ee1d340f0e886f50d1f"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:4cdbfa6d2befeaee0c899f19222e9b20fc5abbafe5e9c43a46ef819aeb7b75e5"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ab07934725e6f25c6f87465976cc69aef1141e86987af49d8c839c3ffd367c72"}, - {file = "regex-2022.9.13-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d2a1371dc73e921f3c2e087c05359050f3525a9a34b476ebc8130e71bec55e97"}, - {file = "regex-2022.9.13-cp36-cp36m-win32.whl", hash = "sha256:fcbd1edff1473d90dc5cf4b52d355cf1f47b74eb7c85ba6e45f45d0116b8edbd"}, - {file = "regex-2022.9.13-cp36-cp36m-win_amd64.whl", hash = "sha256:fe428822b7a8c486bcd90b334e9ab541ce6cc0d6106993d59f201853e5e14121"}, - {file = "regex-2022.9.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d7430f041755801b712ec804aaf3b094b9b5facbaa93a6339812a8e00d7bd53a"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:079c182f99c89524069b9cd96f5410d6af437e9dca576a7d59599a574972707e"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59bac44b5a07b08a261537f652c26993af9b1bbe2a29624473968dd42fc29d56"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a59d0377e58d96a6f11636e97992f5b51b7e1e89eb66332d1c01b35adbabfe8a"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9d68eb704b24bc4d441b24e4a12653acd07d2c39940548761e0985a08bc1fff"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0385d66e73cdd4462f3cc42c76a6576ddcc12472c30e02a2ae82061bff132c32"}, - {file = "regex-2022.9.13-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db45016364eec9ddbb5af93c8740c5c92eb7f5fc8848d1ae04205a40a1a2efc6"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:03ff695518482b946a6d3d4ce9cbbd99a21320e20d94913080aa3841f880abcd"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6b32b45433df1fad7fed738fe15200b6516da888e0bd1fdd6aa5e50cc16b76bc"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:003a2e1449d425afc817b5f0b3d4c4aa9072dd5f3dfbf6c7631b8dc7b13233de"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a9eb9558e1d0f78e07082d8a70d5c4d631c8dd75575fae92105df9e19c736730"}, - {file = "regex-2022.9.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f6e0321921d2fdc082ef90c1fd0870f129c2e691bfdc4937dcb5cd308aba95c4"}, - {file = "regex-2022.9.13-cp37-cp37m-win32.whl", hash = "sha256:3f3b4594d564ed0b2f54463a9f328cf6a5b2a32610a90cdff778d6e3e561d08b"}, - {file = "regex-2022.9.13-cp37-cp37m-win_amd64.whl", hash = "sha256:8aba0d01e3dfd335f2cb107079b07fdddb4cd7fb2d8c8a1986f9cb8ce9246c24"}, - {file = "regex-2022.9.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:944567bb08f52268d8600ee5bdf1798b2b62ea002cc692a39cec113244cbdd0d"}, - {file = "regex-2022.9.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b664a4d33ffc6be10996606dfc25fd3248c24cc589c0b139feb4c158053565e"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f06cc1190f3db3192ab8949e28f2c627e1809487e2cfc435b6524c1ce6a2f391"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c57d50d4d5eb0c862569ca3c840eba2a73412f31d9ecc46ef0d6b2e621a592b"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19a4da6f513045f5ba00e491215bd00122e5bd131847586522463e5a6b2bd65f"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a926339356fe29595f8e37af71db37cd87ff764e15da8ad5129bbaff35bcc5a6"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:091efcfdd4178a7e19a23776dc2b1fafb4f57f4d94daf340f98335817056f874"}, - {file = "regex-2022.9.13-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:880dbeb6bdde7d926b4d8e41410b16ffcd4cb3b4c6d926280fea46e2615c7a01"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:73b985c9fc09a7896846e26d7b6f4d1fd5a20437055f4ef985d44729f9f928d0"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c0b7cb9598795b01f9a3dd3f770ab540889259def28a3bf9b2fa24d52edecba3"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:37e5a26e76c46f54b3baf56a6fdd56df9db89758694516413757b7d127d4c57b"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:99945ddb4f379bb9831c05e9f80f02f079ba361a0fb1fba1fc3b267639b6bb2e"}, - {file = "regex-2022.9.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dcbcc9e72a791f622a32d17ff5011326a18996647509cac0609a7fc43adc229"}, - {file = "regex-2022.9.13-cp38-cp38-win32.whl", hash = "sha256:d3102ab9bf16bf541ca228012d45d88d2a567c9682a805ae2c145a79d3141fdd"}, - {file = "regex-2022.9.13-cp38-cp38-win_amd64.whl", hash = "sha256:14216ea15efc13f28d0ef1c463d86d93ca7158a79cd4aec0f9273f6d4c6bb047"}, - {file = "regex-2022.9.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9a165a05979e212b2c2d56a9f40b69c811c98a788964e669eb322de0a3e420b4"}, - {file = "regex-2022.9.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:14c71437ffb89479c89cc7022a5ea2075a842b728f37205e47c824cc17b30a42"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee7045623a5ace70f3765e452528b4c1f2ce669ed31959c63f54de64fe2f6ff7"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e521d9db006c5e4a0f8acfef738399f72b704913d4e083516774eb51645ad7c"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b86548b8234b2be3985dbc0b385e35f5038f0f3e6251464b827b83ebf4ed90e5"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b39ee3b280e15824298b97cec3f7cbbe6539d8282cc8a6047a455b9a72c598"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6e6e61e9a38b6cc60ca3e19caabc90261f070f23352e66307b3d21a24a34aaf"}, - {file = "regex-2022.9.13-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d837ccf3bd2474feabee96cd71144e991472e400ed26582edc8ca88ce259899c"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6adfe300848d61a470ec7547adc97b0ccf86de86a99e6830f1d8c8d19ecaf6b3"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d5b003d248e6f292475cd24b04e5f72c48412231961a675edcb653c70730e79e"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d5edd3eb877c9fc2e385173d4a4e1d792bf692d79e25c1ca391802d36ecfaa01"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:50e764ffbd08b06aa8c4e86b8b568b6722c75d301b33b259099f237c46b2134e"}, - {file = "regex-2022.9.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:6d43bd402b27e0e7eae85c612725ba1ce7798f20f6fab4e8bc3de4f263294f03"}, - {file = "regex-2022.9.13-cp39-cp39-win32.whl", hash = "sha256:7fcf7f94ccad19186820ac67e2ec7e09e0ac2dac39689f11cf71eac580503296"}, - {file = "regex-2022.9.13-cp39-cp39-win_amd64.whl", hash = "sha256:322bd5572bed36a5b39952d88e072738926759422498a96df138d93384934ff8"}, - {file = "regex-2022.9.13.tar.gz", hash = "sha256:f07373b6e56a6f3a0df3d75b651a278ca7bd357a796078a26a958ea1ce0588fd"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -setuptools = [ - {file = "setuptools-59.6.0-py3-none-any.whl", hash = "sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e"}, - {file = "setuptools-59.6.0.tar.gz", hash = "sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] -Sphinx = [ - {file = "Sphinx-4.3.2-py3-none-any.whl", hash = "sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851"}, - {file = "Sphinx-4.3.2.tar.gz", hash = "sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c"}, -] -sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, - {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = 
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomli = [ - {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, - {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -tox = [ - {file = "tox-3.25.0-py2.py3-none-any.whl", hash = "sha256:0805727eb4d6b049de304977dfc9ce315a1938e6619c3ab9f38682bb04662a5a"}, - {file = "tox-3.25.0.tar.gz", hash = "sha256:37888f3092aa4e9f835fc8cc6dadbaaa0782651c41ef359e3a5743fcb0308160"}, - {file = "tox-3.26.0-py2.py3-none-any.whl", hash = "sha256:bf037662d7c740d15c9924ba23bb3e587df20598697bb985ac2b49bdc2d847f6"}, - {file = "tox-3.26.0.tar.gz", hash = "sha256:44f3c347c68c2c68799d7d44f1808f9d396fc8a1a500cbc624253375c7ae107e"}, -] -typed-ast = [ - {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, - {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, - {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, - {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, - {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, - {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, - {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, - {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, - {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, - {file = 
"typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, - {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, - {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, - {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, - {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, - {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, - {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, - {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, - {file = "typed_ast-1.5.4.tar.gz", hash = "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, -] -typing-extensions = [ - {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, - {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, -] -urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, -] -virtualenv = [ - {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, - {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, -] -zipp = [ - {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, - {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = 
false
+python-versions = ">=3.8"
+files = [
+    {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
+    {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
 ]
+
+[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.7"
+content-hash = "73cdf582288c9a4f22ebca27df8a40982b23954061d23e7d2301dfe9877cdb8d"
diff --git a/pyproject.toml b/pyproject.toml
index 439a4256..e8d2ec6d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,11 @@
 [tool.poetry]
 name = "graphql-core"
-version = "3.2.2"
-description = """
+version = "3.3.0a7"
+description = """\
 GraphQL-core is a Python port of GraphQL.js,\
  the JavaScript reference implementation for GraphQL."""
 license = "MIT"
-authors = [
-    "Christoph Zwerschke <cito@online.de>"
-]
+authors = ["Christoph Zwerschke <cito@online.de>"]
 readme = "README.md"
 homepage = "https://github.com/graphql-python/graphql-core"
 repository = "https://github.com/graphql-python/graphql-core"
@@ -18,72 +16,320 @@ classifiers = [
     "Intended Audience :: Developers",
     "License :: OSI Approved :: MIT License",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.6",
     "Programming Language :: Python :: 3.7",
     "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10"
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13"
 ]
 packages = [
     { include = "graphql", from = "src" },
     { include = "tests", format = "sdist" },
     { include = "docs", format = "sdist" },
-    { include = '.bumpversion.cfg', format = "sdist" },
-    { include = '.coveragerc', format = "sdist" },
-    { include = '.editorconfig', format = "sdist" },
-    { include = '.flake8', format = "sdist" },
-    { include = '.mypy.ini', format = "sdist" },
-    { include = '.readthedocs.yaml', format = "sdist" },
-    { include = 'poetry.lock', format = "sdist" },
-    { include = 'tox.ini', format = "sdist" },
-    { include = 'setup.cfg', format = "sdist" },
-    { include = 'CODEOWNERS', format = "sdist" },
-    { include = 'MANIFEST.in', format = "sdist" },
-    { include = 'SECURITY.md', format = "sdist" }
+    { include = ".bumpversion.cfg", format = "sdist" },
+    { include = ".editorconfig", format = "sdist" },
+    { include = ".readthedocs.yaml", format = "sdist" },
+    { include = "poetry.lock", format = "sdist" },
+    { include = "tox.ini", format = "sdist" },
+    { include = "CODEOWNERS", format = "sdist" },
+    { include = "SECURITY.md", format = "sdist" }
 ]
+exclude = ["docs/_build/**"]
+
+[tool.poetry.urls]
+Changelog = "https://github.com/graphql-python/graphql-core/releases"

 [tool.poetry.dependencies]
-python = "^3.6"
+python = "^3.7"
 typing-extensions = [
-    { version = "^4.3", python = ">=3.7,<3.8" },
-    { version = "^4.1", python = "<3.7" }
+    { version = "^4.12.2", python = ">=3.8,<3.10" },
+    { version = "^4.7.1", python = "<3.8" },
 ]

-[tool.poetry.dev-dependencies]
-pytest = "^6.2"
+[tool.poetry.group.test]
+optional = true
+
+[tool.poetry.group.test.dependencies]
+pytest = [
+    { version = "^8.3", python = ">=3.8" },
+    { version = "^7.4", python = "<3.8" }
+]
 pytest-asyncio = [
-    {version=">=0.19,<1", python = ">=3.7" },
-    {version=">=0.16,<0.17", python = "<3.7" },
-]
-pytest-benchmark = "^3.4"
-pytest-cov = "^3.0"
-pytest-describe = "^2.0"
-pytest-timeout = "^2.1"
-black = [
-    {version = "22.8.0", python = ">=3.6.2"},
-    {version = "20.8b1", python = "<3.6.2"}
-]
-flake8 = [
-    {version = "^5.0", python = ">=3.6.1"},
-    {version = "^4.0", python = "<3.6.1"}
-]
-mypy = "0.971"
-sphinx = "^4.3"
-sphinx_rtd_theme = ">=1,<2"
-check-manifest = ">=0.48,<1"
-bump2version = ">=1.0,<2"
-tomli = [
-    {version="^2", python = ">=3.7"},
-    {version="^1.2", python = "<3.7"}
+    { version = "^0.25.2", python = ">=3.9" },
+    { version = "~0.24.0", python = ">=3.8,<3.9" },
+    { version = "~0.21.1", python = "<3.8" }
+]
+pytest-benchmark = [
+    { version = "^5.1", python = ">=3.9" },
+    { version = "^4.0", python = "<3.9" }
+]
+pytest-cov = [
+    { version = "^6.0", python = ">=3.9" },
+    { version = "^5.0", python = ">=3.8,<3.9" },
+    { version = "^4.1", python = "<3.8" },
+]
+pytest-describe = "^2.2"
+pytest-timeout = "^2.3"
+pytest-codspeed = [
+    { version = "^3.1.2", python = ">=3.9" },
+    { version = "^2.2.1", python = "<3.8" }
 ]
 tox = [
-    {version = "^3.26", python = ">=3.7"},
-    {version = "3.25", python = "<3.7"}
+    { version = "^4.24", python = ">=3.8" },
+    { version = "^3.28", python = "<3.8" }
+]
+
+[tool.poetry.group.lint]
+optional = true
+
+[tool.poetry.group.lint.dependencies]
+ruff = ">=0.11,<0.12"
+mypy = [
+    { version = "^1.15", python = ">=3.9" },
+    { version = "~1.14", python = ">=3.8,<3.9" },
+    { version = "~1.4", python = "<3.8" }
+]
+bump2version = ">=1,<2"
+
+[tool.poetry.group.doc]
+optional = true
+
+[tool.poetry.group.doc.dependencies]
+sphinx = [
+    { version = ">=7,<9", python = ">=3.8" },
+    { version = ">=4,<6", python = "<3.8" }
+]
+sphinx_rtd_theme = ">=2,<4"
+
+[tool.ruff]
+line-length = 88
+target-version = "py37"
+
+[tool.ruff.lint]
+select = [
+    "A",    # flake8-builtins
+    "ANN",  # flake8-annotations
+    "ARG",  # flake8-unused-arguments
+    "B",    # flake8-bugbear
+    "BLE",  # flake8-blind-except
+    "C4",   # flake8-comprehensions
+    "C90",  # McCabe cyclomatic complexity
+    "COM",  # flake8-commas
+    "D",    # pydocstyle
+    "DTZ",  # flake8-datetimez
+    "E",    # pycodestyle
+    "EM",   # flake8-errmsg
+    "ERA",  # eradicate
+    "EXE",  # flake8-executable
+    "F",    # Pyflakes
+    "FBT",  # flake8-boolean-trap
+    "G",    # flake8-logging-format
+    "I",    # isort
+    "ICN",  # flake8-import-conventions
+    "INP",  # flake8-no-pep420
+    "INT",  # flake8-gettext
+    "ISC",  # flake8-implicit-str-concat
+    "N",    # pep8-naming
+    "PGH",  # pygrep-hooks
+    "PIE",  # flake8-pie
+    "PL",   # Pylint
+    "PT",   # flake8-pytest-style
+    "PTH",  # flake8-use-pathlib
+    "PYI",  # flake8-pyi
+    "Q",    # flake8-quotes
+    "RET",  # flake8-return
+    "RSE",  # flake8-raise
+    "RUF",  # Ruff-specific rules
+    "S",    # flake8-bandit
+    "SLF",  # flake8-self
+    "SIM",  # flake8-simplify
+    "T10",  # flake8-debugger
+    "T20",  # flake8-print
+    "TCH",  # flake8-type-checking
+    "TID",  # flake8-tidy-imports
+    "TRY",  # tryceratops
+    "UP",   # pyupgrade
+    "W",    # pycodestyle
+    "YTT",  # flake8-2020
+]
+ignore = [
+    "A005",    # allow using standard-lib module names
+    "ANN401",  # allow explicit Any
+    "COM812",  # allow trailing commas for auto-formatting
+    "D105", "D107",  # no docstring needed for magic methods
+    "D203",    # no blank line before class docstring
+    "D213",    # multi-line docstrings should not start at second line
+    "D400", "D415",  # first line in docstring does not need to be a sentence
+    "D401",    # do not always require imperative mood in first line
+    "FBT001", "FBT002", "FBT003",  # allow boolean parameters
+    "ISC001",  # allow string literal concatenation for auto-formatting
+    "PGH003",  # type ignores do not need to be specific
+    "PLR2004", # allow some "magic" values
+    "PYI034",  # do not check return value of new method
+    "TID252",  # allow relative imports
+    "TRY003",  # allow specific messages outside the exception class
+]
+
+[tool.ruff.lint.per-file-ignores]
+"*/__init__.py" = [
+    "I001",  # imports do not need to be sorted
+]
+"src/graphql/execution/*" = [
+    "BLE001",  # allow catching blind exception
+]
+"src/graphql/language/ast.py" = [
+    "D101",  # do not require docstrings
+]
+"src/graphql/language/parser.py" = [
+    "RSE102",  # raised exception may need to be called
+]
+"src/graphql/type/introspection.py" = [
+    "ANN001", "ANN003", "ANN204", "ANN205",  # allow missing type annotations
+    "N803",  # allow JavaScript style arguments
+]
+"src/graphql/utilities/get_introspection_query.py" = [
+    "D101",  # allow missing class docstrings
+    "N815",  # allow JavaScript style class attributes
+]
+"src/graphql/utilities/type_info.py" = [
+    "D102",  # allow missing method docstrings
+]
+"src/graphql/validation/rules/*" = [
+    "D102",  # allow missing method docstrings
+]
+"src/graphql/validation/validation_context.py" = [
+    "D102",  # allow missing method docstrings
+]
+"tests/*" = [
+    "ANN001", "ANN002", "ANN003",  # allow missing type annotations
+    "ANN201", "ANN202", "ANN204", "ANN205",  # allow missing type annotations
+    "B011",  # allow always failing assertions
+    "B904",  # allow raising exceptions without context
+    "C901",  # allow complex functions
+    "D100", "D101", "D102", "D103",  # allow missing docstrings
+    "EM101", "EM102",  # allow passing literal strings to exceptions
+    "N802", "N803", "N806", "N815", "N816",  # allow JavaScript style names
+    "PLR0915",  # allow many statements
+    "PT015",  # allow always failing assertions
+    "RUF012",  # allow mutable class attributes
+    "S101",  # allow assertions
+    "S301",  # allow pickling
+    "TRY002", "TRY301",  # less strict handling of exceptions
+]
+"tests/star_wars_schema.py" = [
+    "A002",  # allow shadowing builtins
+    "ERA001",  # allow commented-out code
+]
+"tests/test_docs.py" = [
+    "S102",  # allow use of exec
+]
+
+
+[tool.ruff.lint.flake8-quotes]
+inline-quotes = "double"
+
+[tool.ruff.lint.mccabe]
+max-complexity = 50
+
+[tool.ruff.lint.pylint]
+max-args = 15
+max-branches = 50
+max-returns = 25
+max-statements = 125
+
+[tool.ruff.format]
+indent-style = "space"
+quote-style = "double"
+skip-magic-trailing-comma = false
+
+[tool.coverage.run]
+branch = true
+source = ["src", "tests"]
+omit = [
+    "*/conftest.py",
+    "*/test_*_fuzz.py",
+    "*/assert_valid_name.py",
+    "*/cached_property.py",
+    "*/character_classes.py",
+    "*/is_iterable.py",
+    "*/subscription/__init__.py"
+]
+
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "except ImportError:",
+    "# Python <",
+    'sys\.version_info <',
+    "raise NotImplementedError",
+    "assert False,",
+    '\s+next\($',
+    "if MYPY:",
+    "if TYPE_CHECKING:",
+    '^\s+\.\.\.$',
+    '^\s+pass$',
+    ': \.\.\.$'
+]
+ignore_errors = true
+
+[tool.mypy]
+python_version = "3.11"
+check_untyped_defs = true
+no_implicit_optional = true
+strict_optional = true
+warn_redundant_casts = true
+warn_unused_ignores = true
+disallow_untyped_defs = true
+
+[[tool.mypy.overrides]]
+module = [
+    "graphql.type.introspection",
+    "tests.*"
+]
+disallow_untyped_defs = false
+
+[tool.pyright]
+reportIncompatibleVariableOverride = false
+reportMissingTypeArgument = false
+reportUnknownArgumentType = false
+reportUnknownMemberType = false
+reportUnknownParameterType = false
+reportUnnecessaryIsInstance = false
+reportUnknownVariableType = false
+ignore = ["**/test_*"]  # test functions
+
+[tool.pylint.basic]
+max-module-lines = 2000
+
+[tool.pylint.messages_control]
+disable = [
+    "method-hidden",
+    "missing-module-docstring",  # test modules
+    "redefined-outer-name",
+    "unused-variable",  # test functions
 ]

-[tool.black]
-target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
+[tool.pytest.ini_options]
+minversion = "7.4"
+# Only run benchmarks as tests.
+# To actually run the benchmarks, use --benchmark-enable on the command line.
+# To run the slow tests (fuzzing), add --run-slow on the command line.
+addopts = "--benchmark-disable"
+# Deactivate default name pattern for test classes (we use pytest_describe).
+python_classes = "PyTest*"
+# Handle all async fixtures and tests automatically by asyncio.
+asyncio_mode = "auto"
+# Set a timeout in seconds for aborting tests that run too long.
+timeout = "100"
+# Ignore config options not (yet) available in older Python versions.
+filterwarnings = "ignore::pytest.PytestConfigWarning"
+# All tests can be found in the tests directory.
+testpaths = ["tests"]
+# Use the function scope as the default for asynchronous tests.
+asyncio_default_fixture_loop_scope = "function"

 [build-system]
-requires = ["poetry_core>=1,<2", "setuptools>=59,<70"]
+requires = ["poetry_core>=1.6.1,<3"]
 build-backend = "poetry.core.masonry.api"
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index dee6b2da..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,19 +0,0 @@
-[bdist_wheel]
-python-tag = py3
-
-[aliases]
-test = pytest
-
-[tool:pytest]
-# Only run benchmarks as tests.
-# To actually run the benchmarks, use --benchmark-enable on the command line.
-# To run the slow tests (fuzzing), add --run-slow on the command line.
-addopts = --benchmark-disable
-# Deactivate default name pattern for test classes (we use pytest_describe).
-python_classes = PyTest*
-# Handle all async fixtures and tests automatically by asyncio
-asyncio_mode = auto
-# Set a timeout in seconds for aborting tests that run too long.
-timeout = 100
-# Ignore config options not (yet) available in older Python versions.
-filterwarnings = ignore::pytest.PytestConfigWarning
diff --git a/setup.py b/setup.py
deleted file mode 100644
index d307a66a..00000000
--- a/setup.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from re import search
-from setuptools import setup, find_packages
-
-with open("src/graphql/version.py") as version_file:
-    version = search('version = "(.*)"', version_file.read()).group(1)
-
-with open("README.md") as readme_file:
-    readme = readme_file.read()
-
-setup(
-    name="graphql-core",
-    version=version,
-    description="GraphQL implementation for Python, a port of GraphQL.js,"
-    " the JavaScript reference implementation for GraphQL.",
-    long_description=readme,
-    long_description_content_type="text/markdown",
-    keywords="graphql",
-    url="https://github.com/graphql-python/graphql-core",
-    author="Christoph Zwerschke",
-    author_email="cito@online.de",
-    license="MIT license",
-    classifiers=[
-        "Development Status :: 5 - Production/Stable",
-        "Intended Audience :: Developers",
-        "Topic :: Software Development :: Libraries",
-        "License :: OSI Approved :: MIT License",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3 :: Only",
-        "Programming Language :: Python :: 3.6",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-    ],
-    install_requires=[
-        "typing-extensions>=4.2,<5; python_version < '3.8'",
-    ],
-    python_requires=">=3.6,<4",
-    packages=find_packages("src"),
-    package_dir={"": "src"},
-    # PEP-561: https://www.python.org/dev/peps/pep-0561/
-    package_data={"graphql": ["py.typed"]},
-    include_package_data=True,
-    zip_safe=False,
-)
diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py
index f1b21ab3..6938435a 100644
--- a/src/graphql/__init__.py
+++ b/src/graphql/__init__.py
@@ -83,6 +83,7 @@
     # Predicates
     is_definition_node,
     is_executable_definition_node,
+    is_nullability_assertion_node,
     is_selection_node,
     is_value_node,
     is_const_value_node,
@@ -110,6 +111,10 @@
     SelectionNode,
     FieldNode,
     ArgumentNode,
+    NullabilityAssertionNode,
+    NonNullAssertionNode,
+    ErrorBoundaryNode,
+    ListNullabilityOperatorNode,
     ConstArgumentNode,
     FragmentSpreadNode,
     InlineFragmentNode,
@@ -167,8 +172,6 @@
     IntrospectionQuery,
     # Get the target Operation from a Document.
     get_operation_ast,
-    # Get the Type for the target Operation AST.
-    get_operation_root_type,
     # Convert a GraphQLSchema to an IntrospectionQuery.
     introspection_from_schema,
     # Build a GraphQLSchema from an introspection result.
@@ -185,6 +188,8 @@
     print_schema,
     # Print a GraphQLType to GraphQL Schema language.
     print_type,
+    # Print a GraphQLDirective to GraphQL Schema language.
+    print_directive,
     # Prints the built-in introspection schema in the Schema Language format.
     print_introspection_schema,
     # Create a GraphQLType from a GraphQL language AST.
@@ -214,10 +219,6 @@
     is_equal_type,
     is_type_sub_type_of,
     do_types_overlap,
-    # Assert a string is a valid GraphQL name.
-    assert_valid_name,
-    # Determine if a string is a valid GraphQL name.
-    is_valid_name_error,
     # Compare two GraphQLSchemas and detect breaking changes.
     BreakingChange,
     BreakingChangeType,
@@ -254,8 +255,11 @@
     specified_directives,
     GraphQLIncludeDirective,
     GraphQLSkipDirective,
+    GraphQLDeferDirective,
+    GraphQLStreamDirective,
     GraphQLDeprecatedDirective,
     GraphQLSpecifiedByDirective,
+    GraphQLOneOfDirective,
     # "Enum" of Type Kinds
     TypeKind,
     # Constant Deprecation Reason
@@ -319,7 +323,7 @@
     # Validate GraphQL schema.
     validate_schema,
     assert_valid_schema,
-    # Uphold the spec rules about naming
+    # Uphold the spec rules about naming
     assert_name,
     assert_enum_value_name,
@@ -331,6 +335,8 @@
     GraphQLAbstractType,
     GraphQLWrappingType,
     GraphQLNullableType,
+    GraphQLNullableInputType,
+    GraphQLNullableOutputType,
     GraphQLNamedType,
     GraphQLNamedInputType,
     GraphQLNamedOutputType,
@@ -346,6 +352,7 @@
     GraphQLFieldResolver,
     GraphQLInputField,
     GraphQLInputFieldMap,
+    GraphQLInputFieldOutType,
     GraphQLScalarSerializer,
     GraphQLScalarValueParser,
     GraphQLScalarLiteralParser,
@@ -431,11 +438,22 @@
     # Types
     ExecutionContext,
     ExecutionResult,
+    ExperimentalIncrementalExecutionResults,
+    InitialIncrementalExecutionResult,
+    SubsequentIncrementalExecutionResult,
+    IncrementalDeferResult,
+    IncrementalStreamResult,
+    IncrementalResult,
     FormattedExecutionResult,
+    FormattedInitialIncrementalExecutionResult,
+    FormattedSubsequentIncrementalExecutionResult,
+    FormattedIncrementalDeferResult,
+    FormattedIncrementalStreamResult,
+    FormattedIncrementalResult,
     # Subscription
     subscribe,
     create_source_event_stream,
-    MapAsyncIterator,
+    map_async_iterable,
     # Middleware
     Middleware,
     MiddlewareManager,
@@ -456,325 +474,345 @@
 __all__ = [
-    "version",
-    "version_info",
-    "version_js",
-    "version_info_js",
-    "graphql",
-    "graphql_sync",
-    "GraphQLSchema",
-    "GraphQLDirective",
-    "GraphQLScalarType",
-    "GraphQLObjectType",
-    "GraphQLInterfaceType",
-    "GraphQLUnionType",
-    "GraphQLEnumType",
-    "GraphQLInputObjectType",
-    "GraphQLList",
-    "GraphQLNonNull",
-    "specified_scalar_types",
-    "GraphQLInt",
-    "GraphQLFloat",
-    "GraphQLString",
-    "GraphQLBoolean",
-    "GraphQLID",
+    "BREAK",
+    "DEFAULT_DEPRECATION_REASON",
     "GRAPHQL_MAX_INT",
     "GRAPHQL_MIN_INT",
-    "specified_directives",
-    "GraphQLIncludeDirective",
-    "GraphQLSkipDirective",
-    "GraphQLDeprecatedDirective",
-    "GraphQLSpecifiedByDirective",
-    "TypeKind",
-    "DEFAULT_DEPRECATION_REASON",
-    "introspection_types",
-    "SchemaMetaFieldDef",
-    "TypeMetaFieldDef",
-    "TypeNameMetaFieldDef",
-    "is_schema",
-    "is_directive",
-    "is_type",
-    "is_scalar_type",
-    "is_object_type",
-    "is_interface_type",
-    "is_union_type",
-    "is_enum_type",
-    "is_input_object_type",
-    "is_list_type",
-    "is_non_null_type",
-    "is_input_type",
-    "is_output_type",
-    "is_leaf_type",
-    "is_composite_type",
-    "is_abstract_type",
-    "is_wrapping_type",
-    "is_nullable_type",
-    "is_named_type",
-    "is_required_argument",
-    "is_required_input_field",
-    "is_specified_scalar_type",
-    "is_introspection_type",
-    "is_specified_directive",
-    "assert_schema",
-    "assert_directive",
-    "assert_type",
-    "assert_scalar_type",
-    "assert_object_type",
-    "assert_interface_type",
-    "assert_union_type",
-    "assert_enum_type",
-    "assert_input_object_type",
-    "assert_list_type",
-    "assert_non_null_type",
-    "assert_input_type",
-    "assert_output_type",
-    "assert_leaf_type",
-    "assert_composite_type",
-    "assert_abstract_type",
-    "assert_wrapping_type",
-    "assert_nullable_type",
-    "assert_named_type",
-    "get_nullable_type",
-    "get_named_type",
-    "resolve_thunk",
-    "validate_schema",
-    "assert_valid_schema",
-    "assert_name",
-    "assert_enum_value_name",
-    "GraphQLType",
-    "GraphQLInputType",
-    "GraphQLOutputType",
-    "GraphQLLeafType",
-    "GraphQLCompositeType",
+    "IDLE",
+    "REMOVE",
+    "SKIP",
+    "ASTValidationRule",
+    "ArgumentNode",
+    "BooleanValueNode",
+    "BreakingChange",
+    "BreakingChangeType",
+    "ConstArgumentNode",
+    "ConstDirectiveNode",
+    "ConstListValueNode",
+    "ConstObjectFieldNode",
+    "ConstObjectValueNode",
+    "ConstValueNode",
+    "DangerousChange",
+    "DangerousChangeType",
+    "DefinitionNode",
+    "DirectiveDefinitionNode",
+    "DirectiveLocation",
+    "DirectiveNode",
+    "DocumentNode",
+    "EnumTypeDefinitionNode",
+    "EnumTypeExtensionNode",
+    "EnumValueDefinitionNode",
+    "EnumValueNode",
+    "ErrorBoundaryNode",
+    "ExecutableDefinitionNode",
+    "ExecutableDefinitionsRule",
+    "ExecutionContext",
+    "ExecutionResult",
+    "ExperimentalIncrementalExecutionResults",
+    "FieldDefinitionNode",
+    "FieldNode",
+    "FieldsOnCorrectTypeRule",
+    "FloatValueNode",
+    "FormattedExecutionResult",
+    "FormattedIncrementalDeferResult",
+    "FormattedIncrementalResult",
+    "FormattedIncrementalStreamResult",
+    "FormattedInitialIncrementalExecutionResult",
+    "FormattedSubsequentIncrementalExecutionResult",
+    "FragmentDefinitionNode",
+    "FragmentSpreadNode",
+    "FragmentsOnCompositeTypesRule",
     "GraphQLAbstractType",
-    "GraphQLWrappingType",
-    "GraphQLNullableType",
-    "GraphQLNamedType",
-    "GraphQLNamedInputType",
-    "GraphQLNamedOutputType",
-    "Thunk",
-    "ThunkCollection",
-    "ThunkMapping",
     "GraphQLArgument",
+    "GraphQLArgumentKwargs",
     "GraphQLArgumentMap",
+    "GraphQLBoolean",
+    "GraphQLCompositeType",
+    "GraphQLDeferDirective",
+    "GraphQLDeprecatedDirective",
+    "GraphQLDirective",
+    "GraphQLDirectiveKwargs",
+    "GraphQLEnumType",
+    "GraphQLEnumTypeKwargs",
     "GraphQLEnumValue",
+    "GraphQLEnumValueKwargs",
     "GraphQLEnumValueMap",
+    "GraphQLError",
+    "GraphQLErrorExtensions",
     "GraphQLField",
+    "GraphQLFieldKwargs",
     "GraphQLFieldMap",
     "GraphQLFieldResolver",
+    "GraphQLFloat",
+    "GraphQLFormattedError",
+    "GraphQLID",
+    "GraphQLIncludeDirective",
     "GraphQLInputField",
-    "GraphQLInputFieldMap",
-    "GraphQLScalarSerializer",
-    "GraphQLScalarValueParser",
-    "GraphQLScalarLiteralParser",
-    "GraphQLIsTypeOfFn",
-    "GraphQLResolveInfo",
-    "ResponsePath",
-    "GraphQLTypeResolver",
-    "GraphQLArgumentKwargs",
-    "GraphQLDirectiveKwargs",
-    "GraphQLEnumTypeKwargs",
-    "GraphQLEnumValueKwargs",
-    "GraphQLFieldKwargs",
     "GraphQLInputFieldKwargs",
+    "GraphQLInputFieldMap",
+    "GraphQLInputFieldOutType",
+    "GraphQLInputObjectType",
     "GraphQLInputObjectTypeKwargs",
+    "GraphQLInputType",
+    "GraphQLInt",
+    "GraphQLInterfaceType",
     "GraphQLInterfaceTypeKwargs",
+    "GraphQLIsTypeOfFn",
+    "GraphQLLeafType",
+    "GraphQLList",
+    "GraphQLNamedInputType",
+    "GraphQLNamedOutputType",
+    "GraphQLNamedType",
     "GraphQLNamedTypeKwargs",
+    "GraphQLNonNull",
+    "GraphQLNullableInputType",
+    "GraphQLNullableOutputType",
+    "GraphQLNullableType",
+    "GraphQLObjectType",
     "GraphQLObjectTypeKwargs",
+    "GraphQLOneOfDirective",
+    "GraphQLOutputType",
+    "GraphQLResolveInfo",
+    "GraphQLScalarLiteralParser",
+    "GraphQLScalarSerializer",
+    "GraphQLScalarType",
     "GraphQLScalarTypeKwargs",
+    "GraphQLScalarValueParser",
+    "GraphQLSchema",
     "GraphQLSchemaKwargs",
-    "GraphQLUnionTypeKwargs",
-    "Source",
-    "get_location",
-    "print_location",
-    "print_source_location",
-    "Lexer",
-    "TokenKind",
-    "parse",
-    "parse_value",
-    "parse_const_value",
-    "parse_type",
-    "print_ast",
-    "visit",
-    "ParallelVisitor",
-    "TypeInfoVisitor",
-    "Visitor",
-    "VisitorAction",
-    "VisitorKeyMap",
-    "BREAK",
-    "SKIP",
-    "REMOVE",
-    "IDLE",
-    "DirectiveLocation",
-    "is_definition_node",
-    "is_executable_definition_node",
-    "is_selection_node",
-    "is_value_node",
-    "is_const_value_node",
-    "is_type_node",
-    "is_type_system_definition_node",
-    "is_type_definition_node",
-    "is_type_system_extension_node",
-    "is_type_extension_node",
-    "SourceLocation",
-    "Location",
-    "Token",
-    "Node",
-    "NameNode",
-    "DocumentNode",
-    "DefinitionNode",
-    "ExecutableDefinitionNode",
-    "OperationDefinitionNode",
-    "OperationType",
-    "VariableDefinitionNode",
-    "VariableNode",
-    "SelectionSetNode",
-    "SelectionNode",
-    "FieldNode",
-    "ArgumentNode",
-    "ConstArgumentNode",
-    "FragmentSpreadNode",
-    "InlineFragmentNode",
-    "FragmentDefinitionNode",
-    "ValueNode",
-    "ConstValueNode",
-    "IntValueNode",
-    "FloatValueNode",
-    "StringValueNode",
-    "BooleanValueNode",
-    "NullValueNode",
-    "EnumValueNode",
-    "ListValueNode",
-    "ConstListValueNode",
-    "ObjectValueNode",
-    "ConstObjectValueNode",
-    "ObjectFieldNode",
-    "ConstObjectFieldNode",
-    "DirectiveNode",
-    "ConstDirectiveNode",
-    "TypeNode",
-    "NamedTypeNode",
-    "ListTypeNode",
-    "NonNullTypeNode",
-    "TypeSystemDefinitionNode",
-    "SchemaDefinitionNode",
-    "OperationTypeDefinitionNode",
-    "TypeDefinitionNode",
-    "ScalarTypeDefinitionNode",
-    "ObjectTypeDefinitionNode",
-    "FieldDefinitionNode",
+    "GraphQLSkipDirective",
+    "GraphQLSpecifiedByDirective",
+    "GraphQLStreamDirective",
+    "GraphQLString",
+    "GraphQLSyntaxError",
+    "GraphQLType",
+    "GraphQLTypeResolver",
+    "GraphQLUnionType",
+    "GraphQLUnionTypeKwargs",
+    "GraphQLWrappingType",
+    "IncrementalDeferResult",
+    "IncrementalResult",
+    "IncrementalStreamResult",
+    "InitialIncrementalExecutionResult",
+    "InlineFragmentNode",
+    "InputObjectTypeDefinitionNode",
+    "InputObjectTypeExtensionNode",
     "InputValueDefinitionNode",
+    "IntValueNode",
     "InterfaceTypeDefinitionNode",
-    "UnionTypeDefinitionNode",
-    "EnumTypeDefinitionNode",
-    "EnumValueDefinitionNode",
-    "InputObjectTypeDefinitionNode",
-    "DirectiveDefinitionNode",
-    "TypeSystemExtensionNode",
-    "SchemaExtensionNode",
-    "TypeExtensionNode",
-    "ScalarTypeExtensionNode",
-    "ObjectTypeExtensionNode",
     "InterfaceTypeExtensionNode",
-    "UnionTypeExtensionNode",
-    "EnumTypeExtensionNode",
-    "InputObjectTypeExtensionNode",
-    "execute",
-    "execute_sync",
-    "default_field_resolver",
-    "default_type_resolver",
-    "get_argument_values",
-    "get_directive_values",
-    "get_variable_values",
-    "ExecutionContext",
-    "ExecutionResult",
-    "FormattedExecutionResult",
-    "Middleware",
-    "MiddlewareManager",
-    "subscribe",
-    "create_source_event_stream",
-    "MapAsyncIterator",
-    "validate",
-    "ValidationContext",
-    "ValidationRule",
-    "ASTValidationRule",
-    "SDLValidationRule",
-    "specified_rules",
-    "ExecutableDefinitionsRule",
-    "FieldsOnCorrectTypeRule",
-    "FragmentsOnCompositeTypesRule",
+    "IntrospectionQuery",
     "KnownArgumentNamesRule",
     "KnownDirectivesRule",
     "KnownFragmentNamesRule",
     "KnownTypeNamesRule",
+    "Lexer",
+    "ListNullabilityOperatorNode",
+    "ListTypeNode",
+    "ListValueNode",
+    "Location",
     "LoneAnonymousOperationRule",
+    "LoneSchemaDefinitionRule",
+    "Middleware",
+    "MiddlewareManager",
+    "NameNode",
+    "NamedTypeNode",
+    "NoDeprecatedCustomRule",
     "NoFragmentCyclesRule",
+    "NoSchemaIntrospectionCustomRule",
     "NoUndefinedVariablesRule",
     "NoUnusedFragmentsRule",
     "NoUnusedVariablesRule",
+    "Node",
+    "NonNullAssertionNode",
+    "NonNullTypeNode",
+    "NullValueNode",
+    "NullabilityAssertionNode",
+    "ObjectFieldNode",
+    "ObjectTypeDefinitionNode",
+    "ObjectTypeExtensionNode",
+    "ObjectValueNode",
+    "OperationDefinitionNode",
+    "OperationType",
+    "OperationTypeDefinitionNode",
     "OverlappingFieldsCanBeMergedRule",
+    "ParallelVisitor",
     "PossibleFragmentSpreadsRule",
+    "PossibleTypeExtensionsRule",
     "ProvidedRequiredArgumentsRule",
+    "ResponsePath",
+    "SDLValidationRule",
     "ScalarLeafsRule",
+    "ScalarTypeDefinitionNode",
+    "ScalarTypeExtensionNode",
+    "SchemaDefinitionNode",
+    "SchemaExtensionNode",
+    "SchemaMetaFieldDef",
+    "SelectionNode",
+
"SelectionSetNode", "SingleFieldSubscriptionsRule", + "Source", + "SourceLocation", + "StringValueNode", + "SubsequentIncrementalExecutionResult", + "Thunk", + "ThunkCollection", + "ThunkMapping", + "Token", + "TokenKind", + "TypeDefinitionNode", + "TypeExtensionNode", + "TypeInfo", + "TypeInfoVisitor", + "TypeKind", + "TypeMetaFieldDef", + "TypeNameMetaFieldDef", + "TypeNode", + "TypeSystemDefinitionNode", + "TypeSystemExtensionNode", + "Undefined", + "UndefinedType", + "UnionTypeDefinitionNode", + "UnionTypeExtensionNode", + "UniqueArgumentDefinitionNamesRule", "UniqueArgumentNamesRule", + "UniqueDirectiveNamesRule", "UniqueDirectivesPerLocationRule", + "UniqueEnumValueNamesRule", + "UniqueFieldDefinitionNamesRule", "UniqueFragmentNamesRule", "UniqueInputFieldNamesRule", "UniqueOperationNamesRule", + "UniqueOperationTypesRule", + "UniqueTypeNamesRule", "UniqueVariableNamesRule", + "ValidationContext", + "ValidationRule", + "ValueNode", "ValuesOfCorrectTypeRule", + "VariableDefinitionNode", + "VariableNode", "VariablesAreInputTypesRule", "VariablesInAllowedPositionRule", - "LoneSchemaDefinitionRule", - "UniqueOperationTypesRule", - "UniqueTypeNamesRule", - "UniqueEnumValueNamesRule", - "UniqueFieldDefinitionNamesRule", - "UniqueArgumentDefinitionNamesRule", - "UniqueDirectiveNamesRule", - "PossibleTypeExtensionsRule", - "NoDeprecatedCustomRule", - "NoSchemaIntrospectionCustomRule", - "GraphQLError", - "GraphQLErrorExtensions", - "GraphQLFormattedError", - "GraphQLSyntaxError", - "located_error", - "get_introspection_query", - "IntrospectionQuery", - "get_operation_ast", - "get_operation_root_type", - "introspection_from_schema", - "build_client_schema", + "Visitor", + "VisitorAction", + "VisitorKeyMap", + "assert_abstract_type", + "assert_composite_type", + "assert_directive", + "assert_enum_type", + "assert_enum_value_name", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_name", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_schema", + "assert_type", + "assert_union_type", + "assert_valid_schema", + "assert_wrapping_type", + "ast_from_value", + "ast_to_dict", "build_ast_schema", + "build_client_schema", "build_schema", + "coerce_input_value", + "concat_ast", + "create_source_event_stream", + "default_field_resolver", + "default_type_resolver", + "do_types_overlap", + "execute", + "execute_sync", "extend_schema", + "find_breaking_changes", + "find_dangerous_changes", + "get_argument_values", + "get_directive_values", + "get_introspection_query", + "get_location", + "get_named_type", + "get_nullable_type", + "get_operation_ast", + "get_variable_values", + "graphql", + "graphql_sync", + "introspection_from_schema", + "introspection_types", + "is_abstract_type", + "is_composite_type", + "is_const_value_node", + "is_definition_node", + "is_directive", + "is_enum_type", + "is_equal_type", + "is_executable_definition_node", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_introspection_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullability_assertion_node", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_schema", + "is_selection_node", + "is_specified_directive", + "is_specified_scalar_type", + "is_type", + "is_type_definition_node", + 
"is_type_extension_node", + "is_type_node", + "is_type_sub_type_of", + "is_type_system_definition_node", + "is_type_system_extension_node", + "is_union_type", + "is_value_node", + "is_wrapping_type", "lexicographic_sort_schema", + "located_error", + "map_async_iterable", + "parse", + "parse_const_value", + "parse_type", + "parse_value", + "print_ast", + "print_directive", + "print_introspection_schema", + "print_location", "print_schema", + "print_source_location", "print_type", - "print_introspection_schema", + "resolve_thunk", + "separate_operations", + "specified_directives", + "specified_rules", + "specified_scalar_types", + "strip_ignored_characters", + "subscribe", "type_from_ast", + "validate", + "validate_schema", "value_from_ast", "value_from_ast_untyped", - "ast_from_value", - "ast_to_dict", - "TypeInfo", - "coerce_input_value", - "concat_ast", - "separate_operations", - "strip_ignored_characters", - "is_equal_type", - "is_type_sub_type_of", - "do_types_overlap", - "assert_valid_name", - "is_valid_name_error", - "find_breaking_changes", - "find_dangerous_changes", - "BreakingChange", - "BreakingChangeType", - "DangerousChange", - "DangerousChangeType", - "Undefined", - "UndefinedType", + "version", + "version_info", + "version_info_js", + "version_js", + "visit", ] diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index 75e34b6a..8123a713 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -1,24 +1,32 @@ +"""GraphQL Error""" + +from __future__ import annotations + from sys import exc_info -from typing import Any, Collection, Dict, List, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Collection, Dict try: from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias if TYPE_CHECKING: - from ..language.ast import Node # noqa: F401 + from ..language.ast import Node from ..language.location import ( - SourceLocation, FormattedSourceLocation, - ) # noqa: F401 - from ..language.source import Source # noqa: F401 + SourceLocation, + ) + from ..language.source import Source __all__ = ["GraphQLError", "GraphQLErrorExtensions", "GraphQLFormattedError"] # Custom extensions -GraphQLErrorExtensions = Dict[str, Any] +GraphQLErrorExtensions: TypeAlias = Dict[str, Any] # Use a unique identifier name for your extension, for example the name of # your library or project. Do not use a shortened identifier as this increases # the risk of conflicts. We recommend you add at most one extension key, @@ -33,12 +41,12 @@ class GraphQLFormattedError(TypedDict, total=False): message: str # If an error can be associated to a particular point in the requested # GraphQL document, it should contain a list of locations. - locations: List["FormattedSourceLocation"] + locations: list[FormattedSourceLocation] # If an error can be associated to a particular field in the GraphQL result, # it _must_ contain an entry with the key `path` that details the path of # the response field which experienced the error. This allows clients to # identify whether a null result is intentional or caused by a runtime error. - path: List[Union[str, int]] + path: list[str | int] # Reserved for implementors to extend the protocol however they see fit, # and hence there are no additional restrictions on its contents. 
extensions: GraphQLErrorExtensions @@ -56,7 +64,7 @@ class GraphQLError(Exception): message: str """A message describing the Error for debugging purposes""" - locations: Optional[List["SourceLocation"]] + locations: list[SourceLocation] | None """Source locations A list of (line, column) locations within the source GraphQL document which @@ -67,7 +75,7 @@ class GraphQLError(Exception): the field which produced the error. """ - path: Optional[List[Union[str, int]]] + path: list[str | int] | None """ A list of field names and array indexes describing the JSON-path into the execution @@ -76,38 +84,38 @@ class GraphQLError(Exception): Only included for errors during execution. """ - nodes: Optional[List["Node"]] + nodes: list[Node] | None """A list of GraphQL AST Nodes corresponding to this error""" - source: Optional["Source"] + source: Source | None """The source GraphQL document for the first location of this error Note that if this Error represents more than one node, the source may not represent nodes after the first node. """ - positions: Optional[Collection[int]] + positions: Collection[int] | None """Error positions A list of character offsets within the source GraphQL document which correspond to this error. """ - original_error: Optional[Exception] + original_error: Exception | None """The original error thrown from a field resolver during execution""" - extensions: Optional[GraphQLErrorExtensions] + extensions: GraphQLErrorExtensions | None """Extension fields to add to the formatted error""" __slots__ = ( + "extensions", + "locations", "message", "nodes", - "source", - "positions", - "locations", - "path", "original_error", - "extensions", + "path", + "positions", + "source", ) __hash__ = Exception.__hash__ @@ -115,13 +123,14 @@ class GraphQLError(Exception): def __init__( self, message: str, - nodes: Union[Collection["Node"], "Node", None] = None, - source: Optional["Source"] = None, - positions: Optional[Collection[int]] = None, - path: Optional[Collection[Union[str, int]]] = None, - original_error: Optional[Exception] = None, - extensions: Optional[GraphQLErrorExtensions] = None, + nodes: Collection[Node] | Node | None = None, + source: Source | None = None, + positions: Collection[int] | None = None, + path: Collection[str | int] | None = None, + original_error: Exception | None = None, + extensions: GraphQLErrorExtensions | None = None, ) -> None: + """Initialize a GraphQLError.""" super().__init__(message) self.message = message @@ -148,7 +157,7 @@ def __init__( positions = [loc.start for loc in node_locations] self.positions = positions or None if positions and source: - locations: Optional[List["SourceLocation"]] = [ + locations: list[SourceLocation] | None = [ source.get_location(pos) for pos in positions ] else: @@ -196,7 +205,7 @@ def __repr__(self) -> str: args.append(f"extensions={self.extensions!r}") return f"{self.__class__.__name__}({', '.join(args)})" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return ( isinstance(other, GraphQLError) and self.__class__ == other.__class__ @@ -215,7 +224,7 @@ def __eq__(self, other: Any) -> bool: ) ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other @property @@ -235,30 +244,3 @@ def formatted(self) -> GraphQLFormattedError: if self.extensions: formatted["extensions"] = self.extensions return formatted - - -def print_error(error: GraphQLError) -> str: - """Print a GraphQLError to a string. 
- - Represents useful location information about the error's position in the source. - - .. deprecated:: 3.2 - Please use ``str(error)`` instead. Will be removed in v3.3. - """ - if not isinstance(error, GraphQLError): - raise TypeError("Expected a GraphQLError.") - return str(error) - - -def format_error(error: GraphQLError) -> GraphQLFormattedError: - """Format a GraphQL error. - - Given a GraphQLError, format it according to the rules described by the "Response - Format, Errors" section of the GraphQL Specification. - - .. deprecated:: 3.2 - Please use ``error.formatted`` instead. Will be removed in v3.3. - """ - if not isinstance(error, GraphQLError): - raise TypeError("Expected a GraphQLError.") - return error.formatted diff --git a/src/graphql/error/located_error.py b/src/graphql/error/located_error.py index d295163c..31e423bc 100644 --- a/src/graphql/error/located_error.py +++ b/src/graphql/error/located_error.py @@ -1,18 +1,25 @@ -from typing import TYPE_CHECKING, Collection, Optional, Union +"""Located GraphQL Error""" + +from __future__ import annotations + +from contextlib import suppress +from typing import TYPE_CHECKING, Collection from ..pyutils import inspect from .graphql_error import GraphQLError if TYPE_CHECKING: - from ..language.ast import Node # noqa: F401 + from ..language.ast import Node __all__ = ["located_error"] +suppress_attribute_error = suppress(AttributeError) + def located_error( original_error: Exception, - nodes: Optional[Union["None", Collection["Node"]]] = None, - path: Optional[Collection[Union[str, int]]] = None, + nodes: None | Collection[Node] = None, + path: Collection[str | int] | None = None, ) -> GraphQLError: """Located GraphQL Error @@ -28,23 +35,18 @@ def located_error( if isinstance(original_error, GraphQLError) and original_error.path is not None: return original_error try: - # noinspection PyUnresolvedReferences message = str(original_error.message) # type: ignore except AttributeError: message = str(original_error) try: - # noinspection PyUnresolvedReferences source = original_error.source # type: ignore except AttributeError: source = None try: - # noinspection PyUnresolvedReferences positions = original_error.positions # type: ignore except AttributeError: positions = None - try: - # noinspection PyUnresolvedReferences + + with suppress_attribute_error: nodes = original_error.nodes or nodes # type: ignore - except AttributeError: - pass return GraphQLError(message, nodes, source, positions, path, original_error) diff --git a/src/graphql/error/syntax_error.py b/src/graphql/error/syntax_error.py index 9ab41c25..10b6b3df 100644 --- a/src/graphql/error/syntax_error.py +++ b/src/graphql/error/syntax_error.py @@ -1,9 +1,13 @@ +"""GraphQL Syntax Error""" + +from __future__ import annotations + from typing import TYPE_CHECKING from .graphql_error import GraphQLError if TYPE_CHECKING: - from ..language.source import Source # noqa: F401 + from ..language.source import Source __all__ = ["GraphQLSyntaxError"] @@ -11,7 +15,8 @@ class GraphQLSyntaxError(GraphQLError): """A GraphQLError representing a syntax error.""" - def __init__(self, source: "Source", position: int, description: str) -> None: + def __init__(self, source: Source, position: int, description: str) -> None: + """Initialize the GraphQLSyntaxError""" super().__init__( f"Syntax Error: {description}", source=source, positions=[position] ) diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 7317fef2..375ec400 100644 --- 
a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -5,34 +5,63 @@ """ from .execute import ( + ASYNC_DELAY, + create_source_event_stream, execute, + experimental_execute_incrementally, execute_sync, default_field_resolver, default_type_resolver, + subscribe, ExecutionContext, + Middleware, +) +from .incremental_publisher import ( ExecutionResult, + ExperimentalIncrementalExecutionResults, + FormattedSubsequentIncrementalExecutionResult, + FormattedIncrementalDeferResult, + FormattedIncrementalResult, + FormattedIncrementalStreamResult, FormattedExecutionResult, - Middleware, + FormattedInitialIncrementalExecutionResult, + IncrementalDeferResult, + IncrementalResult, + IncrementalStreamResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, ) -from .map_async_iterator import MapAsyncIterator -from .subscribe import subscribe, create_source_event_stream +from .async_iterables import map_async_iterable from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values __all__ = [ - "create_source_event_stream", - "execute", - "execute_sync", - "default_field_resolver", - "default_type_resolver", - "subscribe", + "ASYNC_DELAY", "ExecutionContext", "ExecutionResult", + "ExperimentalIncrementalExecutionResults", "FormattedExecutionResult", - "MapAsyncIterator", + "FormattedIncrementalDeferResult", + "FormattedIncrementalResult", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "IncrementalDeferResult", + "IncrementalResult", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", "Middleware", "MiddlewareManager", + "SubsequentIncrementalExecutionResult", + "create_source_event_stream", + "default_field_resolver", + "default_type_resolver", + "execute", + "execute_sync", + "experimental_execute_incrementally", "get_argument_values", "get_directive_values", "get_variable_values", + "map_async_iterable", + "subscribe", ] diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py new file mode 100644 index 00000000..b8faad88 --- /dev/null +++ b/src/graphql/execution/async_iterables.py @@ -0,0 +1,61 @@ +"""Helpers for async iterables""" + +from __future__ import annotations + +from contextlib import AbstractAsyncContextManager, suppress +from typing import ( + AsyncGenerator, + AsyncIterable, + Awaitable, + Callable, + Generic, + TypeVar, + Union, +) + +__all__ = ["aclosing", "map_async_iterable"] + +T = TypeVar("T") +V = TypeVar("V") + +AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]] + +suppress_exceptions = suppress(Exception) + + +class aclosing(AbstractAsyncContextManager, Generic[T]): # noqa: N801 + """Async context manager for safely finalizing an async iterator or generator. + + Contrary to the function available via the standard library, this one silently + ignores the case that custom iterators have no aclose() method. 
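+
+    A usage sketch, analogous to ``contextlib.aclosing`` (``events()`` stands
+    in for any async iterable or generator)::
+
+        async with aclosing(events()) as items:
+            async for item in items:
+                ...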
+ """ + + def __init__(self, iterable: AsyncIterableOrGenerator[T]) -> None: + self.iterable = iterable + + async def __aenter__(self) -> AsyncIterableOrGenerator[T]: + return self.iterable + + async def __aexit__(self, *_exc_info: object) -> None: + try: + aclose = self.iterable.aclose # type: ignore + except AttributeError: + pass # do not complain if the iterator has no aclose() method + else: + with suppress_exceptions: # or if the aclose() method fails + await aclose() + + +async def map_async_iterable( + iterable: AsyncIterableOrGenerator[T], callback: Callable[[T], Awaitable[V]] +) -> AsyncGenerator[V, None]: + """Map an AsyncIterable over a callback function. + + Given an AsyncIterable and an async callback function, return an AsyncGenerator + that produces values mapped via calling the callback function. + If the inner iterator supports an `aclose()` method, it will be called when + the generator finishes or closes. + """ + async with aclosing(iterable) as items: + async for item in items: + yield await callback(item) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 60ae75fa..c3fc99cc 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -1,14 +1,22 @@ -from typing import Any, Dict, List, Set, Union, cast +"""Collect fields""" + +from __future__ import annotations + +import sys +from typing import Any, Dict, NamedTuple, Union, cast from ..language import ( FieldNode, FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, + OperationDefinitionNode, + OperationType, SelectionSetNode, ) +from ..pyutils import RefMap, RefSet from ..type import ( - GraphQLAbstractType, + GraphQLDeferDirective, GraphQLIncludeDirective, GraphQLObjectType, GraphQLSchema, @@ -18,16 +26,104 @@ from ..utilities.type_from_ast import type_from_ast from .values import get_directive_values -__all__ = ["collect_fields", "collect_sub_fields"] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +__all__ = [ + "NON_DEFERRED_TARGET_SET", + "CollectFieldsContext", + "CollectFieldsResult", + "DeferUsage", + "DeferUsageSet", + "FieldDetails", + "FieldGroup", + "GroupedFieldSetDetails", + "Target", + "TargetSet", + "collect_fields", + "collect_subfields", +] + + +class DeferUsage(NamedTuple): + """An optionally labelled list of ancestor targets.""" + + label: str | None + ancestors: list[Target] + + +Target: TypeAlias = Union[DeferUsage, None] + +TargetSet: TypeAlias = RefSet[Target] +DeferUsageSet: TypeAlias = RefSet[DeferUsage] + + +NON_DEFERRED_TARGET_SET: TargetSet = RefSet([None]) + + +class FieldDetails(NamedTuple): + """A field node and its target.""" + + node: FieldNode + target: Target + + +class FieldGroup(NamedTuple): + """A group of fields that share the same target set.""" + + fields: list[FieldDetails] + targets: TargetSet + + def to_nodes(self) -> list[FieldNode]: + """Return the field nodes in this group.""" + return [field_details.node for field_details in self.fields] + + +if sys.version_info < (3, 9): + GroupedFieldSet: TypeAlias = Dict[str, FieldGroup] +else: # Python >= 3.9 + GroupedFieldSet: TypeAlias = dict[str, FieldGroup] + + +class GroupedFieldSetDetails(NamedTuple): + """A grouped field set with defer info.""" + + grouped_field_set: GroupedFieldSet + should_initiate_defer: bool + + +class CollectFieldsResult(NamedTuple): + """Collected fields and deferred usages.""" + + grouped_field_set: GroupedFieldSet + 
new_grouped_field_set_details: RefMap[DeferUsageSet, GroupedFieldSetDetails] + new_defer_usages: list[DeferUsage] + + +class CollectFieldsContext(NamedTuple): + """Context for collecting fields.""" + + schema: GraphQLSchema + fragments: dict[str, FragmentDefinitionNode] + variable_values: dict[str, Any] + operation: OperationDefinitionNode + runtime_type: GraphQLObjectType + targets_by_key: dict[str, TargetSet] + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]] + new_defer_usages: list[DeferUsage] + visited_fragment_names: set[str] def collect_fields( schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], - variable_values: Dict[str, Any], + fragments: dict[str, FragmentDefinitionNode], + variable_values: dict[str, Any], runtime_type: GraphQLObjectType, - selection_set: SelectionSetNode, -) -> Dict[str, List[FieldNode]]: + operation: OperationDefinitionNode, +) -> CollectFieldsResult: """Collect fields. Given a selection_set, collects all the fields and returns them. @@ -38,103 +134,197 @@ def collect_fields( For internal use only. """ - fields: Dict[str, List[FieldNode]] = {} - collect_fields_impl( - schema, fragments, variable_values, runtime_type, selection_set, fields, set() + context = CollectFieldsContext( + schema, + fragments, + variable_values, + operation, + runtime_type, + {}, + RefMap(), + [], + set(), + ) + collect_fields_impl(context, operation.selection_set) + + return CollectFieldsResult( + *build_grouped_field_sets(context.targets_by_key, context.fields_by_target), + context.new_defer_usages, ) - return fields -def collect_sub_fields( +def collect_subfields( schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], - variable_values: Dict[str, Any], + fragments: dict[str, FragmentDefinitionNode], + variable_values: dict[str, Any], + operation: OperationDefinitionNode, return_type: GraphQLObjectType, - field_nodes: List[FieldNode], -) -> Dict[str, List[FieldNode]]: - """Collect sub fields. + field_group: FieldGroup, +) -> CollectFieldsResult: + """Collect subfields. Given a list of field nodes, collects all the subfields of the passed in fields, and returns them at the end. - collect_sub_fields requires the "return type" of an object. For a field that + collect_subfields requires the "return type" of an object. For a field that returns an Interface or Union type, the "return type" will be the actual object type returned by that field. For internal use only. 
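+
+    Like collect_fields, the returned CollectFieldsResult also carries any new
+    defer usages encountered while collecting the subfields.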
""" - sub_field_nodes: Dict[str, List[FieldNode]] = {} - visited_fragment_names: Set[str] = set() - for node in field_nodes: + context = CollectFieldsContext( + schema, + fragments, + variable_values, + operation, + return_type, + {}, + RefMap(), + [], + set(), + ) + + for field_details in field_group.fields: + node = field_details.node if node.selection_set: - collect_fields_impl( - schema, - fragments, - variable_values, - return_type, - node.selection_set, - sub_field_nodes, - visited_fragment_names, - ) - return sub_field_nodes + collect_fields_impl(context, node.selection_set, field_details.target) + + return CollectFieldsResult( + *build_grouped_field_sets( + context.targets_by_key, context.fields_by_target, field_group.targets + ), + context.new_defer_usages, + ) def collect_fields_impl( - schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], - variable_values: Dict[str, Any], - runtime_type: GraphQLObjectType, + context: CollectFieldsContext, selection_set: SelectionSetNode, - fields: Dict[str, List[FieldNode]], - visited_fragment_names: Set[str], + parent_target: Target | None = None, + new_target: Target | None = None, ) -> None: """Collect fields (internal implementation).""" + ( + schema, + fragments, + variable_values, + operation, + runtime_type, + targets_by_key, + fields_by_target, + new_defer_usages, + visited_fragment_names, + ) = context + + ancestors: list[Target] + for selection in selection_set.selections: if isinstance(selection, FieldNode): if not should_include_node(variable_values, selection): continue - name = get_field_entry_key(selection) - fields.setdefault(name, []).append(selection) + key = get_field_entry_key(selection) + target = new_target or parent_target + key_targets = targets_by_key.get(key) + if key_targets is None: + key_targets = RefSet([target]) + targets_by_key[key] = key_targets + else: + key_targets.add(target) + target_fields = fields_by_target.get(target) + if target_fields is None: + fields_by_target[target] = {key: [selection]} + else: + field_nodes = target_fields.get(key) + if field_nodes is None: + target_fields[key] = [selection] + else: + field_nodes.append(selection) elif isinstance(selection, InlineFragmentNode): if not should_include_node( variable_values, selection ) or not does_fragment_condition_match(schema, selection, runtime_type): continue - collect_fields_impl( - schema, - fragments, - variable_values, - runtime_type, - selection.selection_set, - fields, - visited_fragment_names, - ) + + defer = get_defer_values(operation, variable_values, selection) + + if defer: + ancestors = ( + [None] + if parent_target is None + else [parent_target, *parent_target.ancestors] + ) + target = DeferUsage(defer.label, ancestors) + new_defer_usages.append(target) + else: + target = new_target + + collect_fields_impl(context, selection.selection_set, parent_target, target) elif isinstance(selection, FragmentSpreadNode): # pragma: no cover else frag_name = selection.name.value - if frag_name in visited_fragment_names or not should_include_node( - variable_values, selection - ): + + if not should_include_node(variable_values, selection): + continue + + defer = get_defer_values(operation, variable_values, selection) + if frag_name in visited_fragment_names and not defer: continue - visited_fragment_names.add(frag_name) + fragment = fragments.get(frag_name) if not fragment or not does_fragment_condition_match( schema, fragment, runtime_type ): continue - collect_fields_impl( - schema, - fragments, - variable_values, - 
runtime_type, - fragment.selection_set, - fields, - visited_fragment_names, - ) + + if defer: + ancestors = ( + [None] + if parent_target is None + else [parent_target, *parent_target.ancestors] + ) + target = DeferUsage(defer.label, ancestors) + new_defer_usages.append(target) + else: + visited_fragment_names.add(frag_name) + target = new_target + + collect_fields_impl(context, fragment.selection_set, parent_target, target) + + +class DeferValues(NamedTuple): + """Values of an active defer directive.""" + + label: str | None + + +def get_defer_values( + operation: OperationDefinitionNode, + variable_values: dict[str, Any], + node: FragmentSpreadNode | InlineFragmentNode, +) -> DeferValues | None: + """Get values of defer directive if active. + + Returns an object containing the `@defer` arguments if a field should be + deferred based on the experimental flag, defer directive present and + not disabled by the "if" argument. + """ + defer = get_directive_values(GraphQLDeferDirective, node, variable_values) + + if not defer or defer.get("if") is False: + return None + + if operation.operation == OperationType.SUBSCRIPTION: + msg = ( + "`@defer` directive not supported on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`." + ) + raise TypeError(msg) + + return DeferValues(defer.get("label")) def should_include_node( - variable_values: Dict[str, Any], - node: Union[FragmentSpreadNode, FieldNode, InlineFragmentNode], + variable_values: dict[str, Any], + node: FragmentSpreadNode | FieldNode | InlineFragmentNode, ) -> bool: """Check if node should be included @@ -146,15 +336,12 @@ def should_include_node( return False include = get_directive_values(GraphQLIncludeDirective, node, variable_values) - if include and not include["if"]: - return False - - return True + return not (include and not include["if"]) def does_fragment_condition_match( schema: GraphQLSchema, - fragment: Union[FragmentDefinitionNode, InlineFragmentNode], + fragment: FragmentDefinitionNode | InlineFragmentNode, type_: GraphQLObjectType, ) -> bool: """Determine if a fragment is applicable to the given type.""" @@ -165,10 +352,119 @@ def does_fragment_condition_match( if conditional_type is type_: return True if is_abstract_type(conditional_type): - return schema.is_sub_type(cast(GraphQLAbstractType, conditional_type), type_) + # noinspection PyTypeChecker + return schema.is_sub_type(conditional_type, type_) return False def get_field_entry_key(node: FieldNode) -> str: - """Implements the logic to compute the key of a given field's entry""" + """Implement the logic to compute the key of a given field's entry""" return node.alias.value if node.alias else node.name.value + + +def build_grouped_field_sets( + targets_by_key: dict[str, TargetSet], + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]], + parent_targets: TargetSet = NON_DEFERRED_TARGET_SET, +) -> tuple[GroupedFieldSet, RefMap[DeferUsageSet, GroupedFieldSetDetails]]: + """Build grouped field sets.""" + parent_target_keys, target_set_details_map = get_target_set_details( + targets_by_key, parent_targets + ) + + grouped_field_set = ( + get_ordered_grouped_field_set( + parent_target_keys, parent_targets, targets_by_key, fields_by_target + ) + if parent_target_keys + else {} + ) + + new_grouped_field_set_details: RefMap[DeferUsageSet, GroupedFieldSetDetails] = ( + RefMap() + ) + + for masking_targets, target_set_details in target_set_details_map.items(): + keys, should_initiate_defer = target_set_details + + new_grouped_field_set 
= get_ordered_grouped_field_set( + keys, masking_targets, targets_by_key, fields_by_target + ) + + # All TargetSets that causes new grouped field sets consist only of DeferUsages + # and have should_initiate_defer defined + + new_grouped_field_set_details[cast("DeferUsageSet", masking_targets)] = ( + GroupedFieldSetDetails(new_grouped_field_set, should_initiate_defer) + ) + + return grouped_field_set, new_grouped_field_set_details + + +class TargetSetDetails(NamedTuple): + """A set of target keys with defer info.""" + + keys: set[str] + should_initiate_defer: bool + + +def get_target_set_details( + targets_by_key: dict[str, TargetSet], parent_targets: TargetSet +) -> tuple[set[str], RefMap[TargetSet, TargetSetDetails]]: + """Get target set details.""" + parent_target_keys: set[str] = set() + target_set_details_map: RefMap[TargetSet, TargetSetDetails] = RefMap() + + for response_key, targets in targets_by_key.items(): + masking_target_list: list[Target] = [] + for target in targets: + if not target or all( + ancestor not in targets for ancestor in target.ancestors + ): + masking_target_list.append(target) + + masking_targets: TargetSet = RefSet(masking_target_list) + if masking_targets == parent_targets: + parent_target_keys.add(response_key) + continue + + for target_set, target_set_details in target_set_details_map.items(): + if target_set == masking_targets: + target_set_details.keys.add(response_key) + break + else: + target_set_details = TargetSetDetails( + {response_key}, + any( + defer_usage not in parent_targets for defer_usage in masking_targets + ), + ) + target_set_details_map[masking_targets] = target_set_details + + return parent_target_keys, target_set_details_map + + +def get_ordered_grouped_field_set( + keys: set[str], + masking_targets: TargetSet, + targets_by_key: dict[str, TargetSet], + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]], +) -> GroupedFieldSet: + """Get ordered grouped field set.""" + grouped_field_set: GroupedFieldSet = {} + + first_target = next(iter(masking_targets)) + first_fields = fields_by_target[first_target] + for key in list(first_fields): + if key in keys: + field_group = grouped_field_set.get(key) + if field_group is None: # pragma: no cover else + field_group = FieldGroup([], masking_targets) + grouped_field_set[key] = field_group + for target in targets_by_key[key]: + fields_for_target = fields_by_target[target] + nodes = fields_for_target[key] + del fields_for_target[key] + field_group.fields.extend(FieldDetails(node, target) for node in nodes) + + return grouped_field_set diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 01ec288a..1097e80f 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1,57 +1,66 @@ -from asyncio import ensure_future, gather -from collections.abc import Mapping -from inspect import isawaitable +"""GraphQL execution""" + +from __future__ import annotations + +from asyncio import ensure_future, gather, shield, wait_for +from contextlib import suppress +from copy import copy from typing import ( Any, + AsyncGenerator, AsyncIterable, + AsyncIterator, Awaitable, Callable, - Dict, Iterable, List, + Mapping, + NamedTuple, Optional, - Union, + Sequence, Tuple, - Type, + Union, cast, ) try: - from typing import TypedDict -except ImportError: # Python < 3.8 - from typing_extensions import TypedDict - -from ..error import GraphQLError, GraphQLFormattedError, located_error + from typing import TypeAlias, TypeGuard # noqa: F401 +except ImportError: 
# Python < 3.10 + from typing_extensions import TypeAlias +try: # only needed for Python < 3.11 + from asyncio.exceptions import TimeoutError # noqa: A004 +except ImportError: # Python < 3.7 + from concurrent.futures import TimeoutError # noqa: A004 + +from ..error import GraphQLError, located_error from ..language import ( DocumentNode, - FieldNode, FragmentDefinitionNode, OperationDefinitionNode, OperationType, ) from ..pyutils import ( - inspect, - is_awaitable as default_is_awaitable, - is_iterable, AwaitableOrValue, Path, + RefMap, Undefined, + async_reduce, + inspect, + is_iterable, ) +from ..pyutils import is_awaitable as default_is_awaitable from ..type import ( GraphQLAbstractType, GraphQLField, + GraphQLFieldResolver, GraphQLLeafType, GraphQLList, - GraphQLNonNull, GraphQLObjectType, GraphQLOutputType, + GraphQLResolveInfo, GraphQLSchema, - GraphQLFieldResolver, + GraphQLStreamDirective, GraphQLTypeResolver, - GraphQLResolveInfo, - SchemaMetaFieldDef, - TypeMetaFieldDef, - TypeNameMetaFieldDef, assert_valid_schema, is_abstract_type, is_leaf_type, @@ -59,23 +68,59 @@ is_non_null_type, is_object_type, ) -from .collect_fields import collect_fields, collect_sub_fields +from .async_iterables import map_async_iterable +from .collect_fields import ( + NON_DEFERRED_TARGET_SET, + CollectFieldsResult, + DeferUsage, + DeferUsageSet, + FieldDetails, + FieldGroup, + GroupedFieldSet, + GroupedFieldSetDetails, + collect_fields, + collect_subfields, +) +from .incremental_publisher import ( + ASYNC_DELAY, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + ExecutionResult, + ExperimentalIncrementalExecutionResults, + IncrementalDataRecord, + IncrementalPublisher, + InitialResultRecord, + StreamItemsRecord, + StreamRecord, +) from .middleware import MiddlewareManager -from .values import get_argument_values, get_variable_values +from .values import get_argument_values, get_directive_values, get_variable_values + +try: # pragma: no cover + anext # noqa: B018 # pyright: ignore +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator: AsyncIterator) -> Any: + """Return the next item from an async iterator.""" + return await iterator.__anext__() + __all__ = [ - "assert_valid_execution_arguments", + "ASYNC_DELAY", + "ExecutionContext", + "Middleware", + "create_source_event_stream", "default_field_resolver", "default_type_resolver", "execute", "execute_sync", - "get_field_def", - "ExecutionResult", - "ExecutionContext", - "FormattedExecutionResult", - "Middleware", + "experimental_execute_incrementally", + "subscribe", ] +suppress_exceptions = suppress(Exception) +suppress_timeout_error = suppress(TimeoutError) + # Terminology # @@ -96,79 +141,15 @@ # 3) inline fragment "spreads" e.g. "...on Type { a }" -class FormattedExecutionResult(TypedDict, total=False): - """Formatted execution result""" - - errors: List[GraphQLFormattedError] - data: Optional[Dict[str, Any]] - extensions: Dict[str, Any] - - -class ExecutionResult: - """The result of GraphQL execution. - - - ``data`` is the result of a successful execution of the query. - - ``errors`` is included when any errors occurred as a non-empty list. - - ``extensions`` is reserved for adding non-standard properties. 
- """ - - __slots__ = "data", "errors", "extensions" - - data: Optional[Dict[str, Any]] - errors: Optional[List[GraphQLError]] - extensions: Optional[Dict[str, Any]] - - def __init__( - self, - data: Optional[Dict[str, Any]] = None, - errors: Optional[List[GraphQLError]] = None, - extensions: Optional[Dict[str, Any]] = None, - ): - self.data = data - self.errors = errors - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - ext = "" if self.extensions is None else f", extensions={self.extensions}" - return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" - - def __iter__(self) -> Iterable[Any]: - return iter((self.data, self.errors)) - - @property - def formatted(self) -> FormattedExecutionResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedExecutionResult = {"data": self.data} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: Any) -> bool: - if isinstance(other, dict): - if "extensions" not in other: - return other == dict(data=self.data, errors=self.errors) - return other == dict( - data=self.data, errors=self.errors, extensions=self.extensions - ) - if isinstance(other, tuple): - if len(other) == 2: - return other == (self.data, self.errors) - return other == (self.data, self.errors, self.extensions) - return ( - isinstance(other, self.__class__) - and other.data == self.data - and other.errors == self.errors - and other.extensions == self.extensions - ) +Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] - def __ne__(self, other: Any) -> bool: - return not self == other +class StreamUsage(NamedTuple): + """Stream directive usage information""" -Middleware = Optional[Union[Tuple, List, MiddlewareManager]] + label: str | None + initial_count: int + field_group: FieldGroup class ExecutionContext: @@ -179,33 +160,33 @@ class ExecutionContext: """ schema: GraphQLSchema - fragments: Dict[str, FragmentDefinitionNode] + fragments: dict[str, FragmentDefinitionNode] root_value: Any context_value: Any operation: OperationDefinitionNode - variable_values: Dict[str, Any] + variable_values: dict[str, Any] field_resolver: GraphQLFieldResolver type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver - errors: List[GraphQLError] - middleware_manager: Optional[MiddlewareManager] + incremental_publisher: IncrementalPublisher + middleware_manager: MiddlewareManager | None - is_awaitable = staticmethod(default_is_awaitable) + is_awaitable: Callable[[Any], bool] = staticmethod(default_is_awaitable) def __init__( self, schema: GraphQLSchema, - fragments: Dict[str, FragmentDefinitionNode], + fragments: dict[str, FragmentDefinitionNode], root_value: Any, context_value: Any, operation: OperationDefinitionNode, - variable_values: Dict[str, Any], + variable_values: dict[str, Any], field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, - errors: List[GraphQLError], - middleware_manager: Optional[MiddlewareManager], - is_awaitable: Optional[Callable[[Any], bool]], + incremental_publisher: IncrementalPublisher, + middleware_manager: MiddlewareManager | None, + is_awaitable: Callable[[Any], bool] | None, ) -> None: self.schema = schema self.fragments = fragments @@ -213,14 +194,17 @@ def __init__( self.context_value = context_value 
self.operation = operation self.variable_values = variable_values - self.field_resolver = field_resolver # type: ignore - self.type_resolver = type_resolver # type: ignore - self.subscribe_field_resolver = subscribe_field_resolver # type: ignore - self.errors = errors + self.field_resolver = field_resolver + self.type_resolver = type_resolver + self.subscribe_field_resolver = subscribe_field_resolver + self.incremental_publisher = incremental_publisher self.middleware_manager = middleware_manager if is_awaitable: self.is_awaitable = is_awaitable - self._subfields_cache: Dict[Tuple, Dict[str, List[FieldNode]]] = {} + self._canceled_iterators: set[AsyncIterator] = set() + self._subfields_cache: dict[tuple, CollectFieldsResult] = {} + self._tasks: set[Awaitable] = set() + self._stream_usages: RefMap[FieldGroup, StreamUsage] = RefMap() @classmethod def build( @@ -229,14 +213,15 @@ def build( document: DocumentNode, root_value: Any = None, context_value: Any = None, - raw_variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - middleware: Optional[Middleware] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, - ) -> Union[List[GraphQLError], "ExecutionContext"]: + raw_variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + middleware: Middleware | None = None, + is_awaitable: Callable[[Any], bool] | None = None, + **custom_args: Any, + ) -> list[GraphQLError] | ExecutionContext: """Build an execution context Constructs a ExecutionContext object from the arguments passed to execute, which @@ -246,20 +231,24 @@ def build( For internal use only. """ - operation: Optional[OperationDefinitionNode] = None - fragments: Dict[str, FragmentDefinitionNode] = {} - middleware_manager: Optional[MiddlewareManager] = None + # If the schema used for execution is invalid, raise an error. + assert_valid_schema(schema) + + operation: OperationDefinitionNode | None = None + fragments: dict[str, FragmentDefinitionNode] = {} + middleware_manager: MiddlewareManager | None = None if middleware is not None: if isinstance(middleware, (list, tuple)): middleware_manager = MiddlewareManager(*middleware) elif isinstance(middleware, MiddlewareManager): middleware_manager = middleware else: - raise TypeError( + msg = ( "Middleware must be passed as a list or tuple of functions" " or objects, or as a single MiddlewareManager object." f" Got {inspect(middleware)} instead." ) + raise TypeError(msg) for definition in document.definitions: if isinstance(definition, OperationDefinitionNode): @@ -302,121 +291,136 @@ def build( field_resolver or default_field_resolver, type_resolver or default_type_resolver, subscribe_field_resolver or default_field_resolver, - [], + IncrementalPublisher(), middleware_manager, is_awaitable, + **custom_args, ) - @staticmethod - def build_response( - data: Optional[Dict[str, Any]], errors: List[GraphQLError] - ) -> ExecutionResult: - """Build response. - - Given a completed execution context and data, build the (data, errors) response - defined by the "Response" section of the GraphQL spec. 
- """ - if not errors: - return ExecutionResult(data, None) - # Sort the error list in order to make it deterministic, since we might have - # been using parallel execution. - errors.sort( - key=lambda error: (error.locations or [], error.path or [], error.message) - ) - return ExecutionResult(data, errors) + def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: + """Create a copy of the execution context for usage with subscribe events.""" + context = copy(self) + context.root_value = payload + return context def execute_operation( - self, operation: OperationDefinitionNode, root_value: Any - ) -> Optional[AwaitableOrValue[Any]]: + self, initial_result_record: InitialResultRecord + ) -> AwaitableOrValue[dict[str, Any]]: """Execute an operation. Implements the "Executing operations" section of the spec. """ - root_type = self.schema.get_root_type(operation.operation) + operation = self.operation + schema = self.schema + root_type = schema.get_root_type(operation.operation) if root_type is None: - raise GraphQLError( + msg = ( "Schema is not configured to execute" - f" {operation.operation.value} operation.", - operation, + f" {operation.operation.value} operation." ) + raise GraphQLError(msg, operation) - root_fields = collect_fields( - self.schema, - self.fragments, - self.variable_values, - root_type, - operation.selection_set, + grouped_field_set, new_grouped_field_set_details, new_defer_usages = ( + collect_fields( + schema, self.fragments, self.variable_values, root_type, operation + ) + ) + + incremental_publisher = self.incremental_publisher + new_defer_map = add_new_deferred_fragments( + incremental_publisher, new_defer_usages, initial_result_record ) - path = None + path: Path | None = None + + new_deferred_grouped_field_set_records = add_new_deferred_grouped_field_sets( + incremental_publisher, + new_grouped_field_set_details, + new_defer_map, + path, + ) - return ( + root_value = self.root_value + # noinspection PyTypeChecker + result = ( self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, path, root_fields) + )( + root_type, + root_value, + path, + grouped_field_set, + initial_result_record, + new_defer_map, + ) + + self.execute_deferred_grouped_field_sets( + root_type, + root_value, + path, + new_deferred_grouped_field_set_records, + new_defer_map, + ) + + return result def execute_fields_serially( self, parent_type: GraphQLObjectType, source_value: Any, - path: Optional[Path], - fields: Dict[str, List[FieldNode]], - ) -> AwaitableOrValue[Dict[str, Any]]: + path: Path | None, + grouped_field_set: GroupedFieldSet, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. Implements the "Executing selection sets" section of the spec for fields that must be executed serially. 
""" - results: AwaitableOrValue[Dict[str, Any]] = {} is_awaitable = self.is_awaitable - for response_name, field_nodes in fields.items(): + + def reducer( + results: dict[str, Any], field_item: tuple[str, FieldGroup] + ) -> AwaitableOrValue[dict[str, Any]]: + response_name, field_group = field_item field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_nodes, field_path + parent_type, + source_value, + field_group, + field_path, + incremental_data_record, + defer_map, ) if result is Undefined: - continue - if is_awaitable(results): - # noinspection PyShadowingNames - async def await_and_set_result( - results: Awaitable[Dict[str, Any]], - response_name: str, - result: AwaitableOrValue[Any], - ) -> Dict[str, Any]: - awaited_results = await results - awaited_results[response_name] = ( - await result if is_awaitable(result) else result - ) - return awaited_results - - results = await_and_set_result( - cast(Awaitable, results), response_name, result - ) - elif is_awaitable(result): + return results + if is_awaitable(result): # noinspection PyShadowingNames async def set_result( - results: Dict[str, Any], response_name: str, - result: Awaitable, - ) -> Dict[str, Any]: - results[response_name] = await result + awaitable_result: Awaitable, + ) -> dict[str, Any]: + results[response_name] = await awaitable_result return results - results = set_result( - cast(Dict[str, Any], results), response_name, result - ) - else: - cast(Dict[str, Any], results)[response_name] = result - return results + return set_result(response_name, result) + results[response_name] = result + return results + + # noinspection PyTypeChecker + return async_reduce(reducer, grouped_field_set.items(), {}) def execute_fields( self, parent_type: GraphQLObjectType, source_value: Any, - path: Optional[Path], - fields: Dict[str, List[FieldNode]], - ) -> AwaitableOrValue[Dict[str, Any]]: + path: Path | None, + grouped_field_set: GroupedFieldSet, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. Implements the "Executing selection sets" section of the spec @@ -424,19 +428,24 @@ def execute_fields( """ results = {} is_awaitable = self.is_awaitable - awaitable_fields: List[str] = [] + awaitable_fields: list[str] = [] append_awaitable = awaitable_fields.append - for response_name, field_nodes in fields.items(): + for response_name, field_group in grouped_field_set.items(): field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_nodes, field_path + parent_type, + source_value, + field_group, + field_path, + incremental_data_record, + defer_map, ) if result is not Undefined: results[response_name] = result if is_awaitable(result): append_awaitable(response_name) - # If there are no coroutines, we can just return the object + # If there are no coroutines, we can just return the object. if not awaitable_fields: return results @@ -444,50 +453,30 @@ def execute_fields( # field, which is possibly a coroutine object. Return a coroutine object that # will yield this same map, but with any coroutines awaited in parallel and # replaced with the values they yielded. 
- async def get_results() -> Dict[str, Any]: - results.update( - zip( - awaitable_fields, - await gather(*(results[field] for field in awaitable_fields)), + async def get_results() -> dict[str, Any]: + if len(awaitable_fields) == 1: + # If there is only one field, avoid the overhead of parallelization. + field = awaitable_fields[0] + results[field] = await results[field] + else: + results.update( + zip( + awaitable_fields, + await gather(*(results[field] for field in awaitable_fields)), + ) ) - ) return results return get_results() - def build_resolve_info( - self, - field_def: GraphQLField, - field_nodes: List[FieldNode], - parent_type: GraphQLObjectType, - path: Path, - ) -> GraphQLResolveInfo: - """Build the GraphQLResolveInfo object. - - For internal use only.""" - # The resolve function's first argument is a collection of information about - # the current execution state. - return GraphQLResolveInfo( - field_nodes[0].name.value, - field_nodes, - field_def.type, - parent_type, - path, - self.schema, - self.fragments, - self.root_value, - self.operation, - self.variable_values, - self.context_value, - self.is_awaitable, - ) - def execute_field( self, parent_type: GraphQLObjectType, source: Any, - field_nodes: List[FieldNode], + field_group: FieldGroup, path: Path, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. @@ -497,7 +486,8 @@ def execute_field( calling its resolve function, then calls complete_value to await coroutine objects, serialize scalars, or execute the sub-selection-set for objects. """ - field_def = get_field_def(self.schema, parent_type, field_nodes[0]) + field_name = field_group.fields[0].node.name.value + field_def = self.schema.get_field(parent_type, field_name) if not field_def: return Undefined @@ -507,38 +497,40 @@ def execute_field( if self.middleware_manager: resolve_fn = self.middleware_manager.get_field_resolver(resolve_fn) - info = self.build_resolve_info(field_def, field_nodes, parent_type, path) + info = self.build_resolve_info(field_def, field_group, parent_type, path) # Get the resolve function, regardless of if its result is normal or abrupt # (error). try: # Build a dictionary of arguments from the field.arguments AST, using the # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_nodes[0], self.variable_values) + args = get_argument_values( + field_def, field_group.fields[0].node, self.variable_values + ) # Note that contrary to the JavaScript implementation, we pass the context # value as part of the resolve info. 
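
As an aside on the middleware hook used above: a graphql-core middleware is a callable that receives the next resolver as its first argument and may transform the result. A small self-contained example against the public API (schema and field names are invented):

```python
from graphql import build_schema, graphql_sync

schema = build_schema("type Query { hello: String }")

def upper_middleware(next_, root, info, **args):
    # call the wrapped resolver, then post-process its result
    result = next_(root, info, **args)
    return result.upper() if isinstance(result, str) else result

result = graphql_sync(
    schema,
    "{ hello }",
    root_value={"hello": "world"},
    middleware=[upper_middleware],
)
assert result.data == {"hello": "WORLD"}
```
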
result = resolve_fn(source, info, **args) if self.is_awaitable(result): - # noinspection PyShadowingNames - async def await_result() -> Any: - try: - completed = self.complete_value( - return_type, field_nodes, info, path, await result - ) - if self.is_awaitable(completed): - return await completed - return completed - except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) - self.handle_field_error(error, return_type) - return None - - return await_result() + return self.complete_awaitable_value( + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, + ) completed = self.complete_value( - return_type, field_nodes, info, path, result + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) if self.is_awaitable(completed): # noinspection PyShadowingNames @@ -546,39 +538,88 @@ async def await_completed() -> Any: try: return await completed except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) - self.handle_field_error(error, return_type) + self.handle_field_error( + raw_error, + return_type, + field_group, + path, + incremental_data_record, + ) + self.incremental_publisher.filter(path, incremental_data_record) return None return await_completed() - return completed except Exception as raw_error: - error = located_error(raw_error, field_nodes, path.as_list()) - self.handle_field_error(error, return_type) + self.handle_field_error( + raw_error, + return_type, + field_group, + path, + incremental_data_record, + ) + self.incremental_publisher.filter(path, incremental_data_record) return None + return completed + + def build_resolve_info( + self, + field_def: GraphQLField, + field_group: FieldGroup, + parent_type: GraphQLObjectType, + path: Path, + ) -> GraphQLResolveInfo: + """Build the GraphQLResolveInfo object. + + For internal use only. + """ + # The resolve function's first argument is a collection of information about + # the current execution state. + return GraphQLResolveInfo( + field_group.fields[0].node.name.value, + field_group.to_nodes(), + field_def.type, + parent_type, + path, + self.schema, + self.fragments, + self.root_value, + self.operation, + self.variable_values, + self.context_value, + self.is_awaitable, + ) + def handle_field_error( self, - error: GraphQLError, + raw_error: Exception, return_type: GraphQLOutputType, + field_group: FieldGroup, + path: Path, + incremental_data_record: IncrementalDataRecord, ) -> None: + """Handle error properly according to the field type.""" + error = located_error(raw_error, field_group.to_nodes(), path.as_list()) + # If the field type is non-nullable, then it is resolved without any protection # from errors, however it still properly locates the error. if is_non_null_type(return_type): raise error + # Otherwise, error protection is applied, logging the error and resolving a # null value for this field if one is encountered. - self.errors.append(error) - return None + self.incremental_publisher.add_field_error(incremental_data_record, error) def complete_value( self, return_type: GraphQLOutputType, - field_nodes: List[FieldNode], + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Complete a value. @@ -610,17 +651,20 @@ def complete_value( # result is null. 
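
The error-handling split implemented by `handle_field_error` is observable through the public API: an error in a nullable field is recorded and the field becomes ``null``, while an error in a non-nullable field propagates to the parent. A quick demonstration (field names invented):

```python
from graphql import build_schema, graphql_sync

schema = build_schema("type Query { soft: String hard: String! }")

def boom(info):
    raise RuntimeError("boom")

soft = graphql_sync(schema, "{ soft }", root_value={"soft": boom})
assert soft.data == {"soft": None} and soft.errors  # error logged, field nulled

hard = graphql_sync(schema, "{ hard }", root_value={"hard": boom})
assert hard.data is None and hard.errors  # non-null error bubbles to the root
```
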
if is_non_null_type(return_type): completed = self.complete_value( - cast(GraphQLNonNull, return_type).of_type, - field_nodes, + return_type.of_type, + field_group, info, path, result, + incremental_data_record, + defer_map, ) if completed is None: - raise TypeError( + msg = ( "Cannot return null for non-nullable field" f" {info.parent_type.name}.{info.field_name}." ) + raise TypeError(msg) return completed # If result value is null or undefined then return null. @@ -630,162 +674,435 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - cast(GraphQLList, return_type), field_nodes, info, path, result + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, # returning null if serialization is not possible. if is_leaf_type(return_type): - return self.complete_leaf_value(cast(GraphQLLeafType, return_type), result) + return self.complete_leaf_value(return_type, result) # If field type is an abstract type, Interface or Union, determine the runtime # Object type and complete for that type. if is_abstract_type(return_type): return self.complete_abstract_value( - cast(GraphQLAbstractType, return_type), field_nodes, info, path, result + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - cast(GraphQLObjectType, return_type), field_nodes, info, path, result + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # Not reachable. All possible output types have been considered. - raise TypeError( # pragma: no cover + msg = ( "Cannot complete value of unexpected output type:" f" '{inspect(return_type)}'." + ) # pragma: no cover + raise TypeError(msg) # pragma: no cover + + async def complete_awaitable_value( + self, + return_type: GraphQLOutputType, + field_group: FieldGroup, + info: GraphQLResolveInfo, + path: Path, + result: Any, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> Any: + """Complete an awaitable value.""" + try: + resolved = await result + completed = self.complete_value( + return_type, + field_group, + info, + path, + resolved, + incremental_data_record, + defer_map, + ) + if self.is_awaitable(completed): + completed = await completed + except Exception as raw_error: + self.handle_field_error( + raw_error, return_type, field_group, path, incremental_data_record + ) + self.incremental_publisher.filter(path, incremental_data_record) + completed = None + return completed + + def get_stream_usage( + self, field_group: FieldGroup, path: Path + ) -> StreamUsage | None: + """Get stream usage. + + Returns an object containing info for streaming if a field should be + streamed based on the experimental flag, stream directive present and + not disabled by the "if" argument. 
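
The non-null check at the top of `complete_value` is what produces the familiar error below: returning ``None`` where the schema promises ``String!`` nulls the whole parent, here the entire response.

```python
from graphql import build_schema, graphql_sync

schema = build_schema("type Query { name: String! }")

result = graphql_sync(schema, "{ name }", root_value={"name": None})
assert result.data is None
assert "Cannot return null for non-nullable field" in result.errors[0].message
```
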
+ """ + # do not stream inner lists of multidimensional lists + if isinstance(path.key, int): + return None + + stream_usage = self._stream_usages.get(field_group) + if stream_usage is not None: + return stream_usage # pragma: no cover + + # validation only allows equivalent streams on multiple fields, so it is + # safe to only check the first field_node for the stream directive + stream = get_directive_values( + GraphQLStreamDirective, field_group.fields[0].node, self.variable_values + ) + + if not stream or stream.get("if") is False: + return None + + initial_count = stream.get("initialCount") + if initial_count is None or initial_count < 0: + msg = "initialCount must be a positive integer" + raise ValueError(msg) + + if self.operation.operation == OperationType.SUBSCRIPTION: + msg = ( + "`@stream` directive not supported on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`." + ) + raise TypeError(msg) + + streamed_field_group = FieldGroup( + [ + FieldDetails(field_details.node, None) + for field_details in field_group.fields + ], + NON_DEFERRED_TARGET_SET, + ) + + stream_usage = StreamUsage( + stream.get("label"), stream["initialCount"], streamed_field_group ) + self._stream_usages[field_group] = stream_usage + + return stream_usage + + async def complete_async_iterator_value( + self, + item_type: GraphQLOutputType, + field_group: FieldGroup, + info: GraphQLResolveInfo, + path: Path, + async_iterator: AsyncIterator[Any], + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> list[Any]: + """Complete an async iterator. + + Complete an async iterator value by completing the result and calling + recursively until all the results are completed. + """ + stream_usage = self.get_stream_usage(field_group, path) + complete_list_item_value = self.complete_list_item_value + awaitable_indices: list[int] = [] + append_awaitable = awaitable_indices.append + completed_results: list[Any] = [] + index = 0 + while True: + if stream_usage and index >= stream_usage.initial_count: + try: + early_return = async_iterator.aclose # type: ignore + except AttributeError: + early_return = None + stream_record = StreamRecord(path, stream_usage.label, early_return) + + with suppress_timeout_error: + await wait_for( + shield( + self.execute_stream_async_iterator( + index, + async_iterator, + stream_usage.field_group, + info, + item_type, + path, + incremental_data_record, + stream_record, + ) + ), + timeout=ASYNC_DELAY, + ) + break + + item_path = path.add_key(index, None) + try: + try: + value = await anext(async_iterator) + except StopAsyncIteration: + break + except Exception as raw_error: + raise located_error( + raw_error, field_group.to_nodes(), path.as_list() + ) from raw_error + if complete_list_item_value( + value, + completed_results, + item_type, + field_group, + info, + item_path, + incremental_data_record, + defer_map, + ): + append_awaitable(index) + + index += 1 + + if not awaitable_indices: + return completed_results + + if len(awaitable_indices) == 1: + # If there is only one index, avoid the overhead of parallelization. 
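
For orientation, this is the shape of operation that `get_stream_usage` is looking for. It is only a sketch: actually running it requires a schema that explicitly declares the experimental ``@stream`` directive, which is not part of the default directive set.

```python
# Illustrative only: with @stream, the initial payload carries the first
# `initialCount` list items; the rest arrive as incremental payloads,
# matched to this field by the optional label.
query = """
    query {
        friends @stream(initialCount: 2, label: "moreFriends") {
            name
        }
    }
"""
# Executed via experimental_execute_incrementally, this would yield an
# initial result with friends[0:2], then payloads labeled "moreFriends".
```
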
+ index = awaitable_indices[0] + completed_results[index] = await completed_results[index] + else: + for index, result in zip( + awaitable_indices, + await gather( + *(completed_results[index] for index in awaitable_indices) + ), + ): + completed_results[index] = result + return completed_results + def complete_list_value( self, return_type: GraphQLList[GraphQLOutputType], - field_nodes: List[FieldNode], + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, - result: Union[AsyncIterable[Any], Iterable[Any]], - ) -> AwaitableOrValue[List[Any]]: + result: AsyncIterable[Any] | Iterable[Any], + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> AwaitableOrValue[list[Any]]: """Complete a list value. Complete a list value by completing each item in the list with the inner type. """ - if not is_iterable(result): - # experimental: allow async iterables - if isinstance(result, AsyncIterable): - # noinspection PyShadowingNames - async def async_iterable_to_list( - async_result: AsyncIterable[Any], - ) -> Any: - sync_result = [item async for item in async_result] - return self.complete_list_value( - return_type, field_nodes, info, path, sync_result - ) + item_type = return_type.of_type - return async_iterable_to_list(result) + if isinstance(result, AsyncIterable): + async_iterator = result.__aiter__() + + return self.complete_async_iterator_value( + item_type, + field_group, + info, + path, + async_iterator, + incremental_data_record, + defer_map, + ) - raise GraphQLError( + if not is_iterable(result): + msg = ( "Expected Iterable, but did not find one for field" f" '{info.parent_type.name}.{info.field_name}'." ) - result = cast(Iterable[Any], result) + raise GraphQLError(msg) + + stream_usage = self.get_stream_usage(field_group, path) # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine # object. - item_type = return_type.of_type - is_awaitable = self.is_awaitable - awaitable_indices: List[int] = [] + complete_list_item_value = self.complete_list_item_value + current_parents = incremental_data_record + awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append - completed_results: List[Any] = [] - append_result = completed_results.append + completed_results: list[Any] = [] + stream_record: StreamRecord | None = None for index, item in enumerate(result): # No need to modify the info object containing the path, since from here on # it is not ever accessed by resolver functions. 
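
List completion also accepts async iterables, as the `AsyncIterable` branch above shows: a resolver may return an async generator and the executor drains it into a list (when no ``@stream`` directive applies). A runnable sketch via the public API:

```python
import asyncio

from graphql import build_schema, graphql

schema = build_schema("type Query { numbers: [Int] }")

async def numbers(info):
    for i in range(3):
        yield i  # async generators are consumed item by item

async def main():
    result = await graphql(schema, "{ numbers }", root_value={"numbers": numbers})
    assert result.data == {"numbers": [0, 1, 2]}

asyncio.run(main())
```
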
item_path = path.add_key(index, None) - completed_item: AwaitableOrValue[Any] - if is_awaitable(item): - # noinspection PyShadowingNames - async def await_completed(item: Any, item_path: Path) -> Any: - try: - completed = self.complete_value( - item_type, field_nodes, info, item_path, await item - ) - if is_awaitable(completed): - return await completed - return completed - except Exception as raw_error: - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - self.handle_field_error(error, item_type) - return None - completed_item = await_completed(item, item_path) - else: - try: - completed_item = self.complete_value( - item_type, field_nodes, info, item_path, item - ) - if is_awaitable(completed_item): - # noinspection PyShadowingNames - async def await_completed(item: Any, item_path: Path) -> Any: - try: - return await item - except Exception as raw_error: - error = located_error( - raw_error, field_nodes, item_path.as_list() - ) - self.handle_field_error(error, item_type) - return None - - completed_item = await_completed(completed_item, item_path) - except Exception as raw_error: - error = located_error(raw_error, field_nodes, item_path.as_list()) - self.handle_field_error(error, item_type) - completed_item = None + if stream_usage and index >= stream_usage.initial_count: + if stream_record is None: + stream_record = StreamRecord(path, stream_usage.label) + current_parents = self.execute_stream_field( + path, + item_path, + item, + stream_usage.field_group, + info, + item_type, + current_parents, + stream_record, + ) + continue - if is_awaitable(completed_item): + if complete_list_item_value( + item, + completed_results, + item_type, + field_group, + info, + item_path, + incremental_data_record, + defer_map, + ): append_awaitable(index) - append_result(completed_item) + + if stream_record is not None: + self.incremental_publisher.set_is_final_record( + cast("StreamItemsRecord", current_parents) + ) if not awaitable_indices: return completed_results # noinspection PyShadowingNames - async def get_completed_results() -> List[Any]: - for index, result in zip( - awaitable_indices, - await gather( - *(completed_results[index] for index in awaitable_indices) - ), - ): - completed_results[index] = result + async def get_completed_results() -> list[Any]: + if len(awaitable_indices) == 1: + # If there is only one index, avoid the overhead of parallelization. + index = awaitable_indices[0] + completed_results[index] = await completed_results[index] + else: + for index, sub_result in zip( + awaitable_indices, + await gather( + *(completed_results[index] for index in awaitable_indices) + ), + ): + completed_results[index] = sub_result return completed_results return get_completed_results() - @staticmethod - def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: - """Complete a leaf value. + def complete_list_item_value( + self, + item: Any, + complete_results: list[Any], + item_type: GraphQLOutputType, + field_group: FieldGroup, + info: GraphQLResolveInfo, + item_path: Path, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> bool: + """Complete a list item value by adding it to the completed results. + + Returns True if the value is awaitable. 
+ """ + is_awaitable = self.is_awaitable + + if is_awaitable(item): + complete_results.append( + self.complete_awaitable_value( + item_type, + field_group, + info, + item_path, + item, + incremental_data_record, + defer_map, + ) + ) + return True + + try: + completed_item = self.complete_value( + item_type, + field_group, + info, + item_path, + item, + incremental_data_record, + defer_map, + ) + + if is_awaitable(completed_item): + # noinspection PyShadowingNames + async def await_completed() -> Any: + try: + return await completed_item + except Exception as raw_error: + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + incremental_data_record, + ) + self.incremental_publisher.filter( + item_path, incremental_data_record + ) + return None + + complete_results.append(await_completed()) + return True + + complete_results.append(completed_item) + + except Exception as raw_error: + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + incremental_data_record, + ) + self.incremental_publisher.filter(item_path, incremental_data_record) + complete_results.append(None) + + return False + + @staticmethod + def complete_leaf_value(return_type: GraphQLLeafType, result: Any) -> Any: + """Complete a leaf value. Complete a Scalar or Enum by serializing to a valid value, returning null if serialization is not possible. """ serialized_result = return_type.serialize(result) if serialized_result is Undefined or serialized_result is None: - raise TypeError( + msg = ( f"Expected `{inspect(return_type)}.serialize({inspect(result)})`" - f" to return non-nullable value, returned: {inspect(serialized_result)}" + " to return non-nullable value, returned:" + f" {inspect(serialized_result)}" ) + raise TypeError(msg) return serialized_result def complete_abstract_value( self, return_type: GraphQLAbstractType, - field_nodes: List[FieldNode], + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Complete an abstract value. @@ -793,142 +1110,198 @@ def complete_abstract_value( that value, then complete the value for that type. 
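
The contract enforced by `complete_leaf_value` above can be seen with a deliberately broken scalar whose ``serialize`` returns ``None`` (scalar and field names invented):

```python
from graphql import (
    GraphQLField,
    GraphQLObjectType,
    GraphQLScalarType,
    GraphQLSchema,
    graphql_sync,
)

BrokenScalar = GraphQLScalarType("Broken", serialize=lambda value: None)

schema = GraphQLSchema(
    query=GraphQLObjectType(
        "Query", {"field": GraphQLField(BrokenScalar, resolve=lambda *_: "x")}
    )
)

result = graphql_sync(schema, "{ field }")
# serialization failure is a field error: the field is nulled and reported
assert result.data == {"field": None} and result.errors
```
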
""" resolve_type_fn = return_type.resolve_type or self.type_resolver - runtime_type = resolve_type_fn(result, info, return_type) # type: ignore + runtime_type = resolve_type_fn(result, info, return_type) if self.is_awaitable(runtime_type): - runtime_type = cast(Awaitable, runtime_type) + runtime_type = cast("Awaitable", runtime_type) async def await_complete_object_value() -> Any: value = self.complete_object_value( self.ensure_valid_runtime_type( await runtime_type, # type: ignore return_type, - field_nodes, + field_group, info, result, ), - field_nodes, + field_group, info, path, result, + incremental_data_record, + defer_map, ) if self.is_awaitable(value): return await value # type: ignore return value # pragma: no cover return await_complete_object_value() - runtime_type = cast(Optional[str], runtime_type) + runtime_type = cast("Optional[str]", runtime_type) return self.complete_object_value( self.ensure_valid_runtime_type( - runtime_type, return_type, field_nodes, info, result + runtime_type, return_type, field_group, info, result ), - field_nodes, + field_group, info, path, result, + incremental_data_record, + defer_map, ) def ensure_valid_runtime_type( self, runtime_type_name: Any, return_type: GraphQLAbstractType, - field_nodes: List[FieldNode], + field_group: FieldGroup, info: GraphQLResolveInfo, result: Any, ) -> GraphQLObjectType: + """Ensure that the given type is valid at runtime.""" if runtime_type_name is None: - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' must resolve" " to an Object type at runtime" f" for field '{info.parent_type.name}.{info.field_name}'." f" Either the '{return_type.name}' type should provide" " a 'resolve_type' function or each possible type should provide" - " an 'is_type_of' function.", - field_nodes, + " an 'is_type_of' function." ) + raise GraphQLError(msg, field_group.to_nodes()) if is_object_type(runtime_type_name): # pragma: no cover - raise GraphQLError( + msg = ( "Support for returning GraphQLObjectType from resolve_type was" " removed in GraphQL-core 3.2, please return type name instead." ) + raise GraphQLError(msg) if not isinstance(runtime_type_name, str): - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' must resolve" " to an Object type at runtime" f" for field '{info.parent_type.name}.{info.field_name}' with value" - f" {inspect(result)}, received '{inspect(runtime_type_name)}'.", - field_nodes, + f" {inspect(result)}, received '{inspect(runtime_type_name)}'." ) + raise GraphQLError(msg, field_group.to_nodes()) runtime_type = self.schema.get_type(runtime_type_name) if runtime_type is None: - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' was resolved to a type" - f" '{runtime_type_name}' that does not exist inside the schema.", - field_nodes, + f" '{runtime_type_name}' that does not exist inside the schema." ) + raise GraphQLError(msg, field_group.to_nodes()) if not is_object_type(runtime_type): - raise GraphQLError( + msg = ( f"Abstract type '{return_type.name}' was resolved" - f" to a non-object type '{runtime_type_name}'.", - field_nodes, + f" to a non-object type '{runtime_type_name}'." ) - - runtime_type = cast(GraphQLObjectType, runtime_type) + raise GraphQLError(msg, field_group.to_nodes()) if not self.schema.is_sub_type(return_type, runtime_type): - raise GraphQLError( + msg = ( f"Runtime Object type '{runtime_type.name}' is not a possible" - f" type for '{return_type.name}'.", - field_nodes, + f" type for '{return_type.name}'." 
) + raise GraphQLError(msg, field_group.to_nodes()) + # noinspection PyTypeChecker return runtime_type def complete_object_value( self, return_type: GraphQLObjectType, - field_nodes: List[FieldNode], + field_group: FieldGroup, info: GraphQLResolveInfo, path: Path, result: Any, - ) -> AwaitableOrValue[Dict[str, Any]]: + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" - # Collect sub-fields to execute to complete this value. - sub_field_nodes = self.collect_subfields(return_type, field_nodes) - # If there is an `is_type_of()` predicate function, call it with the current # result. If `is_type_of()` returns False, then raise an error rather than - # continuing execution. + # continuing execution. if return_type.is_type_of: is_type_of = return_type.is_type_of(result, info) if self.is_awaitable(is_type_of): - async def execute_subfields_async() -> Dict[str, Any]: + async def execute_subfields_async() -> dict[str, Any]: if not await is_type_of: # type: ignore raise invalid_return_type_error( - return_type, result, field_nodes + return_type, result, field_group ) - return self.execute_fields( - return_type, result, path, sub_field_nodes + return self.collect_and_execute_subfields( + return_type, + field_group, + path, + result, + incremental_data_record, + defer_map, ) # type: ignore return execute_subfields_async() if not is_type_of: - raise invalid_return_type_error(return_type, result, field_nodes) + raise invalid_return_type_error(return_type, result, field_group) + + return self.collect_and_execute_subfields( + return_type, field_group, path, result, incremental_data_record, defer_map + ) + + def collect_and_execute_subfields( + self, + return_type: GraphQLObjectType, + field_group: FieldGroup, + path: Path, + result: Any, + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> AwaitableOrValue[dict[str, Any]]: + """Collect sub-fields to execute to complete this value.""" + grouped_field_set, new_grouped_field_set_details, new_defer_usages = ( + self.collect_subfields(return_type, field_group) + ) + + incremental_publisher = self.incremental_publisher + new_defer_map = add_new_deferred_fragments( + incremental_publisher, + new_defer_usages, + incremental_data_record, + defer_map, + path, + ) + new_deferred_grouped_field_set_records = add_new_deferred_grouped_field_sets( + incremental_publisher, new_grouped_field_set_details, new_defer_map, path + ) + + sub_fields = self.execute_fields( + return_type, + result, + path, + grouped_field_set, + incremental_data_record, + new_defer_map, + ) + + self.execute_deferred_grouped_field_sets( + return_type, + result, + path, + new_deferred_grouped_field_set_records, + new_defer_map, + ) - return self.execute_fields(return_type, result, path, sub_field_nodes) + return sub_fields def collect_subfields( - self, return_type: GraphQLObjectType, field_nodes: List[FieldNode] - ) -> Dict[str, List[FieldNode]]: + self, return_type: GraphQLObjectType, field_group: FieldGroup + ) -> CollectFieldsResult: """Collect subfields. A cached collection of relevant subfields with regard to the return type is @@ -937,29 +1310,382 @@ def collect_subfields( lists of values. """ cache = self._subfields_cache - # We cannot use the field_nodes themselves as key for the cache, since they - # are not hashable as a list. 
We also do not want to use the field_nodes - # themselves (converted to a tuple) as keys, since hashing them is slow. - # Therefore we use the ids of the field_nodes as keys. Note that we do not - # use the id of the list, since we want to hit the cache for all lists of + # We cannot use the field_group itself as key for the cache, since it + # is not hashable as a list. We also do not want to use the field_group + # itself (converted to a tuple) as keys, since hashing them is slow. + # Therefore, we use the ids of the field_group items as keys. Note that we do + # not use the id of the list, since we want to hit the cache for all lists of # the same nodes, not only for the same list of nodes. Also, the list id may # even be reused, in which case we would get wrong results from the cache. key = ( - (return_type, id(field_nodes[0])) - if len(field_nodes) == 1 # optimize most frequent case - else tuple((return_type, *map(id, field_nodes))) + (return_type, id(field_group[0])) + if len(field_group) == 1 # optimize most frequent case + else (return_type, *map(id, field_group)) ) - sub_field_nodes = cache.get(key) - if sub_field_nodes is None: - sub_field_nodes = collect_sub_fields( + sub_fields_and_patches = cache.get(key) + if sub_fields_and_patches is None: + sub_fields_and_patches = collect_subfields( self.schema, self.fragments, self.variable_values, + self.operation, return_type, - field_nodes, + field_group, + ) + cache[key] = sub_fields_and_patches + return sub_fields_and_patches + + def map_source_to_response( + self, result_or_stream: ExecutionResult | AsyncIterable[Any] + ) -> AsyncGenerator[ExecutionResult, None] | ExecutionResult: + """Map source result to response. + + For each payload yielded from a subscription, + map it over the normal GraphQL :func:`~graphql.execution.execute` function, + with ``payload`` as the ``root_value``. + This implements the "MapSourceToResponseEvent" algorithm + described in the GraphQL specification. + The :func:`~graphql.execution.execute` function provides + the "ExecuteSubscriptionEvent" algorithm, + as it is nearly identical to the "ExecuteQuery" algorithm, + for which :func:`~graphql.execution.execute` is also used. + """ + if not isinstance(result_or_stream, AsyncIterable): + return result_or_stream # pragma: no cover + + async def callback(payload: Any) -> ExecutionResult: + result = execute_impl(self.build_per_event_execution_context(payload)) + # typecast to ExecutionResult, not possible to return + # ExperimentalIncrementalExecutionResults when operation is 'subscription'. 
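
The caching scheme described in the comment above boils down to a small key function; a standalone sketch of the idea (hypothetical form of the inline logic):

```python
def subfields_cache_key(return_type, field_nodes):
    # Key by node identity rather than by the list object, so any list made
    # of the same AST nodes hits the same cache entry; single-field
    # selections (the common case) avoid building a tuple of ids.
    if len(field_nodes) == 1:
        return (return_type, id(field_nodes[0]))
    return (return_type, *map(id, field_nodes))
```
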
+ return ( + await cast("Awaitable[ExecutionResult]", result) + if self.is_awaitable(result) + else cast("ExecutionResult", result) + ) + + return map_async_iterable(result_or_stream, callback) + + def execute_deferred_grouped_field_sets( + self, + parent_type: GraphQLObjectType, + source_value: Any, + path: Path | None, + new_deferred_grouped_field_set_records: Sequence[DeferredGroupedFieldSetRecord], + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> None: + """Execute deferred grouped field sets.""" + for deferred_grouped_field_set_record in new_deferred_grouped_field_set_records: + if deferred_grouped_field_set_record.should_initiate_defer: + + async def execute_deferred_grouped_field_set( + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + ) -> None: + self.execute_deferred_grouped_field_set( + parent_type, + source_value, + path, + deferred_grouped_field_set_record, + defer_map, + ) + + self.add_task( + execute_deferred_grouped_field_set( + deferred_grouped_field_set_record + ) + ) + + else: + self.execute_deferred_grouped_field_set( + parent_type, + source_value, + path, + deferred_grouped_field_set_record, + defer_map, + ) + + def execute_deferred_grouped_field_set( + self, + parent_type: GraphQLObjectType, + source_value: Any, + path: Path | None, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> None: + """Execute deferred grouped field set.""" + incremental_publisher = self.incremental_publisher + try: + incremental_result = self.execute_fields( + parent_type, + source_value, + path, + deferred_grouped_field_set_record.grouped_field_set, + deferred_grouped_field_set_record, + defer_map, + ) + + if self.is_awaitable(incremental_result): + incremental_result = cast("Awaitable", incremental_result) + + async def await_incremental_result() -> None: + try: + result = await incremental_result + except GraphQLError as error: + incremental_publisher.mark_errored_deferred_grouped_field_set( + deferred_grouped_field_set_record, error + ) + else: + incremental_publisher.complete_deferred_grouped_field_set( + deferred_grouped_field_set_record, result + ) + + self.add_task(await_incremental_result()) + + else: + incremental_publisher.complete_deferred_grouped_field_set( + deferred_grouped_field_set_record, + incremental_result, # type: ignore + ) + + except GraphQLError as error: + incremental_publisher.mark_errored_deferred_grouped_field_set( + deferred_grouped_field_set_record, error + ) + + def execute_stream_field( + self, + path: Path, + item_path: Path, + item: AwaitableOrValue[Any], + field_group: FieldGroup, + info: GraphQLResolveInfo, + item_type: GraphQLOutputType, + incremental_data_record: IncrementalDataRecord, + stream_record: StreamRecord, + ) -> StreamItemsRecord: + """Execute stream field.""" + is_awaitable = self.is_awaitable + incremental_publisher = self.incremental_publisher + stream_items_record = StreamItemsRecord(stream_record, item_path) + incremental_publisher.report_new_stream_items_record( + stream_items_record, incremental_data_record + ) + completed_item: Any + + if is_awaitable(item): + + async def await_completed_awaitable_item() -> None: + try: + value = await self.complete_awaitable_value( + item_type, + field_group, + info, + item_path, + item, + stream_items_record, + RefMap(), + ) + except GraphQLError as error: + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + 
stream_items_record, error + ) + else: + incremental_publisher.complete_stream_items_record( + stream_items_record, [value] + ) + + self.add_task(await_completed_awaitable_item()) + return stream_items_record + + try: + try: + completed_item = self.complete_value( + item_type, + field_group, + info, + item_path, + item, + stream_items_record, + RefMap(), + ) + except Exception as raw_error: + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + stream_items_record, + ) + completed_item = None + incremental_publisher.filter(item_path, stream_items_record) + except GraphQLError as error: + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) - cache[key] = sub_field_nodes - return sub_field_nodes + return stream_items_record + + if is_awaitable(completed_item): + + async def await_completed_item() -> None: + try: + try: + value = await completed_item + except Exception as raw_error: # pragma: no cover + self.handle_field_error( + raw_error, + item_type, + field_group, + item_path, + stream_items_record, + ) + incremental_publisher.filter(item_path, stream_items_record) + value = None + except GraphQLError as error: # pragma: no cover + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error + ) + else: + incremental_publisher.complete_stream_items_record( + stream_items_record, [value] + ) + + self.add_task(await_completed_item()) + return stream_items_record + + incremental_publisher.complete_stream_items_record( + stream_items_record, [completed_item] + ) + return stream_items_record + + async def execute_stream_async_iterator_item( + self, + async_iterator: AsyncIterator[Any], + field_group: FieldGroup, + info: GraphQLResolveInfo, + item_type: GraphQLOutputType, + stream_items_record: StreamItemsRecord, + item_path: Path, + ) -> Any: + """Execute stream iterator item.""" + if async_iterator in self._canceled_iterators: + raise StopAsyncIteration # pragma: no cover + try: + item = await anext(async_iterator) + except StopAsyncIteration as raw_error: + self.incremental_publisher.set_is_completed_async_iterator( + stream_items_record + ) + raise StopAsyncIteration from raw_error + except Exception as raw_error: + raise located_error( + raw_error, + field_group.to_nodes(), + stream_items_record.stream_record.path, + ) from raw_error + else: + if stream_items_record.stream_record.errors: + raise StopAsyncIteration # pragma: no cover + try: + completed_item = self.complete_value( + item_type, + field_group, + info, + item_path, + item, + stream_items_record, + RefMap(), + ) + return ( + await completed_item + if self.is_awaitable(completed_item) + else completed_item + ) + except Exception as raw_error: + self.handle_field_error( + raw_error, item_type, field_group, item_path, stream_items_record + ) + self.incremental_publisher.filter(item_path, stream_items_record) + + async def execute_stream_async_iterator( + self, + initial_index: int, + async_iterator: AsyncIterator[Any], + field_group: FieldGroup, + info: GraphQLResolveInfo, + item_type: GraphQLOutputType, + path: Path, + incremental_data_record: IncrementalDataRecord, + stream_record: StreamRecord, + ) -> None: + """Execute stream iterator.""" + incremental_publisher = self.incremental_publisher + index = initial_index + current_incremental_data_record = incremental_data_record + + while True: + item_path = Path(path, index, None) + 
stream_items_record = StreamItemsRecord(stream_record, item_path) + incremental_publisher.report_new_stream_items_record( + stream_items_record, current_incremental_data_record + ) + + try: + completed_item = await self.execute_stream_async_iterator_item( + async_iterator, + field_group, + info, + item_type, + stream_items_record, + item_path, + ) + except GraphQLError as error: + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error + ) + if async_iterator: # pragma: no cover else + with suppress_exceptions: + await async_iterator.aclose() # type: ignore + # running generators cannot be closed since Python 3.8, + # so we need to remember that this iterator is already canceled + self._canceled_iterators.add(async_iterator) + return + except StopAsyncIteration: + done = True + completed_item = None + else: + done = False + + incremental_publisher.complete_stream_items_record( + stream_items_record, [completed_item] + ) + + if done: + break + current_incremental_data_record = stream_items_record + index += 1 + + def add_task(self, awaitable: Awaitable[Any]) -> None: + """Add the given task to the tasks set for later execution.""" + tasks = self._tasks + task = ensure_future(awaitable) + tasks.add(task) + task.add_done_callback(tasks.discard) + + +UNEXPECTED_EXPERIMENTAL_DIRECTIVES = ( + "The provided schema unexpectedly contains experimental directives" + " (@defer or @stream). These directives may only be utilized" + " if experimental execution features are explicitly enabled." +) + + +UNEXPECTED_MULTIPLE_PAYLOADS = ( + "Executing this GraphQL operation would unexpectedly produce multiple payloads" + " (due to @defer or @stream directive)" +) def execute( @@ -967,14 +1693,15 @@ def execute( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type["ExecutionContext"]] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, + is_awaitable: Callable[[Any], bool] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[ExecutionResult]: """Execute a GraphQL operation. @@ -985,16 +1712,75 @@ def execute( If the arguments to this function do not result in a legal execution context, a GraphQLError will be thrown immediately explaining the invalid input. + + This function does not support incremental delivery (`@defer` and `@stream`). + If an operation that defers or streams data is executed with this function, + it will throw an error instead. Use `experimental_execute_incrementally` if + you want to support incremental delivery. """ - # If arguments are missing or incorrect, throw an error. 
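
The `add_task` helper above follows the standard asyncio recipe for fire-and-forget work: keep a strong reference to the task until it finishes, otherwise the event loop may garbage-collect it mid-flight. In isolation:

```python
import asyncio

background_tasks = set()

def add_task(awaitable) -> None:
    # keep a strong reference, then drop it once the task completes
    task = asyncio.ensure_future(awaitable)
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)
```
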
-    assert_valid_execution_arguments(schema, document, variable_values)
+    if schema.get_directive("defer") or schema.get_directive("stream"):
+        raise GraphQLError(UNEXPECTED_EXPERIMENTAL_DIRECTIVES)
+
+    result = experimental_execute_incrementally(
+        schema,
+        document,
+        root_value,
+        context_value,
+        variable_values,
+        operation_name,
+        field_resolver,
+        type_resolver,
+        subscribe_field_resolver,
+        middleware,
+        execution_context_class,
+        is_awaitable,
+        **custom_context_args,
+    )
+    if isinstance(result, ExecutionResult):
+        return result
+    if isinstance(result, ExperimentalIncrementalExecutionResults):
+        raise GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)
+
+    async def await_result() -> Any:
+        awaited_result = await result
+        if isinstance(awaited_result, ExecutionResult):
+            return awaited_result
+        raise GraphQLError(UNEXPECTED_MULTIPLE_PAYLOADS)
+
+    return await_result()
+
+
+def experimental_execute_incrementally(
+    schema: GraphQLSchema,
+    document: DocumentNode,
+    root_value: Any = None,
+    context_value: Any = None,
+    variable_values: dict[str, Any] | None = None,
+    operation_name: str | None = None,
+    field_resolver: GraphQLFieldResolver | None = None,
+    type_resolver: GraphQLTypeResolver | None = None,
+    subscribe_field_resolver: GraphQLFieldResolver | None = None,
+    middleware: Middleware | None = None,
+    execution_context_class: type[ExecutionContext] | None = None,
+    is_awaitable: Callable[[Any], bool] | None = None,
+    **custom_context_args: Any,
+) -> AwaitableOrValue[ExecutionResult | ExperimentalIncrementalExecutionResults]:
+    """Execute a GraphQL operation incrementally.
+
+    Implements the "Executing requests" section of the GraphQL specification,
+    including `@defer` and `@stream` as proposed in
+    https://github.com/graphql/graphql-spec/pull/742
+
+    This function returns either a single ExecutionResult, an
+    ExperimentalIncrementalExecutionResults object (containing an
+    `initial_result` and a stream of `subsequent_results`), or an awaitable
+    resolving to one of these.
+    """
     if execution_context_class is None:
         execution_context_class = ExecutionContext
 
     # If a valid execution context cannot be created due to incorrect arguments,
     # a "Response" with only errors is returned.
-    exe_context = execution_context_class.build(
+    context = execution_context_class.build(
         schema,
         document,
         root_value,
@@ -1006,12 +1792,20 @@ def execute(
         subscribe_field_resolver,
         middleware,
         is_awaitable,
+        **custom_context_args,
     )
 
     # Return early errors if execution context failed.
-    if isinstance(exe_context, list):
-        return ExecutionResult(data=None, errors=exe_context)
+    if isinstance(context, list):
+        return ExecutionResult(None, errors=context)
+
+    return execute_impl(context)
+
 
+def execute_impl(
+    context: ExecutionContext,
+) -> AwaitableOrValue[ExecutionResult | ExperimentalIncrementalExecutionResults]:
+    """Execute GraphQL operation (internal implementation)."""
     # Return a possible coroutine object that will eventually yield the data described
     # by the "Response" section of the GraphQL specification.
     #
@@ -1023,31 +1817,35 @@ def execute(
     # Errors from sub-fields of a NonNull type may propagate to the top level,
     # at which point we still log the error and null the parent field, which
     # in this case is the entire response.
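
A usage sketch for the new entry point. Import paths and the `initial_result` / `subsequent_results` attribute names follow the 3.3 alpha API shown in this diff; whether a plain value or an awaitable comes back depends on the resolvers, so both cases are handled:

```python
from inspect import isawaitable

from graphql.execution import (
    ExperimentalIncrementalExecutionResults,
    experimental_execute_incrementally,
)

async def run(schema, document):
    result = experimental_execute_incrementally(schema, document)
    if isawaitable(result):
        result = await result
    if isinstance(result, ExperimentalIncrementalExecutionResults):
        print(result.initial_result)  # first payload
        async for payload in result.subsequent_results:
            print(payload)  # @defer/@stream payloads
    else:
        print(result)  # plain ExecutionResult
```
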
- errors = exe_context.errors - build_response = exe_context.build_response + incremental_publisher = context.incremental_publisher + initial_result_record = InitialResultRecord() try: - operation = exe_context.operation - result = exe_context.execute_operation(operation, root_value) + data = context.execute_operation(initial_result_record) + if context.is_awaitable(data): - if exe_context.is_awaitable(result): - # noinspection PyShadowingNames - async def await_result() -> Any: + async def await_response() -> ( + ExecutionResult | ExperimentalIncrementalExecutionResults + ): try: - return build_response(await result, errors) # type: ignore + return incremental_publisher.build_data_response( + initial_result_record, + await data, # type: ignore + ) except GraphQLError as error: - errors.append(error) - return build_response(None, errors) + return incremental_publisher.build_error_response( + initial_result_record, error + ) + + return await_response() + + return incremental_publisher.build_data_response(initial_result_record, data) # type: ignore - return await_result() except GraphQLError as error: - errors.append(error) - return build_response(None, errors) - else: - return build_response(result, errors) # type: ignore + return incremental_publisher.build_error_response(initial_result_record, error) def assume_not_awaitable(_value: Any) -> bool: - """Replacement for isawaitable if everything is assumed to be synchronous.""" + """Replacement for is_awaitable if everything is assumed to be synchronous.""" return False @@ -1056,12 +1854,12 @@ def execute_sync( document: DocumentNode, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type["ExecutionContext"]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, check_sync: bool = False, ) -> ExecutionResult: """Execute a GraphQL operation synchronously. @@ -1079,7 +1877,7 @@ def execute_sync( else (None if check_sync else assume_not_awaitable) ) - result = execute( + result = experimental_execute_incrementally( schema, document, root_value, @@ -1095,76 +1893,140 @@ def execute_sync( ) # Assert that the execution was synchronous. - if isawaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() - raise RuntimeError("GraphQL execution failed to complete synchronously.") + if default_is_awaitable(result) or isinstance( + result, ExperimentalIncrementalExecutionResults + ): + if default_is_awaitable(result): + ensure_future(cast("Awaitable[ExecutionResult]", result)).cancel() + msg = "GraphQL execution failed to complete synchronously." + raise RuntimeError(msg) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) -def assert_valid_execution_arguments( - schema: GraphQLSchema, - document: DocumentNode, - raw_variable_values: Optional[Dict[str, Any]] = None, -) -> None: - """Check that the arguments are acceptable. 
+def invalid_return_type_error( + return_type: GraphQLObjectType, result: Any, field_group: FieldGroup +) -> GraphQLError: + """Create a GraphQLError for an invalid return type.""" + return GraphQLError( + f"Expected value of type '{return_type.name}' but got: {inspect(result)}.", + field_group.to_nodes(), + ) - Essential assertions before executing to provide developer feedback for improper use - of the GraphQL library. - For internal use only. +def add_new_deferred_fragments( + incremental_publisher: IncrementalPublisher, + new_defer_usages: Sequence[DeferUsage], + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord] | None = None, + path: Path | None = None, +) -> RefMap[DeferUsage, DeferredFragmentRecord]: + """Add new deferred fragments to the defer map. + + Instantiates new DeferredFragmentRecords for the given path within an + incremental data record, returning an updated map of DeferUsage + objects to DeferredFragmentRecords. + + Note: As defer directives may be used with operations returning lists, + a DeferUsage object may correspond to many DeferredFragmentRecords. + + DeferredFragmentRecord creation includes the following steps: + 1. The new DeferredFragmentRecord is instantiated at the given path. + 2. The parent result record is calculated from the given incremental data record. + 3. The IncrementalPublisher is notified that a new DeferredFragmentRecord + with the calculated parent has been added; the record will be released only + after the parent has completed. """ - if not document: - raise TypeError("Must provide document.") - - # If the schema used for execution is invalid, throw an error. - assert_valid_schema(schema) - - # Variables, if provided, must be a dictionary. - if not (raw_variable_values is None or isinstance(raw_variable_values, dict)): - raise TypeError( - "Variable values must be provided as a dictionary" - " with variable names as keys. Perhaps look to see" - " if an unparsed JSON string was provided." + if not new_defer_usages: + # Given no DeferUsages, return the existing map, creating one if necessary. + return RefMap() if defer_map is None else defer_map + + # Create a copy of the old map. + new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + + # For each new DeferUsage object: + for defer_usage in new_defer_usages: + ancestors = defer_usage.ancestors + parent_defer_usage = ancestors[0] if ancestors else None + + # If the parent target is defined, the parent target is a DeferUsage object + # and the parent result record is the DeferredFragmentRecord corresponding + # to that DeferUsage. + # If the parent target is not defined, the parent result record is either: + # - the InitialResultRecord, or + # - a StreamItemsRecord, as `@defer` may be nested under `@stream`. + parent = ( + cast( + "Union[InitialResultRecord, StreamItemsRecord]", incremental_data_record + ) + if parent_defer_usage is None + else deferred_fragment_record_from_defer_usage( + parent_defer_usage, new_defer_map + ) ) + # Instantiate the new record. + deferred_fragment_record = DeferredFragmentRecord(path, defer_usage.label) -def get_field_def( - schema: GraphQLSchema, parent_type: GraphQLObjectType, field_node: FieldNode -) -> GraphQLField: - """Get field definition. + # Report the new record to the Incremental Publisher. + incremental_publisher.report_new_defer_fragment_record( + deferred_fragment_record, parent + ) - This method looks up the field on the given type definition. 
It has special casing - for the three introspection fields, ``__schema``, ``__type`, and ``__typename``. - ``__typename`` is special because it can always be queried as a field, even in - situations where no other fields are allowed, like on a Union. ``__schema`` and - ``__type`` could get automatically added to the query type, but that would require - mutating type definitions, which would cause issues. + # Update the map. + new_defer_map[defer_usage] = deferred_fragment_record - For internal use only. - """ - field_name = field_node.name.value + return new_defer_map - if field_name == "__schema" and schema.query_type == parent_type: - return SchemaMetaFieldDef - elif field_name == "__type" and schema.query_type == parent_type: - return TypeMetaFieldDef - elif field_name == "__typename": - return TypeNameMetaFieldDef - return parent_type.fields.get(field_name) +def deferred_fragment_record_from_defer_usage( + defer_usage: DeferUsage, defer_map: RefMap[DeferUsage, DeferredFragmentRecord] +) -> DeferredFragmentRecord: + """Get the deferred fragment record mapped to the given defer usage.""" + return defer_map[defer_usage] -def invalid_return_type_error( - return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode] -) -> GraphQLError: - """Create a GraphQLError for an invalid return type.""" - return GraphQLError( - f"Expected value of type '{return_type.name}' but got: {inspect(result)}.", - field_nodes, - ) + +def add_new_deferred_grouped_field_sets( + incremental_publisher: IncrementalPublisher, + new_grouped_field_set_details: Mapping[DeferUsageSet, GroupedFieldSetDetails], + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + path: Path | None = None, +) -> list[DeferredGroupedFieldSetRecord]: + """Add new deferred grouped field sets to the defer map.""" + new_deferred_grouped_field_set_records: list[DeferredGroupedFieldSetRecord] = [] + + for ( + new_grouped_field_set_defer_usages, + grouped_field_set_details, + ) in new_grouped_field_set_details.items(): + deferred_fragment_records = get_deferred_fragment_records( + new_grouped_field_set_defer_usages, defer_map + ) + deferred_grouped_field_set_record = DeferredGroupedFieldSetRecord( + deferred_fragment_records, + grouped_field_set_details.grouped_field_set, + grouped_field_set_details.should_initiate_defer, + path, + ) + incremental_publisher.report_new_deferred_grouped_filed_set_record( + deferred_grouped_field_set_record + ) + new_deferred_grouped_field_set_records.append(deferred_grouped_field_set_record) + + return new_deferred_grouped_field_set_records -def get_typename(value: Any) -> Optional[str]: +def get_deferred_fragment_records( + defer_usages: DeferUsageSet, defer_map: RefMap[DeferUsage, DeferredFragmentRecord] +) -> list[DeferredFragmentRecord]: + """Get the deferred fragment records for the given defer usages.""" + return [ + deferred_fragment_record_from_defer_usage(defer_usage, defer_map) + for defer_usage in defer_usages + ] + + +def get_typename(value: Any) -> str | None: """Get the ``__typename`` property of the given value.""" if isinstance(value, Mapping): return value.get("__typename") @@ -1178,7 +2040,7 @@ def get_typename(value: Any) -> Optional[str]: def default_type_resolver( value: Any, info: GraphQLResolveInfo, abstract_type: GraphQLAbstractType -) -> AwaitableOrValue[Optional[str]]: +) -> AwaitableOrValue[str | None]: """Default type resolver function. 
If a resolve_type function is not given, then a default resolve behavior is used @@ -1199,9 +2061,9 @@ def default_type_resolver( # Otherwise, test each possible type. possible_types = info.schema.get_possible_types(abstract_type) is_awaitable = info.is_awaitable - awaitable_is_type_of_results: List[Awaitable] = [] + awaitable_is_type_of_results: list[Awaitable] = [] append_awaitable_results = awaitable_is_type_of_results.append - awaitable_types: List[GraphQLObjectType] = [] + awaitable_types: list[GraphQLObjectType] = [] append_awaitable_types = awaitable_types.append for type_ in possible_types: @@ -1209,14 +2071,14 @@ def default_type_resolver( is_type_of_result = type_.is_type_of(value, info) if is_awaitable(is_type_of_result): - append_awaitable_results(cast(Awaitable, is_type_of_result)) + append_awaitable_results(cast("Awaitable", is_type_of_result)) append_awaitable_types(type_) elif is_type_of_result: return type_.name if awaitable_is_type_of_results: # noinspection PyShadowingNames - async def get_type() -> Optional[str]: + async def get_type() -> str | None: is_type_of_results = await gather(*awaitable_is_type_of_results) for is_type_of_result, type_ in zip(is_type_of_results, awaitable_types): if is_type_of_result: @@ -1249,3 +2111,240 @@ def default_field_resolver(source: Any, info: GraphQLResolveInfo, **args: Any) - if callable(value): return value(info, **args) return value + + +def subscribe( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any = None, + context_value: Any = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + execution_context_class: type[ExecutionContext] | None = None, + middleware: MiddlewareManager | None = None, + **custom_context_args: Any, +) -> AwaitableOrValue[AsyncIterator[ExecutionResult] | ExecutionResult]: + """Create a GraphQL subscription. + + Implements the "Subscribe" algorithm described in the GraphQL spec. + + Returns a coroutine object which yields either an AsyncIterator (if successful) or + an ExecutionResult (client error). The coroutine will raise an exception if a server + error occurs. + + If the client-provided arguments to this function do not result in a compliant + subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no + data will be returned. + + If the source stream could not be created due to faulty subscription resolver logic + or underlying systems, the coroutine object will yield a single ExecutionResult + containing ``errors`` and no ``data``. + + If the operation succeeded, the coroutine will yield an AsyncIterator, which yields + a stream of ExecutionResults representing the response stream. + + This function does not support incremental delivery (`@defer` and `@stream`). + If an operation that defers or streams data is executed with this function, + a field error will be raised at the location of the `@defer` or `@stream` directive. + """ + if execution_context_class is None: + execution_context_class = ExecutionContext + + # If a valid context cannot be created due to incorrect arguments, + # a "Response" with only errors is returned. 
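
The ``__typename`` shortcut in `default_type_resolver` above makes dict-shaped results work with abstract types out of the box, with no explicit `resolve_type` function:

```python
from graphql import build_schema, graphql_sync

schema = build_schema("""
    interface Pet { name: String }
    type Dog implements Pet { name: String }
    type Cat implements Pet { name: String }
    type Query { pet: Pet }
""")

result = graphql_sync(
    schema,
    "{ pet { __typename name } }",
    root_value={"pet": {"__typename": "Dog", "name": "Rex"}},
)
assert result.data == {"pet": {"__typename": "Dog", "name": "Rex"}}
```
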
+ context = execution_context_class.build( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + field_resolver, + type_resolver, + subscribe_field_resolver, + middleware=middleware, + **custom_context_args, + ) + + # Return early errors if execution context failed. + if isinstance(context, list): + return ExecutionResult(None, errors=context) + + result_or_stream = create_source_event_stream_impl(context) + + if context.is_awaitable(result_or_stream): + # noinspection PyShadowingNames + async def await_result() -> Any: + awaited_result_or_stream = await result_or_stream # type: ignore + if isinstance(awaited_result_or_stream, ExecutionResult): + return awaited_result_or_stream + return context.map_source_to_response(awaited_result_or_stream) + + return await_result() + + if isinstance(result_or_stream, ExecutionResult): + return result_or_stream + + return context.map_source_to_response(result_or_stream) # type: ignore + + +def create_source_event_stream( + schema: GraphQLSchema, + document: DocumentNode, + root_value: Any = None, + context_value: Any = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + subscribe_field_resolver: GraphQLFieldResolver | None = None, + execution_context_class: type[ExecutionContext] | None = None, + **custom_context_args: Any, +) -> AwaitableOrValue[AsyncIterable[Any] | ExecutionResult]: + """Create source event stream + + Implements the "CreateSourceEventStream" algorithm described in the GraphQL + specification, resolving the subscription source event stream. + + Returns a coroutine that yields an AsyncIterable. + + If the client-provided arguments to this function do not result in a compliant + subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no + data will be returned. + + If the source stream could not be created due to faulty subscription resolver logic + or underlying systems, the coroutine object will yield a single ExecutionResult + containing ``errors`` and no ``data``. + + A source event stream represents a sequence of events, each of which triggers a + GraphQL execution for that event. + + This may be useful when hosting the stateful subscription service in a different + process or machine than the stateless GraphQL execution engine, or otherwise + separating these two steps. For more on this, see the "Supporting Subscriptions + at Scale" information in the GraphQL spec. + """ + # If a valid context cannot be created due to incorrect arguments, + # a "Response" with only errors is returned. + context = (execution_context_class or ExecutionContext).build( + schema, + document, + root_value, + context_value, + variable_values, + operation_name, + field_resolver, + type_resolver, + subscribe_field_resolver, + **custom_context_args, + ) + + # Return early errors if execution context failed. 
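# Sketch (illustrative; assumes a schema like the one in the sketch above):
# consuming the source event stream directly, e.g. when the stateful
# subscription service runs apart from the stateless execution engine.
from inspect import isawaitable

from graphql import parse
from graphql.execution import ExecutionResult, create_source_event_stream


async def consume_raw_events(schema) -> None:
    stream = create_source_event_stream(
        schema, parse("subscription { greetings }")
    )
    if isawaitable(stream):
        stream = await stream
    if isinstance(stream, ExecutionResult):  # request errors are reported here
        print(stream.errors)
        return
    async for event in stream:  # raw payloads, e.g. {"greetings": "hello"}
        print(event)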
+ if isinstance(context, list): + return ExecutionResult(None, errors=context) + + return create_source_event_stream_impl(context) + + +def create_source_event_stream_impl( + context: ExecutionContext, +) -> AwaitableOrValue[AsyncIterable[Any] | ExecutionResult]: + """Create source event stream (internal implementation).""" + try: + event_stream = execute_subscription(context) + except GraphQLError as error: + return ExecutionResult(None, errors=[error]) + + if context.is_awaitable(event_stream): + awaitable_event_stream = cast("Awaitable", event_stream) + + # noinspection PyShadowingNames + async def await_event_stream() -> AsyncIterable[Any] | ExecutionResult: + try: + return await awaitable_event_stream + except GraphQLError as error: + return ExecutionResult(None, errors=[error]) + + return await_event_stream() + + return event_stream + + +def execute_subscription( + context: ExecutionContext, +) -> AwaitableOrValue[AsyncIterable[Any]]: + schema = context.schema + + root_type = schema.subscription_type + if root_type is None: + msg = "Schema is not configured to execute subscription operation." + raise GraphQLError(msg, context.operation) + + grouped_field_set = collect_fields( + schema, + context.fragments, + context.variable_values, + root_type, + context.operation, + ).grouped_field_set + first_root_field = next(iter(grouped_field_set.items())) + response_name, field_group = first_root_field + field_name = field_group.fields[0].node.name.value + field_def = schema.get_field(root_type, field_name) + + if not field_def: + msg = f"The subscription field '{field_name}' is not defined." + raise GraphQLError(msg, field_group.to_nodes()) + + path = Path(None, response_name, root_type.name) + info = context.build_resolve_info(field_def, field_group, root_type, path) + + # Implements the "ResolveFieldEventStream" algorithm from GraphQL specification. + # It differs from "ResolveFieldValue" due to providing a different `resolveFn`. + + try: + # Build a dictionary of arguments from the field.arguments AST, using the + # variables scope to fulfill any variable references. + args = get_argument_values( + field_def, field_group.fields[0].node, context.variable_values + ) + + # Call the `subscribe()` resolver or the default resolver to produce an + # AsyncIterable yielding raw payloads. + resolve_fn = field_def.subscribe or context.subscribe_field_resolver + + result = resolve_fn(context.root_value, info, **args) + if context.is_awaitable(result): + # noinspection PyShadowingNames + async def await_result() -> AsyncIterable[Any]: + try: + return assert_event_stream(await result) + except Exception as error: + raise located_error( + error, field_group.to_nodes(), path.as_list() + ) from error + + return await_result() + + return assert_event_stream(result) + + except Exception as error: + raise located_error(error, field_group.to_nodes(), path.as_list()) from error + + +def assert_event_stream(result: Any) -> AsyncIterable: + if isinstance(result, Exception): + raise result + + # Assert field returned an event stream, otherwise yield an error. + if not isinstance(result, AsyncIterable): + msg = ( + "Subscription field must return AsyncIterable." + f" Received: {inspect(result)}." 
+ ) + raise GraphQLError(msg) + + return result diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py new file mode 100644 index 00000000..839f62d8 --- /dev/null +++ b/src/graphql/execution/incremental_publisher.py @@ -0,0 +1,1317 @@ +"""Incremental Publisher""" + +from __future__ import annotations + +from asyncio import Event, ensure_future, gather, sleep +from contextlib import suppress +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + Awaitable, + Callable, + Collection, + Iterator, + NamedTuple, + Union, +) + +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict + +from ..pyutils import RefSet + +if TYPE_CHECKING: + from ..error import GraphQLError, GraphQLFormattedError + from ..pyutils import Path + from .collect_fields import GroupedFieldSet + +__all__ = [ + "ASYNC_DELAY", + "DeferredFragmentRecord", + "ExecutionResult", + "ExperimentalIncrementalExecutionResults", + "FormattedExecutionResult", + "FormattedIncrementalDeferResult", + "FormattedIncrementalResult", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "IncrementalDataRecord", + "IncrementalDeferResult", + "IncrementalPublisher", + "IncrementalResult", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", + "InitialResultRecord", + "StreamItemsRecord", + "SubsequentIncrementalExecutionResult", +] + + +ASYNC_DELAY = 1 / 512 # wait time in seconds for deferring execution + +suppress_key_error = suppress(KeyError) + + +class FormattedPendingResult(TypedDict, total=False): + """Formatted pending execution result""" + + id: str + path: list[str | int] + label: str + + +class PendingResult: + """Pending execution result""" + + id: str + path: list[str | int] + label: str | None + + __slots__ = "id", "label", "path" + + def __init__( + self, + id: str, # noqa: A002 + path: list[str | int], + label: str | None = None, + ) -> None: + self.id = id + self.path = path + self.label = label + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"id={self.id!r}, path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedPendingResult: + """Get pending result formatted according to the specification.""" + formatted: FormattedPendingResult = {"id": self.id, "path": self.path} + if self.label is not None: + formatted["label"] = self.label + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("id") == self.id + and (other.get("path") or None) == (self.path or None) + and (other.get("label") or None) == (self.label or None) + ) + + if isinstance(other, tuple): + size = len(other) + return 1 < size < 4 and (self.id, self.path, self.label)[:size] == other + return ( + isinstance(other, self.__class__) + and other.id == self.id + and other.path == self.path + and other.label == self.label + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class FormattedCompletedResult(TypedDict, total=False): + """Formatted completed execution result""" + + id: str + errors: list[GraphQLFormattedError] + + +class CompletedResult: + """Completed execution result""" + + id: str + errors: list[GraphQLError] | None + + __slots__ = "errors", "id" + + def __init__( + self, + id: str, # noqa: A002 + errors: 
list[GraphQLError] | None = None, + ) -> None: + self.id = id + self.errors = errors + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"id={self.id!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedCompletedResult: + """Get completed result formatted according to the specification.""" + formatted: FormattedCompletedResult = {"id": self.id} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return other.get("id") == self.id and (other.get("errors") or None) == ( + self.errors or None + ) + if isinstance(other, tuple): + size = len(other) + return 1 < size < 3 and (self.id, self.errors)[:size] == other + return ( + isinstance(other, self.__class__) + and other.id == self.id + and other.errors == self.errors + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class IncrementalUpdate(NamedTuple): + """Incremental update""" + + pending: list[PendingResult] + incremental: list[IncrementalResult] + completed: list[CompletedResult] + + +class FormattedExecutionResult(TypedDict, total=False): + """Formatted execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + extensions: dict[str, Any] + + +class ExecutionResult: + """The result of GraphQL execution. + + - ``data`` is the result of a successful execution of the query. + - ``errors`` is included when any errors occurred as a non-empty list. + - ``extensions`` is reserved for adding non-standard properties. + """ + + __slots__ = "data", "errors", "extensions" + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + extensions: dict[str, Any] | None + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + ext = "" if self.extensions is None else f", extensions={self.extensions!r}" + return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" + + def __iter__(self) -> Iterator[Any]: + return iter((self.data, self.errors)) + + @property + def formatted(self) -> FormattedExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + (other.get("data") == self.data) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("extensions") or None) == (self.extensions or None) + ) + if isinstance(other, tuple): + if len(other) == 2: + return other == (self.data, self.errors) + return other == (self.data, self.errors, self.extensions) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): + """Formatted initial incremental 
execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + pending: list[FormattedPendingResult] + hasNext: bool + incremental: list[FormattedIncrementalResult] + extensions: dict[str, Any] + + +class InitialIncrementalExecutionResult: + """Initial incremental execution result.""" + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + pending: list[PendingResult] + has_next: bool + extensions: dict[str, Any] | None + + __slots__ = "data", "errors", "extensions", "has_next", "pending" + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + pending: list[PendingResult] | None = None, + has_next: bool = False, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.pending = pending or [] + self.has_next = has_next + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"data={self.data!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") + if self.pending: + args.append(f"pending={self.pending!r}") + if self.has_next: + args.append("has_next") + if self.extensions: + args.append(f"extensions={self.extensions!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedInitialIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + formatted["pending"] = [pending.formatted for pending in self.pending] + formatted["hasNext"] = self.has_next + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and (other.get("errors") or None) == (self.errors or None) + and (other.get("pending") or None) == (self.pending or None) + and (other.get("hasNext") or None) == (self.has_next or None) + and (other.get("extensions") or None) == (self.extensions or None) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and ( + self.data, + self.errors, + self.pending, + self.has_next, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.pending == self.pending + and other.has_next == self.has_next + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class ExperimentalIncrementalExecutionResults(NamedTuple): + """Execution results when retrieved incrementally.""" + + initial_result: InitialIncrementalExecutionResult + subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] + + +class FormattedIncrementalDeferResult(TypedDict, total=False): + """Formatted incremental deferred execution result""" + + data: dict[str, Any] + id: str + subPath: list[str | int] + errors: list[GraphQLFormattedError] + extensions: dict[str, Any] + + +class IncrementalDeferResult: + """Incremental deferred execution result""" + + data: dict[str, Any] + id: str + sub_path: list[str | int] | None + errors: list[GraphQLError] | None + extensions: dict[str, Any] | None + + __slots__ = "data", "errors", "extensions", "id", "sub_path" + + def __init__( + self, + data: dict[str, Any], + id: 
str, # noqa: A002 + sub_path: list[str | int] | None = None, + errors: list[GraphQLError] | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.id = id + self.sub_path = sub_path + self.errors = errors + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"data={self.data!r}, id={self.id!r}"] + if self.sub_path is not None: + args.append(f"sub_path={self.sub_path!r}") + if self.errors is not None: + args.append(f"errors={self.errors!r}") + if self.extensions is not None: + args.append(f"extensions={self.extensions!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedIncrementalDeferResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedIncrementalDeferResult = { + "data": self.data, + "id": self.id, + } + if self.sub_path is not None: + formatted["subPath"] = self.sub_path + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and other.get("id") == self.id + and (other.get("subPath") or None) == (self.sub_path or None) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("extensions") or None) == (self.extensions or None) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and (self.data, self.id, self.sub_path, self.errors, self.extensions)[ + :size + ] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.id == self.id + and other.sub_path == self.sub_path + and other.errors == self.errors + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class FormattedIncrementalStreamResult(TypedDict, total=False): + """Formatted incremental stream execution result""" + + items: list[Any] + id: str + subPath: list[str | int] + errors: list[GraphQLFormattedError] + extensions: dict[str, Any] + + +class IncrementalStreamResult: + """Incremental streamed execution result""" + + items: list[Any] + id: str + sub_path: list[str | int] | None + errors: list[GraphQLError] | None + extensions: dict[str, Any] | None + + __slots__ = "errors", "extensions", "id", "items", "label", "sub_path" + + def __init__( + self, + items: list[Any], + id: str, # noqa: A002 + sub_path: list[str | int] | None = None, + errors: list[GraphQLError] | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.items = items + self.id = id + self.sub_path = sub_path + self.errors = errors + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"items={self.items!r}, id={self.id!r}"] + if self.sub_path is not None: + args.append(f"sub_path={self.sub_path!r}") + if self.errors is not None: + args.append(f"errors={self.errors!r}") + if self.extensions is not None: + args.append(f"extensions={self.extensions!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedIncrementalStreamResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedIncrementalStreamResult = { + "items": self.items, + "id": self.id, + } + if self.sub_path is not None: + formatted["subPath"] = 
self.sub_path + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("items") == self.items + and other.get("id") == self.id + and (other.get("subPath", None) == (self.sub_path or None)) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("extensions", None) == (self.extensions or None)) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and (self.items, self.id, self.sub_path, self.errors, self.extensions)[ + :size + ] + == other + ) + return ( + isinstance(other, self.__class__) + and other.items == self.items + and other.id == self.id + and other.sub_path == self.sub_path + and other.errors == self.errors + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +FormattedIncrementalResult = Union[ + FormattedIncrementalDeferResult, FormattedIncrementalStreamResult +] + +IncrementalResult = Union[IncrementalDeferResult, IncrementalStreamResult] + + +class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): + """Formatted subsequent incremental execution result""" + + hasNext: bool + pending: list[FormattedPendingResult] + incremental: list[FormattedIncrementalResult] + completed: list[FormattedCompletedResult] + extensions: dict[str, Any] + + +class SubsequentIncrementalExecutionResult: + """Subsequent incremental execution result.""" + + __slots__ = "completed", "extensions", "has_next", "incremental", "pending" + + has_next: bool + pending: list[PendingResult] | None + incremental: list[IncrementalResult] | None + completed: list[CompletedResult] | None + extensions: dict[str, Any] | None + + def __init__( + self, + has_next: bool = False, + pending: list[PendingResult] | None = None, + incremental: list[IncrementalResult] | None = None, + completed: list[CompletedResult] | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.has_next = has_next + self.pending = pending or [] + self.incremental = incremental + self.completed = completed + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [] + if self.has_next: + args.append("has_next") + if self.pending: + args.append(f"pending[{len(self.pending)}]") + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.completed: + args.append(f"completed[{len(self.completed)}]") + if self.extensions: + args.append(f"extensions={self.extensions!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedSubsequentIncrementalExecutionResult = {} + formatted["hasNext"] = self.has_next + if self.pending: + formatted["pending"] = [result.formatted for result in self.pending] + if self.incremental: + formatted["incremental"] = [result.formatted for result in self.incremental] + if self.completed: + formatted["completed"] = [result.formatted for result in self.completed] + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + (other.get("hasNext") or None) == (self.has_next or 
None) + and (other.get("pending") or None) == (self.pending or None) + and (other.get("incremental") or None) == (self.incremental or None) + and (other.get("completed") or None) == (self.completed or None) + and (other.get("extensions") or None) == (self.extensions or None) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and ( + self.has_next, + self.pending, + self.incremental, + self.completed, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.has_next == self.has_next + and self.pending == other.pending + and other.incremental == self.incremental + and other.completed == self.completed + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class InitialResult(NamedTuple): + """The state of the initial result""" + + children: dict[IncrementalDataRecord, None] + is_completed: bool + + +class IncrementalPublisher: + """Publish incremental results. + + This class is used to publish incremental results to the client, enabling + semi-concurrent execution while preserving result order. + + The internal publishing state is managed as follows: + + ``_released``: the set of Subsequent Result records that are ready to be sent to the + client, i.e. their parents have completed and they have also completed. + + ``_pending``: the set of Subsequent Result records that are definitely pending, i.e. + their parents have completed so that they can no longer be filtered. This includes + all Subsequent Result records in `released`, as well as the records that have not + yet completed. + + Note: Instead of sets we use dicts (with values set to None) which preserve order + and thereby achieve more deterministic results. 
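    For orientation (an illustrative note, not from the original docstring),
    the lifecycle handled by the private helpers further down is roughly::

        publisher._introduce(record)  # parent completed; record is pending
        publisher._release(record)    # record completed; move to released
        publisher._push(record)       # introduce and release in one step

    ``_release`` only moves records that are already pending and then sets
    the resolve event so that ``_subscribe`` can emit the next result.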
+ """ + + _next_id: int + _released: dict[SubsequentResultRecord, None] + _pending: dict[SubsequentResultRecord, None] + _resolve: Event | None + _tasks: set[Awaitable] + + def __init__(self) -> None: + self._next_id = 0 + self._released = {} + self._pending = {} + self._resolve = None # lazy initialization + self._tasks = set() + + @staticmethod + def report_new_defer_fragment_record( + deferred_fragment_record: DeferredFragmentRecord, + parent_incremental_result_record: InitialResultRecord + | DeferredFragmentRecord + | StreamItemsRecord, + ) -> None: + """Report a new deferred fragment record.""" + parent_incremental_result_record.children[deferred_fragment_record] = None + + @staticmethod + def report_new_deferred_grouped_filed_set_record( + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + ) -> None: + """Report a new deferred grouped field set record.""" + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + deferred_fragment_record._pending[deferred_grouped_field_set_record] = None # noqa: SLF001 + deferred_fragment_record.deferred_grouped_field_set_records[ + deferred_grouped_field_set_record + ] = None + + @staticmethod + def report_new_stream_items_record( + stream_items_record: StreamItemsRecord, + parent_incremental_data_record: IncrementalDataRecord, + ) -> None: + """Report a new stream items record.""" + if isinstance(parent_incremental_data_record, DeferredGroupedFieldSetRecord): + for parent in parent_incremental_data_record.deferred_fragment_records: + parent.children[stream_items_record] = None + else: + parent_incremental_data_record.children[stream_items_record] = None + + def complete_deferred_grouped_field_set( + self, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + data: dict[str, Any], + ) -> None: + """Complete the given deferred grouped field set record with the given data.""" + deferred_grouped_field_set_record.data = data + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + pending = deferred_fragment_record._pending # noqa: SLF001 + del pending[deferred_grouped_field_set_record] + if not pending: + self.complete_deferred_fragment_record(deferred_fragment_record) + + def mark_errored_deferred_grouped_field_set( + self, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + error: GraphQLError, + ) -> None: + """Mark the given deferred grouped field set record as errored.""" + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + deferred_fragment_record.errors.append(error) + self.complete_deferred_fragment_record(deferred_fragment_record) + + def complete_deferred_fragment_record( + self, deferred_fragment_record: DeferredFragmentRecord + ) -> None: + """Complete the given deferred fragment record.""" + self._release(deferred_fragment_record) + + def complete_stream_items_record( + self, + stream_items_record: StreamItemsRecord, + items: list[Any], + ) -> None: + """Complete the given stream items record.""" + stream_items_record.items = items + stream_items_record.is_completed = True + self._release(stream_items_record) + + def mark_errored_stream_items_record( + self, stream_items_record: StreamItemsRecord, error: GraphQLError + ) -> None: + """Mark the given stream items record as errored.""" + stream_items_record.stream_record.errors.append(error) + self.set_is_final_record(stream_items_record) + stream_items_record.is_completed = True + early_return = 
stream_items_record.stream_record.early_return + if early_return: + self._add_task(early_return()) + self._release(stream_items_record) + + @staticmethod + def set_is_final_record(stream_items_record: StreamItemsRecord) -> None: + """Mark stream items record as final.""" + stream_items_record.is_final_record = True + + def set_is_completed_async_iterator( + self, stream_items_record: StreamItemsRecord + ) -> None: + """Mark async iterator for stream items as completed.""" + stream_items_record.is_completed_async_iterator = True + self.set_is_final_record(stream_items_record) + + def add_field_error( + self, incremental_data_record: IncrementalDataRecord, error: GraphQLError + ) -> None: + """Add a field error to the given incremental data record.""" + incremental_data_record.errors.append(error) + + def build_data_response( + self, initial_result_record: InitialResultRecord, data: dict[str, Any] | None + ) -> ExecutionResult | ExperimentalIncrementalExecutionResults: + """Build response for the given data.""" + for child in initial_result_record.children: + if child.filtered: + continue + self._publish(child) + + errors = initial_result_record.errors or None + if errors: + errors.sort( + key=lambda error: ( + error.locations or [], + error.path or [], + error.message, + ) + ) + pending = self._pending + if pending: + pending_sources: RefSet[DeferredFragmentRecord | StreamRecord] = RefSet( + subsequent_result_record.stream_record + if isinstance(subsequent_result_record, StreamItemsRecord) + else subsequent_result_record + for subsequent_result_record in pending + ) + return ExperimentalIncrementalExecutionResults( + initial_result=InitialIncrementalExecutionResult( + data, + errors, + pending=self._pending_sources_to_results(pending_sources), + has_next=True, + ), + subsequent_results=self._subscribe(), + ) + return ExecutionResult(data, errors) + + def build_error_response( + self, initial_result_record: InitialResultRecord, error: GraphQLError + ) -> ExecutionResult: + """Build response for the given error.""" + errors = initial_result_record.errors + errors.append(error) + # Sort the error list in order to make it deterministic, since we might have + # been using parallel execution. 
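# Illustrative self-check of this sort key (not part of the changeset):
# tuple comparison over (locations, path, message) yields one canonical
# order even when parallel execution produced the errors in arbitrary order.
from graphql import GraphQLError

sample = [
    GraphQLError("b", path=["user", 1]),
    GraphQLError("a", path=["user", 0]),
]
sample.sort(key=lambda e: (e.locations or [], e.path or [], e.message))
assert [e.path for e in sample] == [["user", 0], ["user", 1]]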
+ errors.sort( + key=lambda error: (error.locations or [], error.path or [], error.message) + ) + return ExecutionResult(None, errors) + + def filter( + self, + null_path: Path | None, + erroring_incremental_data_record: IncrementalDataRecord, + ) -> None: + """Filter out the given erroring incremental data record.""" + null_path_list = null_path.as_list() if null_path else [] + + streams: list[StreamRecord] = [] + + children = self._get_children(erroring_incremental_data_record) + descendants = self._get_descendants(children) + + for child in descendants: + if not self._nulls_child_subsequent_result_record(child, null_path_list): + continue + + child.filtered = True + + if isinstance(child, StreamItemsRecord): + streams.append(child.stream_record) + + early_returns = [] + for stream in streams: + early_return = stream.early_return + if early_return: + early_returns.append(early_return()) + if early_returns: + self._add_task(gather(*early_returns)) + + def _pending_sources_to_results( + self, + pending_sources: RefSet[DeferredFragmentRecord | StreamRecord], + ) -> list[PendingResult]: + """Convert pending sources to pending results.""" + pending_results: list[PendingResult] = [] + for pending_source in pending_sources: + pending_source.pending_sent = True + id_ = self._get_next_id() + pending_source.id = id_ + pending_results.append( + PendingResult(id_, pending_source.path, pending_source.label) + ) + return pending_results + + def _get_next_id(self) -> str: + """Get the next ID for pending results.""" + id_ = self._next_id + self._next_id += 1 + return str(id_) + + async def _subscribe( + self, + ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + """Subscribe to the incremental results.""" + is_done = False + pending = self._pending + + await sleep(0) # execute pending tasks + + try: + while not is_done: + released = self._released + for item in released: + with suppress_key_error: + del pending[item] + self._released = {} + + result = self._get_incremental_result(released) + + if not self._pending: + is_done = True + + if result is not None: + yield result + else: + resolve = self._resolve + if resolve is None: + self._resolve = resolve = Event() + await resolve.wait() + finally: + streams: list[StreamRecord] = [] + descendants = self._get_descendants(pending) + for subsequent_result_record in descendants: # pragma: no cover + if isinstance(subsequent_result_record, StreamItemsRecord): + streams.append(subsequent_result_record.stream_record) + early_returns = [] + for stream in streams: # pragma: no cover + early_return = stream.early_return + if early_return: + early_returns.append(early_return()) + if early_returns: # pragma: no cover + await gather(*early_returns) + + def _trigger(self) -> None: + """Trigger the resolve event.""" + resolve = self._resolve + if resolve is not None: + resolve.set() + self._resolve = Event() + + def _introduce(self, item: SubsequentResultRecord) -> None: + """Introduce a new IncrementalDataRecord.""" + self._pending[item] = None + + def _release(self, item: SubsequentResultRecord) -> None: + """Release the given IncrementalDataRecord.""" + if item in self._pending: + self._released[item] = None + self._trigger() + + def _push(self, item: SubsequentResultRecord) -> None: + """Push the given IncrementalDataRecord.""" + self._released[item] = None + self._pending[item] = None + self._trigger() + + def _get_incremental_result( + self, completed_records: Collection[SubsequentResultRecord] + ) -> SubsequentIncrementalExecutionResult | None: + 
"""Get the incremental result with the completed records.""" + update = self._process_pending(completed_records) + pending, incremental, completed = ( + update.pending, + update.incremental, + update.completed, + ) + + has_next = bool(self._pending) + if not incremental and not completed and has_next: + return None + + return SubsequentIncrementalExecutionResult( + has_next, pending or None, incremental or None, completed or None + ) + + def _process_pending( + self, + completed_records: Collection[SubsequentResultRecord], + ) -> IncrementalUpdate: + """Process the pending records.""" + new_pending_sources: RefSet[DeferredFragmentRecord | StreamRecord] = RefSet() + incremental_results: list[IncrementalResult] = [] + completed_results: list[CompletedResult] = [] + to_result = self._completed_record_to_result + for subsequent_result_record in completed_records: + for child in subsequent_result_record.children: + if child.filtered: + continue + pending_source: DeferredFragmentRecord | StreamRecord = ( + child.stream_record + if isinstance(child, StreamItemsRecord) + else child + ) + if not pending_source.pending_sent: + new_pending_sources.add(pending_source) + self._publish(child) + incremental_result: IncrementalResult + if isinstance(subsequent_result_record, StreamItemsRecord): + if subsequent_result_record.is_final_record: + stream_record = subsequent_result_record.stream_record + new_pending_sources.discard(stream_record) + completed_results.append(to_result(stream_record)) + if subsequent_result_record.is_completed_async_iterator: + # async iterable resolver finished but there may be pending payload + continue + if subsequent_result_record.stream_record.errors: + continue + incremental_result = IncrementalStreamResult( + # safe because `items` is always defined + # when the record is completed + subsequent_result_record.items, + # safe because `id` is defined + # once the stream has been released as pending + subsequent_result_record.stream_record.id, # type: ignore + ) + if subsequent_result_record.errors: + incremental_result.errors = subsequent_result_record.errors + incremental_results.append(incremental_result) + else: + new_pending_sources.discard(subsequent_result_record) + completed_results.append(to_result(subsequent_result_record)) + if subsequent_result_record.errors: + continue + for ( + deferred_grouped_field_set_record + ) in subsequent_result_record.deferred_grouped_field_set_records: + if not deferred_grouped_field_set_record.sent: + deferred_grouped_field_set_record.sent = True + incremental_result = self._get_incremental_defer_result( + deferred_grouped_field_set_record + ) + if deferred_grouped_field_set_record.errors: + incremental_result.errors = ( + deferred_grouped_field_set_record.errors + ) + incremental_results.append(incremental_result) + return IncrementalUpdate( + self._pending_sources_to_results(new_pending_sources), + incremental_results, + completed_results, + ) + + def _get_incremental_defer_result( + self, deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord + ) -> IncrementalDeferResult: + """Get the incremental defer result from the grouped field set record.""" + data = deferred_grouped_field_set_record.data + fragment_records = deferred_grouped_field_set_record.deferred_fragment_records + max_length = len(fragment_records[0].path) + max_index = 0 + for i in range(1, len(fragment_records)): + fragment_record = fragment_records[i] + length = len(fragment_record.path) + if length > max_length: + max_length = length + max_index = i + 
record_with_longest_path = fragment_records[max_index] + longest_path = record_with_longest_path.path + sub_path = deferred_grouped_field_set_record.path[len(longest_path) :] + id_ = record_with_longest_path.id + return IncrementalDeferResult( + # safe because `data` is always defined when the record is completed + data, # type: ignore + # safe because `id` is defined + # once the fragment has been released as pending + id_, # type: ignore + sub_path or None, + ) + + @staticmethod + def _completed_record_to_result( + completed_record: DeferredFragmentRecord | StreamRecord, + ) -> CompletedResult: + """Convert the completed record to a result.""" + return CompletedResult( + # safe because `id` is defined once the stream has been released as pending + completed_record.id, # type: ignore + completed_record.errors or None, + ) + + def _publish(self, subsequent_result_record: SubsequentResultRecord) -> None: + """Publish the given incremental data record.""" + if isinstance(subsequent_result_record, StreamItemsRecord): + if subsequent_result_record.is_completed: + self._push(subsequent_result_record) + else: + self._introduce(subsequent_result_record) + elif subsequent_result_record._pending: # noqa: SLF001 + self._introduce(subsequent_result_record) + else: + self._push(subsequent_result_record) + + @staticmethod + def _get_children( + erroring_incremental_data_record: IncrementalDataRecord, + ) -> dict[SubsequentResultRecord, None]: + """Get the children of the given erroring incremental data record.""" + children: dict[SubsequentResultRecord, None] = {} + if isinstance(erroring_incremental_data_record, DeferredGroupedFieldSetRecord): + for ( + erroring_incremental_result_record + ) in erroring_incremental_data_record.deferred_fragment_records: + for child in erroring_incremental_result_record.children: + children[child] = None + else: + for child in erroring_incremental_data_record.children: + children[child] = None + return children + + def _get_descendants( + self, + children: dict[SubsequentResultRecord, None], + descendants: dict[SubsequentResultRecord, None] | None = None, + ) -> dict[SubsequentResultRecord, None]: + """Get the descendants of the given children.""" + if descendants is None: + descendants = {} + for child in children: + descendants[child] = None + self._get_descendants(child.children, descendants) + return descendants + + def _nulls_child_subsequent_result_record( + self, + subsequent_result_record: SubsequentResultRecord, + null_path: list[str | int], + ) -> bool: + """Check whether the given subsequent result record is nulled.""" + incremental_data_records: ( + list[SubsequentResultRecord] | dict[DeferredGroupedFieldSetRecord, None] + ) = ( + [subsequent_result_record] + if isinstance(subsequent_result_record, StreamItemsRecord) + else subsequent_result_record.deferred_grouped_field_set_records + ) + return any( + self._matches_path(incremental_data_record.path, null_path) + for incremental_data_record in incremental_data_records + ) + + def _matches_path( + self, test_path: list[str | int], base_path: list[str | int] + ) -> bool: + """Get whether the given test path matches the base path.""" + return all(item == test_path[i] for i, item in enumerate(base_path)) + + def _add_task(self, awaitable: Awaitable[Any]) -> None: + """Add the given task to the tasks set for later execution.""" + tasks = self._tasks + task = ensure_future(awaitable) + tasks.add(task) + task.add_done_callback(tasks.discard) + + +class InitialResultRecord: + """Initial result record""" + + errors: 
list[GraphQLError] + children: dict[SubsequentResultRecord, None] + + def __init__(self) -> None: + self.errors = [] + self.children = {} + + +class DeferredGroupedFieldSetRecord: + """Deferred grouped field set record""" + + path: list[str | int] + deferred_fragment_records: list[DeferredFragmentRecord] + grouped_field_set: GroupedFieldSet + should_initiate_defer: bool + errors: list[GraphQLError] + data: dict[str, Any] | None + sent: bool + + def __init__( + self, + deferred_fragment_records: list[DeferredFragmentRecord], + grouped_field_set: GroupedFieldSet, + should_initiate_defer: bool, + path: Path | None = None, + ) -> None: + self.path = path.as_list() if path else [] + self.deferred_fragment_records = deferred_fragment_records + self.grouped_field_set = grouped_field_set + self.should_initiate_defer = should_initiate_defer + self.errors = [] + self.sent = False + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [ + f"deferred_fragment_records={self.deferred_fragment_records!r}", + f"grouped_field_set={self.grouped_field_set!r}", + ] + if self.path: + args.append(f"path={self.path!r}") + return f"{name}({', '.join(args)})" + + +class DeferredFragmentRecord: + """Deferred fragment record""" + + path: list[str | int] + label: str | None + id: str | None + children: dict[SubsequentResultRecord, None] + deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None] + errors: list[GraphQLError] + filtered: bool + pending_sent: bool + _pending: dict[DeferredGroupedFieldSetRecord, None] + + def __init__(self, path: Path | None = None, label: str | None = None) -> None: + self.path = path.as_list() if path else [] + self.label = label + self.id = None + self.children = {} + self.filtered = False + self.pending_sent = False + self.deferred_grouped_field_set_records = {} + self.errors = [] + self._pending = {} + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [] + if self.path: + args.append(f"path={self.path!r}") + if self.label: + args.append(f"label={self.label!r}") + return f"{name}({', '.join(args)})" + + +class StreamRecord: + """Stream record""" + + label: str | None + path: list[str | int] + id: str | None + errors: list[GraphQLError] + early_return: Callable[[], Awaitable[Any]] | None + pending_sent: bool + + def __init__( + self, + path: Path, + label: str | None = None, + early_return: Callable[[], Awaitable[Any]] | None = None, + ) -> None: + self.path = path.as_list() + self.label = label + self.id = None + self.errors = [] + self.early_return = early_return + self.pending_sent = False + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [] + if self.path: + args.append(f"path={self.path!r}") + if self.label: + args.append(f"label={self.label!r}") + return f"{name}({', '.join(args)})" + + +class StreamItemsRecord: + """Stream items record""" + + errors: list[GraphQLError] + stream_record: StreamRecord + path: list[str | int] + items: list[str] + children: dict[SubsequentResultRecord, None] + is_final_record: bool + is_completed_async_iterator: bool + is_completed: bool + filtered: bool + + def __init__( + self, + stream_record: StreamRecord, + path: Path | None = None, + ) -> None: + self.stream_record = stream_record + self.path = path.as_list() if path else [] + self.children = {} + self.errors = [] + self.is_completed_async_iterator = self.is_completed = False + self.is_final_record = self.filtered = False + + def __repr__(self) -> str: + name = 
self.__class__.__name__ + args: list[str] = [f"stream_record={self.stream_record!r}"] + if self.path: + args.append(f"path={self.path!r}") + return f"{name}({', '.join(args)})" + + +IncrementalDataRecord = Union[ + InitialResultRecord, DeferredGroupedFieldSetRecord, StreamItemsRecord +] + +SubsequentResultRecord = Union[DeferredFragmentRecord, StreamItemsRecord] diff --git a/src/graphql/execution/map_async_iterator.py b/src/graphql/execution/map_async_iterator.py deleted file mode 100644 index 43400fd3..00000000 --- a/src/graphql/execution/map_async_iterator.py +++ /dev/null @@ -1,115 +0,0 @@ -from asyncio import CancelledError, Event, Task, ensure_future, wait -from concurrent.futures import FIRST_COMPLETED -from inspect import isasyncgen, isawaitable -from typing import cast, Any, AsyncIterable, Callable, Optional, Set, Type, Union -from types import TracebackType - -__all__ = ["MapAsyncIterator"] - - -# noinspection PyAttributeOutsideInit -class MapAsyncIterator: - """Map an AsyncIterable over a callback function. - - Given an AsyncIterable and a callback function, return an AsyncIterator which - produces values mapped via calling the callback function. - - When the resulting AsyncIterator is closed, the underlying AsyncIterable will also - be closed. - """ - - def __init__(self, iterable: AsyncIterable, callback: Callable) -> None: - self.iterator = iterable.__aiter__() - self.callback = callback - self._close_event = Event() - - def __aiter__(self) -> "MapAsyncIterator": - """Get the iterator object.""" - return self - - async def __anext__(self) -> Any: - """Get the next value of the iterator.""" - if self.is_closed: - if not isasyncgen(self.iterator): - raise StopAsyncIteration - value = await self.iterator.__anext__() - else: - aclose = ensure_future(self._close_event.wait()) - anext = ensure_future(self.iterator.__anext__()) - - try: - pending: Set[Task] = ( - await wait([aclose, anext], return_when=FIRST_COMPLETED) - )[1] - except CancelledError: - # cancel underlying tasks and close - aclose.cancel() - anext.cancel() - await self.aclose() - raise # re-raise the cancellation - - for task in pending: - task.cancel() - - if aclose.done(): - raise StopAsyncIteration - - error = anext.exception() - if error: - raise error - - value = anext.result() - - result = self.callback(value) - - return await result if isawaitable(result) else result - - async def athrow( - self, - type_: Union[BaseException, Type[BaseException]], - value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - """Throw an exception into the asynchronous iterator.""" - if self.is_closed: - return - athrow = getattr(self.iterator, "athrow", None) - if athrow: - await athrow(type_, value, traceback) - else: - await self.aclose() - if value is None: - if traceback is None: - raise type_ - value = ( - type_ - if isinstance(value, BaseException) - else cast(Type[BaseException], type_)() - ) - if traceback is not None: - value = value.with_traceback(traceback) - raise value - - async def aclose(self) -> None: - """Close the iterator.""" - if not self.is_closed: - aclose = getattr(self.iterator, "aclose", None) - if aclose: - try: - await aclose() - except RuntimeError: - pass - self.is_closed = True - - @property - def is_closed(self) -> bool: - """Check whether the iterator is closed.""" - return self._close_event.is_set() - - @is_closed.setter - def is_closed(self, value: bool) -> None: - """Mark the iterator as closed.""" - if value: - self._close_event.set() - else: - 
self._close_event.clear() diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index 452b9910..6d999171 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -1,11 +1,20 @@ +"""Middleware manager""" + +from __future__ import annotations + from functools import partial, reduce from inspect import isfunction +from typing import Any, Callable, Iterator + +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias -from typing import Callable, Iterator, Dict, List, Tuple, Any, Optional __all__ = ["MiddlewareManager"] -GraphQLFieldResolver = Callable[..., Any] +GraphQLFieldResolver: TypeAlias = Callable[..., Any] class MiddlewareManager: @@ -21,12 +30,12 @@ class MiddlewareManager: """ # allow custom attributes (not used internally) - __slots__ = "__dict__", "middlewares", "_middleware_resolvers", "_cached_resolvers" + __slots__ = "__dict__", "_cached_resolvers", "_middleware_resolvers", "middlewares" - _cached_resolvers: Dict[GraphQLFieldResolver, GraphQLFieldResolver] - _middleware_resolvers: Optional[List[Callable]] + _cached_resolvers: dict[GraphQLFieldResolver, GraphQLFieldResolver] + _middleware_resolvers: list[Callable] | None - def __init__(self, *middlewares: Any): + def __init__(self, *middlewares: Any) -> None: self.middlewares = middlewares self._middleware_resolvers = ( list(get_middleware_resolvers(middlewares)) if middlewares else None @@ -52,7 +61,7 @@ def get_field_resolver( return self._cached_resolvers[field_resolver] -def get_middleware_resolvers(middlewares: Tuple[Any, ...]) -> Iterator[Callable]: +def get_middleware_resolvers(middlewares: tuple[Any, ...]) -> Iterator[Callable]: """Get a list of resolver functions from a list of classes or functions.""" for middleware in middlewares: if isfunction(middleware): diff --git a/src/graphql/execution/subscribe.py b/src/graphql/execution/subscribe.py deleted file mode 100644 index 21fe4db3..00000000 --- a/src/graphql/execution/subscribe.py +++ /dev/null @@ -1,212 +0,0 @@ -from inspect import isawaitable -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Dict, - Optional, - Union, -) - -from ..error import GraphQLError, located_error -from ..execution.collect_fields import collect_fields -from ..execution.execute import ( - assert_valid_execution_arguments, - execute, - get_field_def, - ExecutionContext, - ExecutionResult, -) -from ..execution.values import get_argument_values -from ..language import DocumentNode -from ..pyutils import Path, inspect -from ..type import GraphQLFieldResolver, GraphQLSchema -from .map_async_iterator import MapAsyncIterator - -__all__ = ["subscribe", "create_source_event_stream"] - - -async def subscribe( - schema: GraphQLSchema, - document: DocumentNode, - root_value: Any = None, - context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, -) -> Union[AsyncIterator[ExecutionResult], ExecutionResult]: - """Create a GraphQL subscription. - - Implements the "Subscribe" algorithm described in the GraphQL spec. - - Returns a coroutine object which yields either an AsyncIterator (if successful) or - an ExecutionResult (client error). The coroutine will raise an exception if a server - error occurs. 
- - If the client-provided arguments to this function do not result in a compliant - subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no - data will be returned. - - If the source stream could not be created due to faulty subscription resolver logic - or underlying systems, the coroutine object will yield a single ExecutionResult - containing ``errors`` and no ``data``. - - If the operation succeeded, the coroutine will yield an AsyncIterator, which yields - a stream of ExecutionResults representing the response stream. - """ - result_or_stream = await create_source_event_stream( - schema, - document, - root_value, - context_value, - variable_values, - operation_name, - subscribe_field_resolver, - ) - if isinstance(result_or_stream, ExecutionResult): - return result_or_stream - - async def map_source_to_response(payload: Any) -> ExecutionResult: - """Map source to response. - - For each payload yielded from a subscription, map it over the normal GraphQL - :func:`~graphql.execute` function, with ``payload`` as the ``root_value``. - This implements the "MapSourceToResponseEvent" algorithm described in the - GraphQL specification. The :func:`~graphql.execute` function provides the - "ExecuteSubscriptionEvent" algorithm, as it is nearly identical to the - "ExecuteQuery" algorithm, for which :func:`~graphql.execute` is also used. - """ - result = execute( - schema, - document, - payload, - context_value, - variable_values, - operation_name, - field_resolver, - ) - return await result if isawaitable(result) else result - - # Map every source value to a ExecutionResult value as described above. - return MapAsyncIterator(result_or_stream, map_source_to_response) - - -async def create_source_event_stream( - schema: GraphQLSchema, - document: DocumentNode, - root_value: Any = None, - context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - subscribe_field_resolver: Optional[GraphQLFieldResolver] = None, -) -> Union[AsyncIterable[Any], ExecutionResult]: - """Create source event stream - - Implements the "CreateSourceEventStream" algorithm described in the GraphQL - specification, resolving the subscription source event stream. - - Returns a coroutine that yields an AsyncIterable. - - If the client-provided arguments to this function do not result in a compliant - subscription, a GraphQL Response (ExecutionResult) with descriptive errors and no - data will be returned. - - If the source stream could not be created due to faulty subscription resolver logic - or underlying systems, the coroutine object will yield a single ExecutionResult - containing ``errors`` and no ``data``. - - A source event stream represents a sequence of events, each of which triggers a - GraphQL execution for that event. - - This may be useful when hosting the stateful subscription service in a different - process or machine than the stateless GraphQL execution engine, or otherwise - separating these two steps. For more on this, see the "Supporting Subscriptions - at Scale" information in the GraphQL spec. - """ - # If arguments are missing or incorrectly typed, this is an internal developer - # mistake which should throw an early error. - assert_valid_execution_arguments(schema, document, variable_values) - - # If a valid context cannot be created due to incorrect arguments, - # a "Response" with only errors is returned. 
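# Migration sketch (illustrative, not part of the diff): the functions
# removed here were plain coroutines, while their replacements in execute.py
# return an AwaitableOrValue. A shim restoring the old always-awaitable
# behavior, assuming the new subscribe() is importable as below:
from inspect import isawaitable
from typing import Any

from graphql.execution import subscribe


async def subscribe_compat(schema, document, **kwargs: Any):
    """Behave like the removed coroutine-style subscribe()."""
    result = subscribe(schema, document, **kwargs)
    return await result if isawaitable(result) else result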
- context = ExecutionContext.build( - schema, - document, - root_value, - context_value, - variable_values, - operation_name, - subscribe_field_resolver=subscribe_field_resolver, - ) - - # Return early errors if execution context failed. - if isinstance(context, list): - return ExecutionResult(data=None, errors=context) - - try: - event_stream = await execute_subscription(context) - - # Assert field returned an event stream, otherwise yield an error. - if not isinstance(event_stream, AsyncIterable): - raise TypeError( - "Subscription field must return AsyncIterable." - f" Received: {inspect(event_stream)}." - ) - return event_stream - - except GraphQLError as error: - # Report it as an ExecutionResult, containing only errors and no data. - return ExecutionResult(data=None, errors=[error]) - - -async def execute_subscription(context: ExecutionContext) -> AsyncIterable[Any]: - schema = context.schema - - root_type = schema.subscription_type - if root_type is None: - raise GraphQLError( - "Schema is not configured to execute subscription operation.", - context.operation, - ) - - root_fields = collect_fields( - schema, - context.fragments, - context.variable_values, - root_type, - context.operation.selection_set, - ) - response_name, field_nodes = next(iter(root_fields.items())) - field_def = get_field_def(schema, root_type, field_nodes[0]) - - if not field_def: - field_name = field_nodes[0].name.value - raise GraphQLError( - f"The subscription field '{field_name}' is not defined.", field_nodes - ) - - path = Path(None, response_name, root_type.name) - info = context.build_resolve_info(field_def, field_nodes, root_type, path) - - # Implements the "ResolveFieldEventStream" algorithm from GraphQL specification. - # It differs from "ResolveFieldValue" due to providing a different `resolveFn`. - - try: - # Build a dictionary of arguments from the field.arguments AST, using the - # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_nodes[0], context.variable_values) - - # Call the `subscribe()` resolver or the default resolver to produce an - # AsyncIterable yielding raw payloads. 
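# Resolver-shape sketch (illustrative; ``pubsub`` stands in for whatever
# event source the application puts on the context): what ``resolve_fn``
# below receives and what it must return - an AsyncIterable of raw payloads.
from typing import Any, AsyncIterator

from graphql import GraphQLResolveInfo


async def on_comment_added(
    root: Any, info: GraphQLResolveInfo, post_id: str
) -> AsyncIterator[dict]:
    # `post_id` arrives already coerced by get_argument_values() from the
    # field's argument AST and the variable values.
    async for event in info.context["pubsub"].listen(f"comments:{post_id}"):
        yield event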
- resolve_fn = field_def.subscribe or context.subscribe_field_resolver - - event_stream = resolve_fn(context.root_value, info, **args) - if context.is_awaitable(event_stream): - event_stream = await event_stream - if isinstance(event_stream, Exception): - raise event_stream - - return event_stream - except Exception as error: - raise located_error(error, field_nodes, path.as_list()) diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index e11733fc..5309996a 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -1,12 +1,16 @@ -from typing import Any, Callable, Collection, Dict, List, Optional, Union, cast +"""Helpers for handling values""" + +from __future__ import annotations + +from typing import Any, Callable, Collection, Dict, List, Union from ..error import GraphQLError from ..language import ( DirectiveNode, EnumValueDefinitionNode, ExecutableDefinitionNode, - FieldNode, FieldDefinitionNode, + FieldNode, InputValueDefinitionNode, NullValueNode, SchemaDefinitionNode, @@ -17,12 +21,12 @@ VariableNode, print_ast, ) -from ..pyutils import inspect, print_path_list, Undefined +from ..pyutils import Undefined, inspect, print_path_list from ..type import ( GraphQLDirective, GraphQLField, - GraphQLInputType, GraphQLSchema, + is_input_object_type, is_input_type, is_non_null_type, ) @@ -30,17 +34,21 @@ from ..utilities.type_from_ast import type_from_ast from ..utilities.value_from_ast import value_from_ast -__all__ = ["get_argument_values", "get_directive_values", "get_variable_values"] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias +__all__ = ["get_argument_values", "get_directive_values", "get_variable_values"] -CoercedVariableValues = Union[List[GraphQLError], Dict[str, Any]] +CoercedVariableValues: TypeAlias = Union[List[GraphQLError], Dict[str, Any]] def get_variable_values( schema: GraphQLSchema, var_def_nodes: Collection[VariableDefinitionNode], - inputs: Dict[str, Any], - max_errors: Optional[int] = None, + inputs: dict[str, Any], + max_errors: int | None = None, ) -> CoercedVariableValues: """Get coerced variable values based on provided definitions. @@ -48,14 +56,15 @@ def get_variable_values( variable definitions and arbitrary input. If the input cannot be parsed to match the variable definitions, a GraphQLError will be raised. """ - errors: List[GraphQLError] = [] + errors: list[GraphQLError] = [] def on_error(error: GraphQLError) -> None: if max_errors is not None and len(errors) >= max_errors: - raise GraphQLError( + msg = ( "Too many errors processing variables," " error limit reached. Execution aborted." 
) + raise GraphQLError(msg) errors.append(error) try: @@ -71,10 +80,10 @@ def on_error(error: GraphQLError) -> None: def coerce_variable_values( schema: GraphQLSchema, var_def_nodes: Collection[VariableDefinitionNode], - inputs: Dict[str, Any], + inputs: dict[str, Any], on_error: Callable[[GraphQLError], None], -) -> Dict[str, Any]: - coerced_values: Dict[str, Any] = {} +) -> dict[str, Any]: + coerced_values: dict[str, Any] = {} for var_def_node in var_def_nodes: var_name = var_def_node.variable.name.value var_type = type_from_ast(schema, var_def_node.type) @@ -91,7 +100,6 @@ def coerce_variable_values( ) continue - var_type = cast(GraphQLInputType, var_type) if var_name not in inputs: if var_def_node.default_value: coerced_values[var_name] = value_from_ast( @@ -121,7 +129,11 @@ def coerce_variable_values( continue def on_input_value_error( - path: List[Union[str, int]], invalid_value: Any, error: GraphQLError + path: list[str | int], + invalid_value: Any, + error: GraphQLError, + var_name: str = var_name, + var_def_node: VariableDefinitionNode = var_def_node, ) -> None: invalid_str = inspect(invalid_value) prefix = f"Variable '${var_name}' got invalid value {invalid_str}" @@ -131,7 +143,7 @@ def on_input_value_error( GraphQLError( prefix + "; " + error.message, var_def_node, - original_error=error.original_error, + original_error=error, ) ) @@ -143,16 +155,16 @@ def on_input_value_error( def get_argument_values( - type_def: Union[GraphQLField, GraphQLDirective], - node: Union[FieldNode, DirectiveNode], - variable_values: Optional[Dict[str, Any]] = None, -) -> Dict[str, Any]: + type_def: GraphQLField | GraphQLDirective, + node: FieldNode | DirectiveNode, + variable_values: dict[str, Any] | None = None, +) -> dict[str, Any]: """Get coerced argument values based on provided definitions and nodes. Prepares a dict of argument values given a list of argument definitions and list of argument AST nodes. """ - coerced_values: Dict[str, Any] = {} + coerced_values: dict[str, Any] = {} arg_node_map = {arg.name.value: arg for arg in node.arguments or []} for name, arg_def in type_def.args.items(): @@ -160,14 +172,17 @@ def get_argument_values( argument_node = arg_node_map.get(name) if argument_node is None: - if arg_def.default_value is not Undefined: - coerced_values[arg_def.out_name or name] = arg_def.default_value + value = arg_def.default_value + if value is not Undefined: + if is_input_object_type(arg_def.type): + # coerce input value so that out_names are used + value = coerce_input_value(value, arg_def.type) + coerced_values[arg_def.out_name or name] = value elif is_non_null_type(arg_type): # pragma: no cover else - raise GraphQLError( - f"Argument '{name}' of required type '{arg_type}'" - " was not provided.", - node, + msg = ( + f"Argument '{name}' of required type '{arg_type}' was not provided." 
) + raise GraphQLError(msg, node) continue # pragma: no cover value_node = argument_node.value @@ -176,39 +191,40 @@ def get_argument_values( if isinstance(value_node, VariableNode): variable_name = value_node.name.value if variable_values is None or variable_name not in variable_values: - if arg_def.default_value is not Undefined: - coerced_values[arg_def.out_name or name] = arg_def.default_value + value = arg_def.default_value + if value is not Undefined: + if is_input_object_type(arg_def.type): + # coerce input value so that out_names are used + value = coerce_input_value(value, arg_def.type) + coerced_values[arg_def.out_name or name] = value elif is_non_null_type(arg_type): # pragma: no cover else - raise GraphQLError( + msg = ( f"Argument '{name}' of required type '{arg_type}'" f" was provided the variable '${variable_name}'" - " which was not provided a runtime value.", - value_node, + " which was not provided a runtime value." ) + raise GraphQLError(msg, value_node) continue # pragma: no cover - is_null = variable_values[variable_name] is None + variable_value = variable_values[variable_name] + is_null = variable_value is None or variable_value is Undefined if is_null and is_non_null_type(arg_type): - raise GraphQLError( - f"Argument '{name}' of non-null type '{arg_type}' must not be null.", - value_node, - ) + msg = f"Argument '{name}' of non-null type '{arg_type}' must not be null." + raise GraphQLError(msg, value_node) coerced_value = value_from_ast(value_node, arg_type, variable_values) if coerced_value is Undefined: # Note: `values_of_correct_type` validation should catch this before # execution. This is a runtime check to ensure execution does not # continue with an invalid argument value. - raise GraphQLError( - f"Argument '{name}' has invalid value {print_ast(value_node)}.", - value_node, - ) + msg = f"Argument '{name}' has invalid value {print_ast(value_node)}." + raise GraphQLError(msg, value_node) coerced_values[arg_def.out_name or name] = coerced_value return coerced_values -NodeWithDirective = Union[ +NodeWithDirective: TypeAlias = Union[ EnumValueDefinitionNode, ExecutableDefinitionNode, FieldDefinitionNode, @@ -223,8 +239,8 @@ def get_argument_values( def get_directive_values( directive_def: GraphQLDirective, node: NodeWithDirective, - variable_values: Optional[Dict[str, Any]] = None, -) -> Optional[Dict[str, Any]]: + variable_values: dict[str, Any] | None = None, +) -> dict[str, Any] | None: """Get coerced argument values based on provided nodes. 
Prepares a dict of argument values given a directive definition and an AST node diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index c2e804cd..fe1dd5c7 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -1,11 +1,15 @@ +"""Execute a GraphQL operation""" + +from __future__ import annotations + from asyncio import ensure_future -from inspect import isawaitable -from typing import Any, Awaitable, Callable, Dict, Optional, Union, Type, cast +from typing import Any, Awaitable, Callable, cast from .error import GraphQLError -from .execution import execute, ExecutionResult, ExecutionContext, Middleware -from .language import parse, Source +from .execution import ExecutionContext, ExecutionResult, Middleware, execute +from .language import Source, parse from .pyutils import AwaitableOrValue +from .pyutils import is_awaitable as default_is_awaitable from .type import ( GraphQLFieldResolver, GraphQLSchema, @@ -18,16 +22,16 @@ async def graphql( schema: GraphQLSchema, - source: Union[str, Source], + source: str | Source, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, - is_awaitable: Optional[Callable[[Any], bool]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, + is_awaitable: Callable[[Any], bool] | None = None, ) -> ExecutionResult: """Execute a GraphQL operation asynchronously. @@ -38,6 +42,8 @@ async def graphql( to separate the validation and execution phases to a static time tooling step, and a server runtime step. + This function does not support incremental delivery (`@defer` and `@stream`). + Accepts the following arguments: :arg schema: @@ -89,28 +95,28 @@ async def graphql( is_awaitable, ) - if isawaitable(result): - return await cast(Awaitable[ExecutionResult], result) + if default_is_awaitable(result): + return await cast("Awaitable[ExecutionResult]", result) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def assume_not_awaitable(_value: Any) -> bool: - """Replacement for isawaitable if everything is assumed to be synchronous.""" + """Replacement for is_awaitable if everything is assumed to be synchronous.""" return False def graphql_sync( schema: GraphQLSchema, - source: Union[str, Source], + source: str | Source, root_value: Any = None, context_value: Any = None, - variable_values: Optional[Dict[str, Any]] = None, - operation_name: Optional[str] = None, - field_resolver: Optional[GraphQLFieldResolver] = None, - type_resolver: Optional[GraphQLTypeResolver] = None, - middleware: Optional[Middleware] = None, - execution_context_class: Optional[Type[ExecutionContext]] = None, + variable_values: dict[str, Any] | None = None, + operation_name: str | None = None, + field_resolver: GraphQLFieldResolver | None = None, + type_resolver: GraphQLTypeResolver | None = None, + middleware: Middleware | None = None, + execution_context_class: type[ExecutionContext] | None = None, check_sync: bool = False, ) -> ExecutionResult: """Execute a GraphQL operation synchronously. 
@@ -142,25 +148,26 @@ def graphql_sync( ) # Assert that the execution was synchronous. - if isawaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() - raise RuntimeError("GraphQL execution failed to complete synchronously.") + if default_is_awaitable(result): + ensure_future(cast("Awaitable[ExecutionResult]", result)).cancel() + msg = "GraphQL execution failed to complete synchronously." + raise RuntimeError(msg) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def graphql_impl( schema: GraphQLSchema, - source: Union[str, Source], + source: str | Source, root_value: Any, context_value: Any, - variable_values: Optional[Dict[str, Any]], - operation_name: Optional[str], - field_resolver: Optional[GraphQLFieldResolver], - type_resolver: Optional[GraphQLTypeResolver], - middleware: Optional[Middleware], - execution_context_class: Optional[Type[ExecutionContext]], - is_awaitable: Optional[Callable[[Any], bool]], + variable_values: dict[str, Any] | None, + operation_name: str | None, + field_resolver: GraphQLFieldResolver | None, + type_resolver: GraphQLTypeResolver | None, + middleware: Middleware | None, + execution_context_class: type[ExecutionContext] | None, + is_awaitable: Callable[[Any], bool] | None, ) -> AwaitableOrValue[ExecutionResult]: """Execute a query, return asynchronously only if necessary.""" # Validate Schema diff --git a/src/graphql/language/__init__.py b/src/graphql/language/__init__.py index 7d3120f5..bd5e7be1 100644 --- a/src/graphql/language/__init__.py +++ b/src/graphql/language/__init__.py @@ -46,6 +46,10 @@ SelectionSetNode, SelectionNode, FieldNode, + NullabilityAssertionNode, + NonNullAssertionNode, + ErrorBoundaryNode, + ListNullabilityOperatorNode, ArgumentNode, ConstArgumentNode, FragmentSpreadNode, @@ -98,6 +102,7 @@ from .predicates import ( is_definition_node, is_executable_definition_node, + is_nullability_assertion_node, is_selection_node, is_value_node, is_const_value_node, @@ -110,99 +115,104 @@ from .directive_locations import DirectiveLocation __all__ = [ - "get_location", - "SourceLocation", - "FormattedSourceLocation", - "print_location", - "print_source_location", - "TokenKind", - "Lexer", - "parse", - "parse_value", - "parse_const_value", - "parse_type", - "print_ast", - "Source", - "visit", - "Visitor", - "ParallelVisitor", - "VisitorAction", - "VisitorKeyMap", "BREAK", - "SKIP", - "REMOVE", "IDLE", - "Location", - "Token", + "REMOVE", + "SKIP", + "ArgumentNode", + "BooleanValueNode", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", + "DefinitionNode", + "DirectiveDefinitionNode", "DirectiveLocation", - "Node", - "NameNode", + "DirectiveNode", "DocumentNode", - "DefinitionNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", "ExecutableDefinitionNode", - "OperationDefinitionNode", - "OperationType", - "VariableDefinitionNode", - "VariableNode", - "SelectionSetNode", - "SelectionNode", + "FieldDefinitionNode", "FieldNode", - "ArgumentNode", - "ConstArgumentNode", + "FloatValueNode", + "FormattedSourceLocation", + "FragmentDefinitionNode", "FragmentSpreadNode", "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - 
"NullValueNode", - "EnumValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "Lexer", + "ListNullabilityOperatorNode", + "ListTypeNode", "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", + "Location", + "NameNode", "NamedTypeNode", - "ListTypeNode", + "Node", + "NonNullAssertionNode", "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", + "OperationType", "OperationTypeDefinitionNode", - "TypeDefinitionNode", + "ParallelVisitor", "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", - "TypeSystemExtensionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", "SchemaExtensionNode", + "SelectionNode", + "SelectionSetNode", + "Source", + "SourceLocation", + "StringValueNode", + "Token", + "TokenKind", + "TypeDefinitionNode", "TypeExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", + "TypeNode", + "TypeSystemDefinitionNode", + "TypeSystemExtensionNode", + "UnionTypeDefinitionNode", "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", + "ValueNode", + "VariableDefinitionNode", + "VariableNode", + "Visitor", + "VisitorAction", + "VisitorKeyMap", + "get_location", + "is_const_value_node", "is_definition_node", "is_executable_definition_node", + "is_nullability_assertion_node", "is_selection_node", - "is_value_node", - "is_const_value_node", + "is_type_definition_node", + "is_type_extension_node", "is_type_node", "is_type_system_definition_node", - "is_type_definition_node", "is_type_system_extension_node", - "is_type_extension_node", + "is_value_node", + "parse", + "parse_const_value", + "parse_type", + "parse_value", + "print_ast", + "print_location", + "print_source_location", + "visit", ] diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index a2470560..a67ee1ea 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -1,75 +1,91 @@ +"""GraphQL Abstract Syntax Tree""" + +from __future__ import annotations + from copy import copy, deepcopy from enum import Enum -from typing import Any, Dict, List, Tuple, Optional, Union +from typing import TYPE_CHECKING, Any, Union + +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias -from .source import Source -from .token_kind import TokenKind from ..pyutils import camel_to_snake +if TYPE_CHECKING: + from .source import Source + from .token_kind import TokenKind + + __all__ = [ - "Location", - "Token", - "Node", - "NameNode", - "DocumentNode", + "QUERY_DOCUMENT_KEYS", + "ArgumentNode", + "BooleanValueNode", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", "DefinitionNode", + "DirectiveDefinitionNode", + "DirectiveNode", + "DocumentNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", 
"ExecutableDefinitionNode", - "OperationDefinitionNode", - "VariableDefinitionNode", - "SelectionSetNode", - "SelectionNode", + "FieldDefinitionNode", "FieldNode", - "ArgumentNode", - "ConstArgumentNode", + "FloatValueNode", + "FragmentDefinitionNode", "FragmentSpreadNode", "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", - "VariableNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "ListNullabilityOperatorNode", + "ListTypeNode", "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", + "Location", + "NameNode", "NamedTypeNode", - "ListTypeNode", + "Node", + "NonNullAssertionNode", "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", "OperationType", "OperationTypeDefinitionNode", - "TypeDefinitionNode", "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", "SchemaExtensionNode", + "SelectionNode", + "SelectionSetNode", + "StringValueNode", + "Token", + "TypeDefinitionNode", "TypeExtensionNode", + "TypeNode", + "TypeSystemDefinitionNode", "TypeSystemExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", + "UnionTypeDefinitionNode", "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", - "QUERY_DOCUMENT_KEYS", + "ValueNode", + "VariableDefinitionNode", + "VariableNode", ] @@ -79,7 +95,7 @@ class Token: Represents a range of characters represented by a lexical token within a Source. """ - __slots__ = "kind", "start", "end", "line", "column", "prev", "next", "value" + __slots__ = "column", "end", "kind", "line", "next", "prev", "start", "value" kind: TokenKind # the kind of token start: int # the character offset at which this Node begins @@ -87,11 +103,11 @@ class Token: line: int # the 1-indexed line number on which this Token appears column: int # the 1-indexed column number at which this Token begins # for non-punctuation tokens, represents the interpreted value of the token: - value: Optional[str] + value: str | None # Tokens exist as nodes in a double-linked-list amongst all tokens including # ignored tokens. is always the first node and the last. 
- prev: Optional["Token"] - next: Optional["Token"] + prev: Token | None + next: Token | None def __init__( self, @@ -100,7 +116,7 @@ def __init__( end: int, line: int, column: int, - value: Optional[str] = None, + value: str | None = None, ) -> None: self.kind = kind self.start, self.end = start, end @@ -118,7 +134,7 @@ def __repr__(self) -> str: def __inspect__(self) -> str: return repr(self) - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, Token): return ( self.kind == other.kind @@ -128,7 +144,7 @@ def __eq__(self, other: Any) -> bool: and self.column == other.column and self.value == other.value ) - elif isinstance(other, str): + if isinstance(other, str): return other == self.desc return False @@ -137,7 +153,7 @@ def __hash__(self) -> int: (self.kind, self.start, self.end, self.line, self.column, self.value) ) - def __copy__(self) -> "Token": + def __copy__(self) -> Token: """Create a shallow copy of the token""" token = self.__class__( self.kind, @@ -150,11 +166,11 @@ def __copy__(self) -> "Token": token.prev = self.prev return token - def __deepcopy__(self, memo: Dict) -> "Token": + def __deepcopy__(self, memo: dict) -> Token: """Allow only shallow copies to avoid recursion.""" return copy(self) - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: """Remove the links when pickling. Keeping the links would make pickling a schema too expensive. @@ -165,7 +181,7 @@ def __getstate__(self) -> Dict[str, Any]: if key not in {"prev", "next"} } - def __setstate__(self, state: Dict[str, Any]) -> None: + def __setstate__(self, state: dict[str, Any]) -> None: """Reset the links when un-pickling.""" for key, value in state.items(): setattr(self, key, value) @@ -186,11 +202,11 @@ class Location: """ __slots__ = ( - "start", "end", - "start_token", "end_token", "source", + "start", + "start_token", ) start: int # character offset at which this Node begins @@ -216,14 +232,14 @@ def __repr__(self) -> str: def __inspect__(self) -> str: return repr(self) - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, Location): return self.start == other.start and self.end == other.end - elif isinstance(other, (list, tuple)) and len(other) == 2: + if isinstance(other, (list, tuple)) and len(other) == 2: return self.start == other[0] and self.end == other[1] return False - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other def __hash__(self) -> int: @@ -231,14 +247,13 @@ def __hash__(self) -> int: class OperationType(Enum): - QUERY = "query" MUTATION = "mutation" SUBSCRIPTION = "subscription" # Default map from node kinds to their node attributes (internal) -QUERY_DOCUMENT_KEYS: Dict[str, Tuple[str, ...]] = { +QUERY_DOCUMENT_KEYS: dict[str, tuple[str, ...]] = { "name": (), "document": ("definitions",), "operation_definition": ( @@ -250,8 +265,22 @@ class OperationType(Enum): "variable_definition": ("variable", "type", "default_value", "directives"), "variable": ("name",), "selection_set": ("selections",), - "field": ("alias", "name", "arguments", "directives", "selection_set"), + "field": ( + "alias", + "name", + "arguments", + "directives", + "selection_set", + # note: Client controlled Nullability is experimental and may be changed + # or removed in the future. 
+ "nullability_assertion", + ), "argument": ("name", "value"), + # note: Client controlled Nullability is experimental and may be changed + # or removed in the future. + "list_nullability_operator": ("nullability_assertion",), + "non_null_assertion": ("nullability_assertion",), + "error_boundary": ("nullability_assertion",), "fragment_spread": ("name", "directives"), "inline_fragment": ("type_condition", "directives", "selection_set"), "fragment_definition": ( @@ -316,12 +345,12 @@ class Node: """AST nodes""" # allow custom attributes and weak references (not used internally) - __slots__ = "__dict__", "__weakref__", "loc", "_hash" + __slots__ = "__dict__", "__weakref__", "_hash", "loc" - loc: Optional[Location] + loc: Location | None kind: str = "ast" # the kind of the node as a snake_case string - keys: Tuple[str, ...] = ("loc",) # the names of the attributes of this node + keys: tuple[str, ...] = ("loc",) # the names of the attributes of this node def __init__(self, **kwargs: Any) -> None: """Initialize the node with the given keyword arguments.""" @@ -333,10 +362,19 @@ def __init__(self, **kwargs: Any) -> None: def __repr__(self) -> str: """Get a simple representation of the node.""" - name, loc = self.__class__.__name__, getattr(self, "loc", None) - return f"{name} at {loc}" if loc else name - - def __eq__(self, other: Any) -> bool: + rep = self.__class__.__name__ + if isinstance(self, NameNode): + rep += f"({self.value!r})" + else: + name = getattr(self, "name", None) + if name: + rep += f"(name={name.value!r})" + loc = getattr(self, "loc", None) + if loc: + rep += f" at {loc}" + return rep + + def __eq__(self, other: object) -> bool: """Test whether two nodes are equal (recursively).""" return ( isinstance(other, Node) @@ -360,11 +398,11 @@ def __setattr__(self, key: str, value: Any) -> None: del self._hash super().__setattr__(key, value) - def __copy__(self) -> "Node": + def __copy__(self) -> Node: """Create a shallow copy of the node.""" return self.__class__(**{key: getattr(self, key) for key in self.keys}) - def __deepcopy__(self, memo: Dict) -> "Node": + def __deepcopy__(self, memo: dict) -> Node: """Create a deep copy of the node""" # noinspection PyArgumentList return self.__class__( @@ -382,14 +420,15 @@ def __init_subclass__(cls) -> None: if name.endswith("Node"): name = name[:-4] cls.kind = camel_to_snake(name) - keys: List[str] = [] + keys: list[str] = [] for base in cls.__bases__: # noinspection PyUnresolvedReferences keys.extend(base.keys) # type: ignore keys.extend(cls.__slots__) cls.keys = tuple(keys) - def to_dict(self, locations: bool = False) -> Dict: + def to_dict(self, locations: bool = False) -> dict: + """Concert node to a dictionary.""" from ..utilities import ast_to_dict return ast_to_dict(self, locations) @@ -410,7 +449,7 @@ class NameNode(Node): class DocumentNode(Node): __slots__ = ("definitions",) - definitions: Tuple["DefinitionNode", ...] + definitions: tuple[DefinitionNode, ...] class DefinitionNode(Node): @@ -418,12 +457,12 @@ class DefinitionNode(Node): class ExecutableDefinitionNode(DefinitionNode): - __slots__ = "name", "directives", "variable_definitions", "selection_set" + __slots__ = "directives", "name", "selection_set", "variable_definitions" - name: Optional[NameNode] - directives: Tuple["DirectiveNode", ...] - variable_definitions: Tuple["VariableDefinitionNode", ...] - selection_set: "SelectionSetNode" + name: NameNode | None + directives: tuple[DirectiveNode, ...] + variable_definitions: tuple[VariableDefinitionNode, ...] 
+ selection_set: SelectionSetNode class OperationDefinitionNode(ExecutableDefinitionNode): @@ -433,45 +472,64 @@ class OperationDefinitionNode(ExecutableDefinitionNode): class VariableDefinitionNode(Node): - __slots__ = "variable", "type", "default_value", "directives" + __slots__ = "default_value", "directives", "type", "variable" - variable: "VariableNode" - type: "TypeNode" - default_value: Optional["ConstValueNode"] - directives: Tuple["ConstDirectiveNode", ...] + variable: VariableNode + type: TypeNode + default_value: ConstValueNode | None + directives: tuple[ConstDirectiveNode, ...] class SelectionSetNode(Node): __slots__ = ("selections",) - selections: Tuple["SelectionNode", ...] + selections: tuple[SelectionNode, ...] class SelectionNode(Node): __slots__ = ("directives",) - directives: Tuple["DirectiveNode", ...] + directives: tuple[DirectiveNode, ...] class FieldNode(SelectionNode): - __slots__ = "alias", "name", "arguments", "selection_set" + __slots__ = "alias", "arguments", "name", "nullability_assertion", "selection_set" - alias: Optional[NameNode] + alias: NameNode | None name: NameNode - arguments: Tuple["ArgumentNode", ...] - selection_set: Optional[SelectionSetNode] + arguments: tuple[ArgumentNode, ...] + # Note: Client Controlled Nullability is experimental + # and may be changed or removed in the future. + nullability_assertion: NullabilityAssertionNode + selection_set: SelectionSetNode | None + + +class NullabilityAssertionNode(Node): + __slots__ = ("nullability_assertion",) + nullability_assertion: NullabilityAssertionNode | None + + +class ListNullabilityOperatorNode(NullabilityAssertionNode): + pass + + +class NonNullAssertionNode(NullabilityAssertionNode): + nullability_assertion: ListNullabilityOperatorNode + + +class ErrorBoundaryNode(NullabilityAssertionNode): + nullability_assertion: ListNullabilityOperatorNode class ArgumentNode(Node): __slots__ = "name", "value" name: NameNode - value: "ValueNode" + value: ValueNode class ConstArgumentNode(ArgumentNode): - - value: "ConstValueNode" + value: ConstValueNode # Fragments @@ -484,9 +542,9 @@ class FragmentSpreadNode(SelectionNode): class InlineFragmentNode(SelectionNode): - __slots__ = "type_condition", "selection_set" + __slots__ = "selection_set", "type_condition" - type_condition: "NamedTypeNode" + type_condition: NamedTypeNode selection_set: SelectionSetNode @@ -494,7 +552,7 @@ class FragmentDefinitionNode(ExecutableDefinitionNode): __slots__ = ("type_condition",) name: NameNode - type_condition: "NamedTypeNode" + type_condition: NamedTypeNode # Values @@ -523,10 +581,10 @@ class FloatValueNode(ValueNode): class StringValueNode(ValueNode): - __slots__ = "value", "block" + __slots__ = "block", "value" value: str - block: Optional[bool] + block: bool | None class BooleanValueNode(ValueNode): @@ -548,23 +606,21 @@ class EnumValueNode(ValueNode): class ListValueNode(ValueNode): __slots__ = ("values",) - values: Tuple[ValueNode, ...] + values: tuple[ValueNode, ...] class ConstListValueNode(ListValueNode): - - values: Tuple["ConstValueNode", ...] + values: tuple[ConstValueNode, ...] class ObjectValueNode(ValueNode): __slots__ = ("fields",) - fields: Tuple["ObjectFieldNode", ...] + fields: tuple[ObjectFieldNode, ...] class ConstObjectValueNode(ObjectValueNode): - - fields: Tuple["ConstObjectFieldNode", ...] + fields: tuple[ConstObjectFieldNode, ...] 
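
As a brief illustrative aside: a sketch of how these value node classes surface through the public parser helpers (the literal itself is arbitrary):

    from graphql.language import parse_const_value

    node = parse_const_value('{ answer: 42, tags: ["a", "b"] }')
    print(node.kind)  # 'object_value'
    print([(field.name.value, field.value.kind) for field in node.fields])
    # [('answer', 'int_value'), ('tags', 'list_value')]
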
class ObjectFieldNode(Node): @@ -575,11 +631,10 @@ class ObjectFieldNode(Node): class ConstObjectFieldNode(ObjectFieldNode): - - value: "ConstValueNode" + value: ConstValueNode -ConstValueNode = Union[ +ConstValueNode: TypeAlias = Union[ IntValueNode, FloatValueNode, StringValueNode, @@ -595,15 +650,14 @@ class ConstObjectFieldNode(ObjectFieldNode): class DirectiveNode(Node): - __slots__ = "name", "arguments" + __slots__ = "arguments", "name" name: NameNode - arguments: Tuple[ArgumentNode, ...] + arguments: tuple[ArgumentNode, ...] class ConstDirectiveNode(DirectiveNode): - - arguments: Tuple[ConstArgumentNode, ...] + arguments: tuple[ConstArgumentNode, ...] # Type Reference @@ -628,7 +682,7 @@ class ListTypeNode(TypeNode): class NonNullTypeNode(TypeNode): __slots__ = ("type",) - type: Union[NamedTypeNode, ListTypeNode] + type: NamedTypeNode | ListTypeNode # Type System Definition @@ -641,9 +695,9 @@ class TypeSystemDefinitionNode(DefinitionNode): class SchemaDefinitionNode(TypeSystemDefinitionNode): __slots__ = "description", "directives", "operation_types" - description: Optional[StringValueNode] - directives: Tuple[ConstDirectiveNode, ...] - operation_types: Tuple["OperationTypeDefinitionNode", ...] + description: StringValueNode | None + directives: tuple[ConstDirectiveNode, ...] + operation_types: tuple[OperationTypeDefinitionNode, ...] class OperationTypeDefinitionNode(Node): @@ -657,95 +711,95 @@ class OperationTypeDefinitionNode(Node): class TypeDefinitionNode(TypeSystemDefinitionNode): - __slots__ = "description", "name", "directives" + __slots__ = "description", "directives", "name" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[DirectiveNode, ...] + directives: tuple[DirectiveNode, ...] class ScalarTypeDefinitionNode(TypeDefinitionNode): __slots__ = () - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] class ObjectTypeDefinitionNode(TypeDefinitionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" - interfaces: Tuple[NamedTypeNode, ...] - directives: Tuple[ConstDirectiveNode, ...] - fields: Tuple["FieldDefinitionNode", ...] + interfaces: tuple[NamedTypeNode, ...] + directives: tuple[ConstDirectiveNode, ...] + fields: tuple[FieldDefinitionNode, ...] class FieldDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives", "arguments", "type" + __slots__ = "arguments", "description", "directives", "name", "type" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[ConstDirectiveNode, ...] - arguments: Tuple["InputValueDefinitionNode", ...] + directives: tuple[ConstDirectiveNode, ...] + arguments: tuple[InputValueDefinitionNode, ...] type: TypeNode class InputValueDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives", "type", "default_value" + __slots__ = "default_value", "description", "directives", "name", "type" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] type: TypeNode - default_value: Optional[ConstValueNode] + default_value: ConstValueNode | None class InterfaceTypeDefinitionNode(TypeDefinitionNode): __slots__ = "fields", "interfaces" - fields: Tuple["FieldDefinitionNode", ...] - directives: Tuple[ConstDirectiveNode, ...] - interfaces: Tuple[NamedTypeNode, ...] 
+ fields: tuple[FieldDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + interfaces: tuple[NamedTypeNode, ...] class UnionTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("types",) - directives: Tuple[ConstDirectiveNode, ...] - types: Tuple[NamedTypeNode, ...] + directives: tuple[ConstDirectiveNode, ...] + types: tuple[NamedTypeNode, ...] class EnumTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("values",) - directives: Tuple[ConstDirectiveNode, ...] - values: Tuple["EnumValueDefinitionNode", ...] + directives: tuple[ConstDirectiveNode, ...] + values: tuple[EnumValueDefinitionNode, ...] class EnumValueDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives" + __slots__ = "description", "directives", "name" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] class InputObjectTypeDefinitionNode(TypeDefinitionNode): __slots__ = ("fields",) - directives: Tuple[ConstDirectiveNode, ...] - fields: Tuple[InputValueDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + fields: tuple[InputValueDefinitionNode, ...] # Directive Definitions class DirectiveDefinitionNode(TypeSystemDefinitionNode): - __slots__ = "description", "name", "arguments", "repeatable", "locations" + __slots__ = "arguments", "description", "locations", "name", "repeatable" - description: Optional[StringValueNode] + description: StringValueNode | None name: NameNode - arguments: Tuple[InputValueDefinitionNode, ...] + arguments: tuple[InputValueDefinitionNode, ...] repeatable: bool - locations: Tuple[NameNode, ...] + locations: tuple[NameNode, ...] # Type System Extensions @@ -754,21 +808,21 @@ class DirectiveDefinitionNode(TypeSystemDefinitionNode): class SchemaExtensionNode(Node): __slots__ = "directives", "operation_types" - directives: Tuple[ConstDirectiveNode, ...] - operation_types: Tuple[OperationTypeDefinitionNode, ...] + directives: tuple[ConstDirectiveNode, ...] + operation_types: tuple[OperationTypeDefinitionNode, ...] # Type Extensions class TypeExtensionNode(TypeSystemDefinitionNode): - __slots__ = "name", "directives" + __slots__ = "directives", "name" name: NameNode - directives: Tuple[ConstDirectiveNode, ...] + directives: tuple[ConstDirectiveNode, ...] -TypeSystemExtensionNode = Union[SchemaExtensionNode, TypeExtensionNode] +TypeSystemExtensionNode: TypeAlias = Union[SchemaExtensionNode, TypeExtensionNode] class ScalarTypeExtensionNode(TypeExtensionNode): @@ -776,32 +830,32 @@ class ScalarTypeExtensionNode(TypeExtensionNode): class ObjectTypeExtensionNode(TypeExtensionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" - interfaces: Tuple[NamedTypeNode, ...] - fields: Tuple[FieldDefinitionNode, ...] + interfaces: tuple[NamedTypeNode, ...] + fields: tuple[FieldDefinitionNode, ...] class InterfaceTypeExtensionNode(TypeExtensionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" - interfaces: Tuple[NamedTypeNode, ...] - fields: Tuple[FieldDefinitionNode, ...] + interfaces: tuple[NamedTypeNode, ...] + fields: tuple[FieldDefinitionNode, ...] class UnionTypeExtensionNode(TypeExtensionNode): __slots__ = ("types",) - types: Tuple[NamedTypeNode, ...] + types: tuple[NamedTypeNode, ...] class EnumTypeExtensionNode(TypeExtensionNode): __slots__ = ("values",) - values: Tuple[EnumValueDefinitionNode, ...] + values: tuple[EnumValueDefinitionNode, ...] 
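
Likewise, a short sketch of a type extension node as produced by the parser (the SDL snippet is arbitrary):

    from graphql.language import parse

    doc = parse("extend enum Color { MAGENTA }")
    ext = doc.definitions[0]
    print(type(ext).__name__)  # EnumTypeExtensionNode
    print([value.name.value for value in ext.values])  # ['MAGENTA']
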
class InputObjectTypeExtensionNode(TypeExtensionNode): __slots__ = ("fields",) - fields: Tuple[InputValueDefinitionNode, ...] + fields: tuple[InputValueDefinitionNode, ...] diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index 9b9e2fb8..248927b4 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -1,5 +1,9 @@ -from typing import Collection, List +"""Helpers for block strings""" + +from __future__ import annotations + from sys import maxsize +from typing import Collection __all__ = [ "dedent_block_string_lines", @@ -8,7 +12,7 @@ ] -def dedent_block_string_lines(lines: Collection[str]) -> List[str]: +def dedent_block_string_lines(lines: Collection[str]) -> list[str]: """Produce the value of a block string from its parsed raw value. This function works similar to CoffeeScript's block string, @@ -93,7 +97,7 @@ def is_printable_as_block_string(value: str) -> bool: if is_empty_line: return False # has trailing empty lines - if has_common_indent and seen_non_empty_line: + if has_common_indent and seen_non_empty_line: # noqa: SIM103 return False # has internal indent return True @@ -145,8 +149,7 @@ def print_block_string(value: str, minimize: bool = False) -> str: skip_leading_new_line = is_single_line and value and value[0] in " \t" before = ( "\n" - if print_as_multiple_lines - and not skip_leading_new_line + if (print_as_multiple_lines and not skip_leading_new_line) or force_leading_new_line else "" ) diff --git a/src/graphql/language/character_classes.py b/src/graphql/language/character_classes.py index 0062a1fc..5d870576 100644 --- a/src/graphql/language/character_classes.py +++ b/src/graphql/language/character_classes.py @@ -1,68 +1,35 @@ -__all__ = ["is_digit", "is_letter", "is_name_start", "is_name_continue"] +"""Character classes""" -try: - "string".isascii() -except AttributeError: # Python < 3.7 +__all__ = ["is_digit", "is_letter", "is_name_continue", "is_name_start"] - def is_digit(char: str) -> bool: - """Check whether char is a digit - For internal use by the lexer only. - """ - return "0" <= char <= "9" +def is_digit(char: str) -> bool: + """Check whether char is a digit - def is_letter(char: str) -> bool: - """Check whether char is a plain ASCII letter + For internal use by the lexer only. + """ + return char.isascii() and char.isdigit() - For internal use by the lexer only. - """ - return "a" <= char <= "z" or "A" <= char <= "Z" - def is_name_start(char: str) -> bool: - """Check whether char is allowed at the beginning of a GraphQL name +def is_letter(char: str) -> bool: + """Check whether char is a plain ASCII letter - For internal use by the lexer only. - """ - return "a" <= char <= "z" or "A" <= char <= "Z" or char == "_" + For internal use by the lexer only. + """ + return char.isascii() and char.isalpha() - def is_name_continue(char: str) -> bool: - """Check whether char is allowed in the continuation of a GraphQL name - For internal use by the lexer only. - """ - return ( - "a" <= char <= "z" - or "A" <= char <= "Z" - or "0" <= char <= "9" - or char == "_" - ) +def is_name_start(char: str) -> bool: + """Check whether char is allowed at the beginning of a GraphQL name -else: + For internal use by the lexer only. + """ + return char.isascii() and (char.isalpha() or char == "_") - def is_digit(char: str) -> bool: - """Check whether char is a digit - For internal use by the lexer only. 
- """ - return char.isascii() and char.isdigit() +def is_name_continue(char: str) -> bool: + """Check whether char is allowed in the continuation of a GraphQL name - def is_letter(char: str) -> bool: - """Check whether char is a plain ASCII letter - - For internal use by the lexer only. - """ - return char.isascii() and char.isalpha() - - def is_name_start(char: str) -> bool: - """Check whether char is allowed at the beginning of a GraphQL name - - For internal use by the lexer only. - """ - return char.isascii() and (char.isalpha() or char == "_") - - def is_name_continue(char: str) -> bool: - """Check whether char is allowed in the continuation of a GraphQL name - - For internal use by the lexer only. - """ - return char.isascii() and (char.isalnum() or char == "_") + For internal use by the lexer only. + """ + return char.isascii() and (char.isalnum() or char == "_") diff --git a/src/graphql/language/directive_locations.py b/src/graphql/language/directive_locations.py index dfce34d9..f251658e 100644 --- a/src/graphql/language/directive_locations.py +++ b/src/graphql/language/directive_locations.py @@ -1,3 +1,5 @@ +"""Directive locations""" + from enum import Enum __all__ = ["DirectiveLocation"] diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index f41932bf..9ec37427 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -1,12 +1,18 @@ -from typing import List, NamedTuple, Optional +"""GraphQL Lexer""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple from ..error import GraphQLSyntaxError from .ast import Token from .block_string import dedent_block_string_lines -from .character_classes import is_digit, is_name_start, is_name_continue -from .source import Source +from .character_classes import is_digit, is_name_continue, is_name_start from .token_kind import TokenKind +if TYPE_CHECKING: + from .source import Source + __all__ = ["Lexer", "is_punctuator_token_kind"] @@ -26,7 +32,7 @@ class Lexer: EOF token whenever called. """ - def __init__(self, source: Source): + def __init__(self, source: Source) -> None: """Given a Source object, initialize a Lexer for that source.""" self.source = source self.token = self.last_token = Token(TokenKind.SOF, 0, 0, 0, 0) @@ -69,7 +75,7 @@ def print_code_point_at(self, location: int) -> str: return TokenKind.EOF.value char = body[location] # Printable ASCII - if "\x20" <= char <= "\x7E": + if "\x20" <= char <= "\x7e": return "'\"'" if char == '"' else f"'{char}'" # Unicode code point point = ord( @@ -82,7 +88,7 @@ def print_code_point_at(self, location: int) -> str: return f"U+{point:04X}" def create_token( - self, kind: TokenKind, start: int, end: int, value: Optional[str] = None + self, kind: TokenKind, start: int, end: int, value: str | None = None ) -> Token: """Create a token with line and column location information.""" line = self.line @@ -106,12 +112,12 @@ def read_next_token(self, start: int) -> Token: if char in " \t,\ufeff": position += 1 continue - elif char == "\n": + if char == "\n": position += 1 self.line += 1 self.line_start = position continue - elif char == "\r": + if char == "\r": if body[position + 1 : position + 2] == "\n": position += 2 else: @@ -138,9 +144,8 @@ def read_next_token(self, start: int) -> Token: if is_name_start(char): return self.read_name(position) - if char == ".": - if body[position + 1 : position + 3] == "..": - return self.create_token(TokenKind.SPREAD, position, position + 3) + if char == "." 
and body[position + 1 : position + 3] == "..": + return self.create_token(TokenKind.SPREAD, position, position + 3) message = ( "Unexpected single quote character (')," @@ -264,7 +269,7 @@ def read_string(self, start: int) -> Token: body_length = len(body) position = start + 1 chunk_start = position - value: List[str] = [] + value: list[str] = [] append = value.append while position < body_length: @@ -313,6 +318,7 @@ def read_string(self, start: int) -> Token: raise GraphQLSyntaxError(self.source, position, "Unterminated string.") def read_escaped_unicode_variable_width(self, position: int) -> EscapeSequence: + """Read escaped unicode with variable width""" body = self.source.body point = 0 size = 3 @@ -336,10 +342,11 @@ def read_escaped_unicode_variable_width(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid Unicode escape sequence: '{body[position: position + size]}'.", + f"Invalid Unicode escape sequence: '{body[position : position + size]}'.", ) def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: + """Read escaped unicode with fixed width""" body = self.source.body code = read_16_bit_hex_code(body, position + 2) @@ -348,24 +355,24 @@ def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: # GraphQL allows JSON-style surrogate pair escape sequences, but only when # a valid pair is formed. - if 0xD800 <= code <= 0xDBFF: - if body[position + 6 : position + 8] == "\\u": - trailing_code = read_16_bit_hex_code(body, position + 8) - if 0xDC00 <= trailing_code <= 0xDFFF: - return EscapeSequence( - (chr(code) + chr(trailing_code)) - .encode("utf-16", "surrogatepass") - .decode("utf-16"), - 12, - ) + if 0xD800 <= code <= 0xDBFF and body[position + 6 : position + 8] == "\\u": + trailing_code = read_16_bit_hex_code(body, position + 8) + if 0xDC00 <= trailing_code <= 0xDFFF: + return EscapeSequence( + (chr(code) + chr(trailing_code)) + .encode("utf-16", "surrogatepass") + .decode("utf-16"), + 12, + ) raise GraphQLSyntaxError( self.source, position, - f"Invalid Unicode escape sequence: '{body[position: position + 6]}'.", + f"Invalid Unicode escape sequence: '{body[position : position + 6]}'.", ) def read_escaped_character(self, position: int) -> EscapeSequence: + """Read escaped character sequence""" body = self.source.body value = _ESCAPED_CHARS.get(body[position + 1]) if value: @@ -373,7 +380,7 @@ def read_escaped_character(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid character escape sequence: '{body[position: position + 2]}'.", + f"Invalid character escape sequence: '{body[position : position + 2]}'.", ) def read_block_string(self, start: int) -> Token: @@ -457,6 +464,7 @@ def read_name(self, start: int) -> Token: _punctuator_token_kinds = frozenset( [ TokenKind.BANG, + TokenKind.QUESTION_MARK, TokenKind.DOLLAR, TokenKind.AMP, TokenKind.PAREN_L, @@ -484,6 +492,7 @@ def is_punctuator_token_kind(kind: TokenKind) -> bool: _KIND_FOR_PUNCT = { "!": TokenKind.BANG, + "?": TokenKind.QUESTION_MARK, "$": TokenKind.DOLLAR, "&": TokenKind.AMP, "(": TokenKind.PAREN_L, @@ -541,9 +550,9 @@ def read_hex_digit(char: str) -> int: """ if "0" <= char <= "9": return ord(char) - 48 - elif "A" <= char <= "F": + if "A" <= char <= "F": return ord(char) - 55 - elif "a" <= char <= "f": + if "a" <= char <= "f": return ord(char) - 87 return -1 @@ -559,8 +568,7 @@ def is_unicode_scalar_value(char: str) -> bool: def is_supplementary_code_point(body: str, location: int) -> 
bool: - """ - Check whether the current location is a supplementary code point. + """Check whether the current location is a supplementary code point. The GraphQL specification defines source text as a sequence of unicode scalar values (which Unicode defines to exclude surrogate code points). diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 41aec902..7af55082 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -1,4 +1,8 @@ -from typing import Any, NamedTuple, TYPE_CHECKING +"""Source locations""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple try: from typing import TypedDict @@ -6,9 +10,9 @@ from typing_extensions import TypedDict if TYPE_CHECKING: - from .source import Source # noqa: F401 + from .source import Source -__all__ = ["get_location", "SourceLocation", "FormattedSourceLocation"] +__all__ = ["FormattedSourceLocation", "SourceLocation", "get_location"] class FormattedSourceLocation(TypedDict): @@ -26,18 +30,19 @@ class SourceLocation(NamedTuple): @property def formatted(self) -> FormattedSourceLocation: - return dict(line=self.line, column=self.column) + """Get formatted source location.""" + return {"line": self.line, "column": self.column} - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: if isinstance(other, dict): return self.formatted == other return tuple(self) == other - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other -def get_location(source: "Source", position: int) -> SourceLocation: +def get_location(source: Source, position: int) -> SourceLocation: """Get the line and column for a character position in the source. Takes a Source and a UTF-8 character offset, and returns the corresponding line and diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 389913a5..59299a1d 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -1,6 +1,11 @@ -from typing import Callable, Dict, List, Optional, Union, TypeVar, cast +"""GraphQL parser""" + +from __future__ import annotations + from functools import partial +from typing import Callable, List, Mapping, TypeVar, Union, cast +from ..error import GraphQLError, GraphQLSyntaxError from .ast import ( ArgumentNode, BooleanValueNode, @@ -15,6 +20,7 @@ EnumTypeExtensionNode, EnumValueDefinitionNode, EnumValueNode, + ErrorBoundaryNode, FieldDefinitionNode, FieldNode, FloatValueNode, @@ -24,15 +30,18 @@ InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, InputValueDefinitionNode, - IntValueNode, InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, + IntValueNode, + ListNullabilityOperatorNode, ListTypeNode, ListValueNode, Location, - NameNode, NamedTypeNode, + NameNode, + NonNullAssertionNode, NonNullTypeNode, + NullabilityAssertionNode, NullValueNode, ObjectFieldNode, ObjectTypeDefinitionNode, @@ -48,6 +57,7 @@ SelectionNode, SelectionSetNode, StringValueNode, + Token, TypeNode, TypeSystemExtensionNode, UnionTypeDefinitionNode, @@ -57,23 +67,29 @@ VariableNode, ) from .directive_locations import DirectiveLocation -from .ast import Token from .lexer import Lexer, is_punctuator_token_kind from .source import Source, is_source from .token_kind import TokenKind -from ..error import GraphQLError, GraphQLSyntaxError -__all__ = ["parse", "parse_type", "parse_value", "parse_const_value"] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from 
typing_extensions import TypeAlias + + +__all__ = ["parse", "parse_const_value", "parse_type", "parse_value"] T = TypeVar("T") -SourceType = Union[Source, str] +SourceType: TypeAlias = Union[Source, str] def parse( source: SourceType, no_location: bool = False, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, + experimental_client_controlled_nullability: bool = False, ) -> DocumentNode: """Given a GraphQL source, parse it into a Document. @@ -83,6 +99,12 @@ def parse( they correspond to. The ``no_location`` option disables that behavior for performance or testing. + Parser CPU and memory usage is linear to the number of tokens in a document, + however in extreme cases it becomes quadratic due to memory exhaustion. + Parsing happens before validation so even invalid queries can burn lots of + CPU time and memory. + To prevent this you can set a maximum number of tokens allowed within a document. + Legacy feature (will be removed in v3.3): If ``allow_legacy_fragment_variables`` is set to ``True``, the parser will @@ -96,11 +118,32 @@ def parse( fragment A($var: Boolean = false) on T { ... } + + EXPERIMENTAL: + + If enabled, the parser will understand and parse Client Controlled Nullability + Designators contained in Fields. They'll be represented in the + :attr:`~graphql.language.FieldNode.nullability_assertion` field + of the :class:`~graphql.language.FieldNode`. + + The syntax looks like the following:: + + { + nullableField! + nonNullableField? + nonNullableSelectionSet? { + childField! + } + } + + Note: this feature is experimental and may change or be removed in the future. """ parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, + experimental_client_controlled_nullability=experimental_client_controlled_nullability, ) return parser.parse_document() @@ -108,6 +151,7 @@ def parse( def parse_value( source: SourceType, no_location: bool = False, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, ) -> ValueNode: """Parse the AST for a given string containing a GraphQL value. @@ -123,6 +167,7 @@ def parse_value( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, ) parser.expect_token(TokenKind.SOF) @@ -134,6 +179,7 @@ def parse_value( def parse_const_value( source: SourceType, no_location: bool = False, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, ) -> ConstValueNode: """Parse the AST for a given string containing a GraphQL constant value. @@ -144,6 +190,7 @@ def parse_const_value( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, ) parser.expect_token(TokenKind.SOF) @@ -155,6 +202,7 @@ def parse_const_value( def parse_type( source: SourceType, no_location: bool = False, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, ) -> TypeNode: """Parse the AST for a given string containing a GraphQL Type. @@ -170,6 +218,7 @@ def parse_type( parser = Parser( source, no_location=no_location, + max_tokens=max_tokens, allow_legacy_fragment_variables=allow_legacy_fragment_variables, ) parser.expect_token(TokenKind.SOF) @@ -190,23 +239,32 @@ class Parser: library, please use the `__version_info__` variable for version detection. 
""" - _lexer: Lexer - _no_Location: bool + _no_location: bool + _max_tokens: int | None _allow_legacy_fragment_variables: bool + _experimental_client_controlled_nullability: bool + _lexer: Lexer + _token_counter: int def __init__( self, source: SourceType, no_location: bool = False, + max_tokens: int | None = None, allow_legacy_fragment_variables: bool = False, - ): - source = ( - cast(Source, source) if is_source(source) else Source(cast(str, source)) - ) + experimental_client_controlled_nullability: bool = False, + ) -> None: + if not is_source(source): + source = Source(cast("str", source)) - self._lexer = Lexer(source) self._no_location = no_location + self._max_tokens = max_tokens self._allow_legacy_fragment_variables = allow_legacy_fragment_variables + self._experimental_client_controlled_nullability = ( + experimental_client_controlled_nullability + ) + self._lexer = Lexer(source) + self._token_counter = 0 def parse_name(self) -> NameNode: """Convert a name lex token into a name parse node.""" @@ -223,7 +281,7 @@ def parse_document(self) -> DocumentNode: loc=self.loc(start), ) - _parse_type_system_definition_method_names: Dict[str, str] = { + _parse_type_system_definition_method_names: Mapping[str, str] = { "schema": "schema_definition", "scalar": "scalar_type_definition", "type": "object_type_definition", @@ -234,7 +292,7 @@ def parse_document(self) -> DocumentNode: "directive": "directive_definition", } - _parse_other_definition_method_names: Dict[str, str] = { + _parse_other_definition_method_names: Mapping[str, str] = { **dict.fromkeys(("query", "mutation", "subscription"), "operation_definition"), "fragment": "fragment_definition", "extend": "type_system_extension", @@ -261,7 +319,7 @@ def parse_definition(self) -> DefinitionNode: ) if keyword_token.kind is TokenKind.NAME: - token_name = cast(str, keyword_token.value) + token_name = cast("str", keyword_token.value) method_name = self._parse_type_system_definition_method_names.get( token_name ) @@ -312,10 +370,10 @@ def parse_operation_type(self) -> OperationType: operation_token = self.expect_token(TokenKind.NAME) try: return OperationType(operation_token.value) - except ValueError: - raise self.unexpected(operation_token) + except ValueError as error: + raise self.unexpected(operation_token) from error - def parse_variable_definitions(self) -> List[VariableDefinitionNode]: + def parse_variable_definitions(self) -> list[VariableDefinitionNode]: """VariableDefinitions: (VariableDefinition+)""" return self.optional_many( TokenKind.PAREN_L, self.parse_variable_definition, TokenKind.PAREN_R @@ -361,7 +419,7 @@ def parse_field(self) -> FieldNode: start = self._lexer.token name_or_alias = self.parse_name() if self.expect_optional_token(TokenKind.COLON): - alias: Optional[NameNode] = name_or_alias + alias: NameNode | None = name_or_alias name = self.parse_name() else: alias = None @@ -370,6 +428,9 @@ def parse_field(self) -> FieldNode: alias=alias, name=name, arguments=self.parse_arguments(False), + # Experimental support for Client Controlled Nullability changes + # the grammar of Field: + nullability_assertion=self.parse_nullability_assertion(), directives=self.parse_directives(False), selection_set=self.parse_selection_set() if self.peek(TokenKind.BRACE_L) @@ -377,11 +438,44 @@ def parse_field(self) -> FieldNode: loc=self.loc(start), ) - def parse_arguments(self, is_const: bool) -> List[ArgumentNode]: + def parse_nullability_assertion(self) -> NullabilityAssertionNode | None: + """NullabilityAssertion (grammar not yet finalized) + 
+ # Note: Client Controlled Nullability is experimental and may be changed or + # removed in the future. + """ + if not self._experimental_client_controlled_nullability: + return None + + start = self._lexer.token + nullability_assertion: NullabilityAssertionNode | None = None + + if self.expect_optional_token(TokenKind.BRACKET_L): + inner_modifier = self.parse_nullability_assertion() + self.expect_token(TokenKind.BRACKET_R) + nullability_assertion = ListNullabilityOperatorNode( + nullability_assertion=inner_modifier, loc=self.loc(start) + ) + + if self.expect_optional_token(TokenKind.BANG): + nullability_assertion = NonNullAssertionNode( + nullability_assertion=nullability_assertion, loc=self.loc(start) + ) + elif self.expect_optional_token(TokenKind.QUESTION_MARK): + nullability_assertion = ErrorBoundaryNode( + nullability_assertion=nullability_assertion, loc=self.loc(start) + ) + + return nullability_assertion + + def parse_arguments(self, is_const: bool) -> list[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument - item = cast(Callable[[], ArgumentNode], item) - return self.optional_many(TokenKind.PAREN_L, item, TokenKind.PAREN_R) + return self.optional_many( + TokenKind.PAREN_L, + cast("Callable[[], ArgumentNode]", item), + TokenKind.PAREN_R, + ) def parse_argument(self, is_const: bool = False) -> ArgumentNode: """Argument[Const]: Name : Value[?Const]""" @@ -395,11 +489,11 @@ def parse_argument(self, is_const: bool = False) -> ArgumentNode: def parse_const_argument(self) -> ConstArgumentNode: """Argument[Const]: Name : Value[Const]""" - return cast(ConstArgumentNode, self.parse_argument(True)) + return cast("ConstArgumentNode", self.parse_argument(True)) # Implement the parsing rules in the Fragments section. - def parse_fragment(self) -> Union[FragmentSpreadNode, InlineFragmentNode]: + def parse_fragment(self) -> FragmentSpreadNode | InlineFragmentNode: """Corresponds to both FragmentSpread and InlineFragment in the spec. FragmentSpread: ... FragmentName Directives? @@ -458,7 +552,7 @@ def parse_type_condition(self) -> NamedTypeNode: # Implement the parsing rules in the Values section. 
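A quick sketch of the syntax this enables — assuming the top-level parse() helper forwards the new keyword argument (that wrapper is outside these hunks) and that the new AST node classes are re-exported from graphql.language like the existing ones:

    from graphql.language import ErrorBoundaryNode, NonNullAssertionNode, parse

    doc = parse(
        "{ me { name! friends[!]? } }",
        experimental_client_controlled_nullability=True,
    )
    me = doc.definitions[0].selection_set.selections[0]
    name, friends = me.selection_set.selections
    assert isinstance(name.nullability_assertion, NonNullAssertionNode)
    # `[!]?` parses as an ErrorBoundaryNode wrapping a ListNullabilityOperatorNode:
    assert isinstance(friends.nullability_assertion, ErrorBoundaryNode)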
- _parse_value_literal_method_names: Dict[TokenKind, str] = { + _parse_value_literal_method_names: Mapping[TokenKind, str] = { TokenKind.BRACKET_L: "list", TokenKind.BRACE_L: "object", TokenKind.INT: "int", @@ -477,7 +571,7 @@ def parse_value_literal(self, is_const: bool) -> ValueNode: def parse_string_literal(self, _is_const: bool = False) -> StringValueNode: token = self._lexer.token - self._lexer.advance() + self.advance_lexer() return StringValueNode( value=token.value, block=token.kind == TokenKind.BLOCK_STRING, @@ -514,18 +608,18 @@ def parse_object(self, is_const: bool) -> ObjectValueNode: def parse_int(self, _is_const: bool = False) -> IntValueNode: token = self._lexer.token - self._lexer.advance() + self.advance_lexer() return IntValueNode(value=token.value, loc=self.loc(token)) def parse_float(self, _is_const: bool = False) -> FloatValueNode: token = self._lexer.token - self._lexer.advance() + self.advance_lexer() return FloatValueNode(value=token.value, loc=self.loc(token)) def parse_named_values(self, _is_const: bool = False) -> ValueNode: token = self._lexer.token value = token.value - self._lexer.advance() + self.advance_lexer() if value == "true": return BooleanValueNode(value=True, loc=self.loc(token)) if value == "false": @@ -549,20 +643,20 @@ def parse_variable_value(self, is_const: bool) -> VariableNode: return self.parse_variable() def parse_const_value_literal(self) -> ConstValueNode: - return cast(ConstValueNode, self.parse_value_literal(True)) + return cast("ConstValueNode", self.parse_value_literal(True)) # Implement the parsing rules in the Directives section. - def parse_directives(self, is_const: bool) -> List[DirectiveNode]: + def parse_directives(self, is_const: bool) -> list[DirectiveNode]: """Directives[Const]: Directive[?Const]+""" - directives: List[DirectiveNode] = [] + directives: list[DirectiveNode] = [] append = directives.append while self.peek(TokenKind.AT): append(self.parse_directive(is_const)) return directives - def parse_const_directives(self) -> List[ConstDirectiveNode]: - return cast(List[ConstDirectiveNode], self.parse_directives(True)) + def parse_const_directives(self) -> list[ConstDirectiveNode]: + return cast("List[ConstDirectiveNode]", self.parse_directives(True)) def parse_directive(self, is_const: bool) -> DirectiveNode: """Directive[Const]: @ Name Arguments[?Const]?""" @@ -597,7 +691,7 @@ def parse_named_type(self) -> NamedTypeNode: # Implement the parsing rules in the Type Definition section. 
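The `self.advance_lexer()` calls substituted above all route through the token counter that enforces the new max_tokens limit (the helper itself is defined near the end of this file's diff). A sketch of the resulting behaviour, assuming parse() forwards the keyword:

    from graphql import GraphQLSyntaxError
    from graphql.language import parse

    try:
        parse("{ a b c d e }", max_tokens=3)
    except GraphQLSyntaxError as error:
        print(error.message)
        # Document contains more than 3 tokens. Parsing aborted.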
- _parse_type_extension_method_names: Dict[str, str] = { + _parse_type_extension_method_names: Mapping[str, str] = { "schema": "schema_extension", "scalar": "scalar_type_extension", "type": "object_type_extension", @@ -612,7 +706,7 @@ def parse_type_system_extension(self) -> TypeSystemExtensionNode: keyword_token = self._lexer.lookahead() if keyword_token.kind == TokenKind.NAME: method_name = self._parse_type_extension_method_names.get( - cast(str, keyword_token.value) + cast("str", keyword_token.value) ) if method_name: # pragma: no cover return getattr(self, f"parse_{method_name}")() @@ -621,7 +715,7 @@ def parse_type_system_extension(self) -> TypeSystemExtensionNode: def peek_description(self) -> bool: return self.peek(TokenKind.STRING) or self.peek(TokenKind.BLOCK_STRING) - def parse_description(self) -> Optional[StringValueNode]: + def parse_description(self) -> StringValueNode | None: """Description: StringValue""" if self.peek_description(): return self.parse_string_literal() @@ -685,7 +779,7 @@ def parse_object_type_definition(self) -> ObjectTypeDefinitionNode: loc=self.loc(start), ) - def parse_implements_interfaces(self) -> List[NamedTypeNode]: + def parse_implements_interfaces(self) -> list[NamedTypeNode]: """ImplementsInterfaces""" return ( self.delimited_many(TokenKind.AMP, self.parse_named_type) @@ -693,7 +787,7 @@ def parse_implements_interfaces(self) -> List[NamedTypeNode]: else [] ) - def parse_fields_definition(self) -> List[FieldDefinitionNode]: + def parse_fields_definition(self) -> list[FieldDefinitionNode]: """FieldsDefinition: {FieldDefinition+}""" return self.optional_many( TokenKind.BRACE_L, self.parse_field_definition, TokenKind.BRACE_R @@ -717,7 +811,7 @@ def parse_field_definition(self) -> FieldDefinitionNode: loc=self.loc(start), ) - def parse_argument_defs(self) -> List[InputValueDefinitionNode]: + def parse_argument_defs(self) -> list[InputValueDefinitionNode]: """ArgumentsDefinition: (InputValueDefinition+)""" return self.optional_many( TokenKind.PAREN_L, self.parse_input_value_def, TokenKind.PAREN_R @@ -779,7 +873,7 @@ def parse_union_type_definition(self) -> UnionTypeDefinitionNode: loc=self.loc(start), ) - def parse_union_member_types(self) -> List[NamedTypeNode]: + def parse_union_member_types(self) -> list[NamedTypeNode]: """UnionMemberTypes""" return ( self.delimited_many(TokenKind.PIPE, self.parse_named_type) @@ -803,7 +897,7 @@ def parse_enum_type_definition(self) -> EnumTypeDefinitionNode: loc=self.loc(start), ) - def parse_enum_values_definition(self) -> List[EnumValueDefinitionNode]: + def parse_enum_values_definition(self) -> list[EnumValueDefinitionNode]: """EnumValuesDefinition: {EnumValueDefinition+}""" return self.optional_many( TokenKind.BRACE_L, self.parse_enum_value_definition, TokenKind.BRACE_R @@ -849,7 +943,7 @@ def parse_input_object_type_definition(self) -> InputObjectTypeDefinitionNode: loc=self.loc(start), ) - def parse_input_fields_definition(self) -> List[InputValueDefinitionNode]: + def parse_input_fields_definition(self) -> list[InputValueDefinitionNode]: """InputFieldsDefinition: {InputValueDefinition+}""" return self.optional_many( TokenKind.BRACE_L, self.parse_input_value_def, TokenKind.BRACE_R @@ -983,7 +1077,7 @@ def parse_directive_definition(self) -> DirectiveDefinitionNode: loc=self.loc(start), ) - def parse_directive_locations(self) -> List[NameNode]: + def parse_directive_locations(self) -> list[NameNode]: """DirectiveLocations""" return self.delimited_many(TokenKind.PIPE, self.parse_directive_location) @@ -997,7 
+1091,7 @@ def parse_directive_location(self) -> NameNode: # Core parsing utility functions - def loc(self, start_token: Token) -> Optional[Location]: + def loc(self, start_token: Token) -> Location | None: """Return a location object. Used to identify the place in the source that created a given parsed object. @@ -1020,7 +1114,7 @@ def expect_token(self, kind: TokenKind) -> Token: """ token = self._lexer.token if token.kind == kind: - self._lexer.advance() + self.advance_lexer() return token raise GraphQLSyntaxError( @@ -1037,7 +1131,7 @@ def expect_optional_token(self, kind: TokenKind) -> bool: """ token = self._lexer.token if token.kind == kind: - self._lexer.advance() + self.advance_lexer() return True return False @@ -1050,7 +1144,7 @@ def expect_keyword(self, value: str) -> None: """ token = self._lexer.token if token.kind == TokenKind.NAME and token.value == value: - self._lexer.advance() + self.advance_lexer() else: raise GraphQLSyntaxError( self._lexer.source, @@ -1066,12 +1160,12 @@ def expect_optional_keyword(self, value: str) -> bool: """ token = self._lexer.token if token.kind == TokenKind.NAME and token.value == value: - self._lexer.advance() + self.advance_lexer() return True return False - def unexpected(self, at_token: Optional[Token] = None) -> GraphQLError: + def unexpected(self, at_token: Token | None = None) -> GraphQLError: """Create an error when an unexpected lexed token is encountered.""" token = at_token or self._lexer.token return GraphQLSyntaxError( @@ -1080,7 +1174,7 @@ def unexpected(self, at_token: Optional[Token] = None) -> GraphQLError: def any( self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind - ) -> List[T]: + ) -> list[T]: """Fetch any matching nodes, possibly none. Returns a possibly empty list of parse nodes, determined by the ``parse_fn``. @@ -1089,7 +1183,7 @@ def any( token. """ self.expect_token(open_kind) - nodes: List[T] = [] + nodes: list[T] = [] append = nodes.append expect_optional_token = partial(self.expect_optional_token, close_kind) while not expect_optional_token(): @@ -1098,7 +1192,7 @@ def any( def optional_many( self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind - ) -> List[T]: + ) -> list[T]: """Fetch matching nodes, maybe none. Returns a list of parse nodes, determined by the ``parse_fn``. It can be empty @@ -1118,7 +1212,7 @@ def optional_many( def many( self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind - ) -> List[T]: + ) -> list[T]: """Fetch matching nodes, at least one. Returns a non-empty list of parse nodes, determined by the ``parse_fn``. This @@ -1136,7 +1230,7 @@ def many( def delimited_many( self, delimiter_kind: TokenKind, parse_fn: Callable[[], T] - ) -> List[T]: + ) -> list[T]: """Fetch many delimited nodes. Returns a non-empty list of parse nodes, determined by the ``parse_fn``. 
This @@ -1146,7 +1240,7 @@ def delimited_many( """ expect_optional_token = partial(self.expect_optional_token, delimiter_kind) expect_optional_token() - nodes: List[T] = [] + nodes: list[T] = [] append = nodes.append while True: append(parse_fn()) @@ -1154,6 +1248,20 @@ def delimited_many( break return nodes + def advance_lexer(self) -> None: + """Advance the lexer.""" + token = self._lexer.advance() + max_tokens = self._max_tokens + if max_tokens is not None and token.kind is not TokenKind.EOF: + self._token_counter += 1 + if self._token_counter > max_tokens: + raise GraphQLSyntaxError( + self._lexer.source, + token.start, + f"Document contains more than {max_tokens} tokens." + " Parsing aborted.", + ) + def get_token_desc(token: Token) -> str: """Describe a token as a string for debugging.""" diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index 24d7c7a5..280662f8 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -1,8 +1,13 @@ +"""Predicates for GraphQL nodes""" + +from __future__ import annotations + from .ast import ( - Node, DefinitionNode, ExecutableDefinitionNode, ListValueNode, + Node, + NullabilityAssertionNode, ObjectValueNode, SchemaExtensionNode, SelectionNode, @@ -14,41 +19,53 @@ VariableNode, ) +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + + __all__ = [ + "is_const_value_node", "is_definition_node", "is_executable_definition_node", + "is_nullability_assertion_node", "is_selection_node", - "is_value_node", - "is_const_value_node", + "is_type_definition_node", + "is_type_extension_node", "is_type_node", "is_type_system_definition_node", - "is_type_definition_node", "is_type_system_extension_node", - "is_type_extension_node", + "is_value_node", ] -def is_definition_node(node: Node) -> bool: +def is_definition_node(node: Node) -> TypeGuard[DefinitionNode]: """Check whether the given node represents a definition.""" return isinstance(node, DefinitionNode) -def is_executable_definition_node(node: Node) -> bool: +def is_executable_definition_node(node: Node) -> TypeGuard[ExecutableDefinitionNode]: """Check whether the given node represents an executable definition.""" return isinstance(node, ExecutableDefinitionNode) -def is_selection_node(node: Node) -> bool: +def is_selection_node(node: Node) -> TypeGuard[SelectionNode]: """Check whether the given node represents a selection.""" return isinstance(node, SelectionNode) -def is_value_node(node: Node) -> bool: +def is_nullability_assertion_node(node: Node) -> TypeGuard[NullabilityAssertionNode]: + """Check whether the given node represents a nullability assertion node.""" + return isinstance(node, NullabilityAssertionNode) + + +def is_value_node(node: Node) -> TypeGuard[ValueNode]: """Check whether the given node represents a value.""" return isinstance(node, ValueNode) -def is_const_value_node(node: Node) -> bool: +def is_const_value_node(node: Node) -> TypeGuard[ValueNode]: """Check whether the given node represents a constant value.""" return is_value_node(node) and ( any(is_const_value_node(value) for value in node.values) @@ -59,26 +76,28 @@ def is_const_value_node(node: Node) -> bool: ) -def is_type_node(node: Node) -> bool: +def is_type_node(node: Node) -> TypeGuard[TypeNode]: """Check whether the given node represents a type.""" return isinstance(node, TypeNode) -def is_type_system_definition_node(node: Node) -> bool: +def is_type_system_definition_node(node: Node) -> 
TypeGuard[TypeSystemDefinitionNode]: """Check whether the given node represents a type system definition.""" return isinstance(node, TypeSystemDefinitionNode) -def is_type_definition_node(node: Node) -> bool: +def is_type_definition_node(node: Node) -> TypeGuard[TypeDefinitionNode]: """Check whether the given node represents a type definition.""" return isinstance(node, TypeDefinitionNode) -def is_type_system_extension_node(node: Node) -> bool: +def is_type_system_extension_node( + node: Node, +) -> TypeGuard[SchemaExtensionNode | TypeExtensionNode]: """Check whether the given node represents a type system extension.""" return isinstance(node, (SchemaExtensionNode, TypeExtensionNode)) -def is_type_extension_node(node: Node) -> bool: +def is_type_extension_node(node: Node) -> TypeGuard[TypeExtensionNode]: """Check whether the given node represents a type extension.""" return isinstance(node, TypeExtensionNode) diff --git a/src/graphql/language/print_location.py b/src/graphql/language/print_location.py index 6d13b1e1..21fb1b8a 100644 --- a/src/graphql/language/print_location.py +++ b/src/graphql/language/print_location.py @@ -1,10 +1,15 @@ +"""Print location in GraphQL source""" + +from __future__ import annotations + import re -from typing import Optional, Tuple, cast +from typing import TYPE_CHECKING, Tuple, cast -from .ast import Location from .location import SourceLocation, get_location -from .source import Source +if TYPE_CHECKING: + from .ast import Location + from .source import Source __all__ = ["print_location", "print_source_location"] @@ -65,10 +70,10 @@ def print_source_location(source: Source, source_location: SourceLocation) -> st ) -def print_prefixed_lines(*lines: Tuple[str, Optional[str]]) -> str: +def print_prefixed_lines(*lines: tuple[str, str | None]) -> str: """Print lines specified like this: ("prefix", "string")""" existing_lines = [ - cast(Tuple[str, str], line) for line in lines if line[1] is not None + cast("Tuple[str, str]", line) for line in lines if line[1] is not None ] pad_len = max(len(line[0]) for line in existing_lines) return "\n".join( diff --git a/src/graphql/language/print_string.py b/src/graphql/language/print_string.py index f390b9d0..c90c67c6 100644 --- a/src/graphql/language/print_string.py +++ b/src/graphql/language/print_string.py @@ -1,10 +1,12 @@ +"""Print a string as a GraphQL expression.""" + __all__ = ["print_string"] def print_string(s: str) -> str: - """Print a string as a GraphQL StringValue literal. + r"""Print a string as a GraphQL StringValue literal. - Replaces control characters and excluded characters (" U+0022 and \\ U+005C) + Replaces control characters and excluded characters (" U+0022 and \ U+005C) with escape sequences. 
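For reference, the escaping documented above in action (a sketch; print_string itself is unchanged apart from the docstring):

    from graphql.language.print_string import print_string

    print(print_string('He said "hi"\n'))  # "He said \"hi\"\n"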
""" if not isinstance(s, str): diff --git a/src/graphql/language/printer.py b/src/graphql/language/printer.py index 07ed1b15..d4898b06 100644 --- a/src/graphql/language/printer.py +++ b/src/graphql/language/printer.py @@ -1,16 +1,28 @@ -from typing import Any, Collection, Optional +"""Print AST""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Collection -from ..language.ast import Node, OperationType from .block_string import print_block_string from .print_string import print_string -from .visitor import visit, Visitor +from .visitor import Visitor, visit + +if TYPE_CHECKING: + from ..language.ast import Node, OperationType + +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + __all__ = ["print_ast"] MAX_LINE_LENGTH = 80 -Strings = Collection[str] +Strings: TypeAlias = Collection[str] class PrintedNode: @@ -27,6 +39,7 @@ class PrintedNode: interfaces: Strings locations: Strings name: str + nullability_assertion: str operation: OperationType operation_types: Strings repeatable: bool @@ -93,18 +106,41 @@ def leave_selection_set(node: PrintedNode, *_args: Any) -> str: @staticmethod def leave_field(node: PrintedNode, *_args: Any) -> str: - prefix = wrap("", node.alias, ": ") + node.name + prefix = join((wrap("", node.alias, ": "), node.name)) args_line = prefix + wrap("(", join(node.arguments, ", "), ")") if len(args_line) > MAX_LINE_LENGTH: args_line = prefix + wrap("(\n", indent(join(node.arguments, "\n")), "\n)") - return join((args_line, join(node.directives, " "), node.selection_set), " ") + return join( + ( + args_line, + # Note: Client Controlled Nullability is experimental and may be + # changed or removed in the future. + node.nullability_assertion, + wrap(" ", join(node.directives, " ")), + wrap(" ", node.selection_set), + ), + ) @staticmethod def leave_argument(node: PrintedNode, *_args: Any) -> str: return f"{node.name}: {node.value}" + # Nullability Modifiers + + @staticmethod + def leave_list_nullability_operator(node: PrintedNode, *_args: Any) -> str: + return join(("[", node.nullability_assertion, "]")) + + @staticmethod + def leave_non_null_assertion(node: PrintedNode, *_args: Any) -> str: + return join((node.nullability_assertion, "!")) + + @staticmethod + def leave_error_boundary(node: PrintedNode, *_args: Any) -> str: + return join((node.nullability_assertion, "?")) + # Fragments @staticmethod @@ -164,11 +200,19 @@ def leave_enum_value(node: PrintedNode, *_args: Any) -> str: @staticmethod def leave_list_value(node: PrintedNode, *_args: Any) -> str: - return f"[{join(node.values, ', ')}]" + values = node.values + values_line = f"[{join(values, ', ')}]" + return ( + "\n".join(("[", indent(join(values, "\n")), "]")) + if len(values_line) > 80 + else values_line + ) @staticmethod def leave_object_value(node: PrintedNode, *_args: Any) -> str: - return f"{{{join(node.fields, ', ')}}}" + fields = node.fields + fields_line = f"{{ {join(fields, ', ')} }}" + return block(fields) if len(fields_line) > MAX_LINE_LENGTH else fields_line @staticmethod def leave_object_field(node: PrintedNode, *_args: Any) -> str: @@ -382,7 +426,7 @@ def leave_input_object_type_extension(node: PrintedNode, *_args: Any) -> str: ) -def join(strings: Optional[Strings], separator: str = "") -> str: +def join(strings: Strings | None, separator: str = "") -> str: """Join strings in a given collection. 
Return an empty string if it is None or empty, otherwise join all items together @@ -391,7 +435,7 @@ def join(strings: Optional[Strings], separator: str = "") -> str: return separator.join(s for s in strings if s) if strings else "" -def block(strings: Optional[Strings]) -> str: +def block(strings: Strings | None) -> str: """Return strings inside a block. Given a collection of strings, return a string with each item on its own line, @@ -400,7 +444,7 @@ def block(strings: Optional[Strings]) -> str: return wrap("{\n", indent(join(strings, "\n")), "\n}") -def wrap(start: str, string: Optional[str], end: str = "") -> str: +def wrap(start: str, string: str | None, end: str = "") -> str: """Wrap string inside other strings at start and end. If the string is not None or empty, then wrap with start and end, otherwise return @@ -423,6 +467,6 @@ def is_multiline(string: str) -> bool: return "\n" in string -def has_multiline_items(strings: Optional[Strings]) -> bool: +def has_multiline_items(strings: Strings | None) -> bool: """Check whether one of the items in the list has multiple lines.""" return any(is_multiline(item) for item in strings) if strings else False diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index 4143c13a..d54bf969 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -1,21 +1,33 @@ +"""GraphQL source input""" + +from __future__ import annotations + from typing import Any from .location import SourceLocation +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + __all__ = ["Source", "is_source"] +DEFAULT_NAME = "GraphQL request" +DEFAULT_SOURCE_LOCATION = SourceLocation(1, 1) + class Source: """A representation of source input to GraphQL.""" # allow custom attributes and weak references (not used internally) - __slots__ = "__weakref__", "__dict__", "body", "name", "location_offset" + __slots__ = "__dict__", "__weakref__", "body", "location_offset", "name" def __init__( self, body: str, - name: str = "GraphQL request", - location_offset: SourceLocation = SourceLocation(1, 1), + name: str = DEFAULT_NAME, + location_offset: SourceLocation = DEFAULT_SOURCE_LOCATION, ) -> None: """Initialize source input. @@ -31,16 +43,15 @@ def __init__( if not isinstance(location_offset, SourceLocation): location_offset = SourceLocation._make(location_offset) if location_offset.line <= 0: - raise ValueError( - "line in location_offset is 1-indexed and must be positive." - ) + msg = "line in location_offset is 1-indexed and must be positive." + raise ValueError(msg) if location_offset.column <= 0: - raise ValueError( - "column in location_offset is 1-indexed and must be positive." - ) + msg = "column in location_offset is 1-indexed and must be positive." 
+ raise ValueError(msg) self.location_offset = location_offset def get_location(self, position: int) -> SourceLocation: + """Get source location.""" lines = self.body[:position].splitlines() if lines: line = len(lines) @@ -53,16 +64,16 @@ def get_location(self, position: int) -> SourceLocation: def __repr__(self) -> str: return f"<{self.__class__.__name__} name={self.name!r}>" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return (isinstance(other, Source) and other.body == self.body) or ( isinstance(other, str) and other == self.body ) - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other -def is_source(source: Any) -> bool: +def is_source(source: Any) -> TypeGuard[Source]: """Test if the given value is a Source object. For internal use only. diff --git a/src/graphql/language/token_kind.py b/src/graphql/language/token_kind.py index 45f6e82a..f4eda1c5 100644 --- a/src/graphql/language/token_kind.py +++ b/src/graphql/language/token_kind.py @@ -1,3 +1,5 @@ +"""Token kinds""" + from enum import Enum __all__ = ["TokenKind"] @@ -9,6 +11,7 @@ class TokenKind(Enum): SOF = "" EOF = "" BANG = "!" + QUESTION_MARK = "?" DOLLAR = "$" AMP = "&" PAREN_L = "(" diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 996c7194..c9901230 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -1,3 +1,7 @@ +"""AST Visitor""" + +from __future__ import annotations + from copy import copy from enum import Enum from typing import ( @@ -5,27 +9,31 @@ Callable, Collection, Dict, - List, NamedTuple, Optional, Tuple, - Union, ) from ..pyutils import inspect, snake_to_camel from . import ast +from .ast import QUERY_DOCUMENT_KEYS, Node + +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias -from .ast import Node, QUERY_DOCUMENT_KEYS __all__ = [ - "Visitor", + "BREAK", + "IDLE", + "REMOVE", + "SKIP", "ParallelVisitor", + "Visitor", "VisitorAction", + "VisitorKeyMap", "visit", - "BREAK", - "SKIP", - "REMOVE", - "IDLE", ] @@ -40,9 +48,9 @@ class VisitorActionEnum(Enum): REMOVE = Ellipsis -VisitorAction = Optional[VisitorActionEnum] +VisitorAction: TypeAlias = Optional[VisitorActionEnum] -# Note that in GraphQL.js these are defined differently: +# Note that in GraphQL.js these are defined *differently*: # BREAK = {}, SKIP = false, REMOVE = null, IDLE = undefined BREAK = VisitorActionEnum.BREAK @@ -50,14 +58,14 @@ class VisitorActionEnum(Enum): REMOVE = VisitorActionEnum.REMOVE IDLE = None -VisitorKeyMap = Dict[str, Tuple[str, ...]] +VisitorKeyMap: TypeAlias = Dict[str, Tuple[str, ...]] class EnterLeaveVisitor(NamedTuple): """Visitor with functions for entering and leaving.""" - enter: Optional[Callable[..., Optional[VisitorAction]]] - leave: Optional[Callable[..., Optional[VisitorAction]]] + enter: Callable[..., VisitorAction | None] | None + leave: Callable[..., VisitorAction | None] | None class Visitor: @@ -96,7 +104,7 @@ def leave(self, node, key, parent, path, ancestors): You can also define node kind specific methods by suffixing them with an underscore followed by the kind of the node to be visited. For instance, to visit ``field`` - nodes, you would defined the methods ``enter_field()`` and/or ``leave_field()``, + nodes, you would define the methods ``enter_field()`` and/or ``leave_field()``, with the same signature as above. 
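A minimal sketch of the kind-specific handlers this docstring describes:

    from graphql.language import FieldNode, Visitor, parse, visit

    class FieldCounter(Visitor):
        def __init__(self) -> None:
            super().__init__()
            self.count = 0

        def enter_field(self, node: FieldNode, *_args):
            self.count += 1

    counter = FieldCounter()
    visit(parse("{ a b { c } }"), counter)
    assert counter.count == 3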
If no kind specific method has been defined for a given node, the generic method is called. """ @@ -104,19 +112,19 @@ def leave(self, node, key, parent, path, ancestors): # Provide special return values as attributes BREAK, SKIP, REMOVE, IDLE = BREAK, SKIP, REMOVE, IDLE - enter_leave_map: Dict[str, EnterLeaveVisitor] + enter_leave_map: dict[str, EnterLeaveVisitor] def __init_subclass__(cls) -> None: """Verify that all defined handlers are valid.""" super().__init_subclass__() - for attr, val in cls.__dict__.items(): + for attr in cls.__dict__: if attr.startswith("_"): continue attr_kind = attr.split("_", 1) if len(attr_kind) < 2: - kind: Optional[str] = None + kind: str | None = None else: - attr, kind = attr_kind + attr, kind = attr_kind # noqa: PLW2901 if attr in ("enter", "leave") and kind: name = snake_to_camel(kind) + "Node" node_cls = getattr(ast, name, None) @@ -125,7 +133,8 @@ def __init_subclass__(cls) -> None: or not isinstance(node_cls, type) or not issubclass(node_cls, Node) ): - raise TypeError(f"Invalid AST node kind: {kind}.") + msg = f"Invalid AST node kind: {kind}." + raise TypeError(msg) def __init__(self) -> None: self.enter_leave_map = {} @@ -145,30 +154,19 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: self.enter_leave_map[kind] = enter_leave return enter_leave - def get_visit_fn( - self, kind: str, is_leaving: bool = False - ) -> Optional[Callable[..., Optional[VisitorAction]]]: - """Get the visit function for the given node kind and direction. - - .. deprecated:: 3.2 - Please use ``get_enter_leave_for_kind`` instead. Will be removed in v3.3. - """ - enter_leave = self.get_enter_leave_for_kind(kind) - return enter_leave.leave if is_leaving else enter_leave.enter - class Stack(NamedTuple): """A stack for the visit function.""" in_array: bool idx: int - keys: Tuple[Node, ...] - edits: List[Tuple[Union[int, str], Node]] - prev: Any # 'Stack' (python/mypy/issues/731) + keys: tuple[Node, ...] + edits: list[tuple[int | str, Node]] + prev: Stack def visit( - root: Node, visitor: Visitor, visitor_keys: Optional[VisitorKeyMap] = None + root: Node, visitor: Visitor, visitor_keys: VisitorKeyMap | None = None ) -> Any: """Visit each node in an AST. @@ -189,24 +187,26 @@ def visit( dictionary visitor_keys mapping node kinds to node attributes. """ if not isinstance(root, Node): - raise TypeError(f"Not an AST Node: {inspect(root)}.") + msg = f"Not an AST Node: {inspect(root)}." + raise TypeError(msg) if not isinstance(visitor, Visitor): - raise TypeError(f"Not an AST Visitor: {inspect(visitor)}.") + msg = f"Not an AST Visitor: {inspect(visitor)}." + raise TypeError(msg) if visitor_keys is None: visitor_keys = QUERY_DOCUMENT_KEYS stack: Any = None in_array = False - keys: Tuple[Node, ...] = (root,) + keys: tuple[Node, ...] = (root,) idx = -1 - edits: List[Any] = [] + edits: list[Any] = [] node: Any = root key: Any = None parent: Any = None - path: List[Any] = [] + path: list[Any] = [] path_append = path.append path_pop = path.pop - ancestors: List[Any] = [] + ancestors: list[Any] = [] ancestors_append = ancestors.append ancestors_pop = ancestors.pop @@ -254,7 +254,8 @@ def visit( result = None else: if not isinstance(node, Node): - raise TypeError(f"Invalid AST Node: {inspect(node)}.") + msg = f"Invalid AST Node: {inspect(node)}." 
+ raise TypeError(msg) enter_leave = visitor.get_enter_leave_for_kind(node.kind) visit_fn = enter_leave.leave if is_leaving else enter_leave.enter if visit_fn: @@ -288,7 +289,7 @@ def visit( else: stack = Stack(in_array, idx, keys, edits, stack) in_array = isinstance(node, tuple) - keys = node if in_array else visitor_keys.get(node.kind, ()) + keys = node if in_array else visitor_keys.get(node.kind, ()) # type: ignore idx = -1 edits = [] if parent: @@ -312,11 +313,11 @@ class ParallelVisitor(Visitor): If a prior visitor edits a node, no following visitors will see that node. """ - def __init__(self, visitors: Collection[Visitor]): + def __init__(self, visitors: Collection[Visitor]) -> None: """Create a new visitor from the given list of parallel visitors.""" super().__init__() self.visitors = visitors - self.skipping: List[Any] = [None] * len(visitors) + self.skipping: list[Any] = [None] * len(visitors) def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: """Given a node kind, return the EnterLeaveVisitor for that kind.""" @@ -324,8 +325,8 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: return self.enter_leave_map[kind] except KeyError: has_visitor = False - enter_list: List[Optional[Callable[..., Optional[VisitorAction]]]] = [] - leave_list: List[Optional[Callable[..., Optional[VisitorAction]]]] = [] + enter_list: list[Callable[..., VisitorAction | None] | None] = [] + leave_list: list[Callable[..., VisitorAction | None] | None] = [] for visitor in self.visitors: enter, leave = visitor.get_enter_leave_for_kind(kind) if not has_visitor and (enter or leave): @@ -335,21 +336,20 @@ def get_enter_leave_for_kind(self, kind: str) -> EnterLeaveVisitor: if has_visitor: - def enter(node: Node, *args: Any) -> Optional[VisitorAction]: + def enter(node: Node, *args: Any) -> VisitorAction | None: skipping = self.skipping for i, fn in enumerate(enter_list): - if not skipping[i]: - if fn: - result = fn(node, *args) - if result is SKIP or result is False: - skipping[i] = node - elif result is BREAK or result is True: - skipping[i] = BREAK - elif result is not None: - return result + if not skipping[i] and fn: + result = fn(node, *args) + if result is SKIP or result is False: + skipping[i] = node + elif result is BREAK or result is True: + skipping[i] = BREAK + elif result is not None: + return result return None - def leave(node: Node, *args: Any) -> Optional[VisitorAction]: + def leave(node: Node, *args: Any) -> VisitorAction | None: skipping = self.skipping for i, fn in enumerate(leave_list): if not skipping[i]: @@ -368,7 +368,6 @@ def leave(node: Node, *args: Any) -> Optional[VisitorAction]: return None else: - enter = leave = None enter_leave = EnterLeaveVisitor(enter, leave) diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index c156de41..28ad1a92 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -8,6 +8,7 @@ These functions are not part of the module interface and are subject to change. 
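Among the re-exports added below are the new list formatters; a sketch of their behaviour (the implementation appears in the format_list diff further down):

    from graphql.pyutils import and_list, or_list

    print(or_list(["A", "B", "C"]))  # A, B, or C
    print(and_list(["A", "B"]))      # A and B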
""" +from .async_reduce import async_reduce from .convert_case import camel_to_snake, snake_to_camel from .cached_property import cached_property from .description import ( @@ -17,6 +18,7 @@ unregister_description, ) from .did_you_mean import did_you_mean +from .format_list import or_list, and_list from .group_by import group_by from .identity_func import identity_func from .inspect import inspect @@ -26,40 +28,43 @@ from .awaitable_or_value import AwaitableOrValue from .suggestion_list import suggestion_list from .frozen_error import FrozenError -from .frozen_list import FrozenList -from .frozen_dict import FrozenDict from .merge_kwargs import merge_kwargs from .path import Path from .print_path_list import print_path_list from .simple_pub_sub import SimplePubSub, SimplePubSubIterator from .undefined import Undefined, UndefinedType +from .ref_map import RefMap +from .ref_set import RefSet __all__ = [ - "camel_to_snake", - "snake_to_camel", + "AwaitableOrValue", + "Description", + "FrozenError", + "Path", + "RefMap", + "RefSet", + "SimplePubSub", + "SimplePubSubIterator", + "Undefined", + "UndefinedType", + "and_list", + "async_reduce", "cached_property", + "camel_to_snake", "did_you_mean", - "Description", "group_by", - "is_description", - "register_description", - "unregister_description", "identity_func", "inspect", "is_awaitable", "is_collection", + "is_description", "is_iterable", "merge_kwargs", "natural_comparison_key", - "AwaitableOrValue", - "suggestion_list", - "FrozenError", - "FrozenList", - "FrozenDict", - "Path", + "or_list", "print_path_list", - "SimplePubSub", - "SimplePubSubIterator", - "Undefined", - "UndefinedType", + "register_description", + "snake_to_camel", + "suggestion_list", + "unregister_description", ] diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py new file mode 100644 index 00000000..4eb79748 --- /dev/null +++ b/src/graphql/pyutils/async_reduce.py @@ -0,0 +1,47 @@ +"""Reduce awaitable values""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Collection, TypeVar, cast + +from .is_awaitable import is_awaitable as default_is_awaitable + +if TYPE_CHECKING: + from .awaitable_or_value import AwaitableOrValue + +__all__ = ["async_reduce"] + +T = TypeVar("T") +U = TypeVar("U") + + +def async_reduce( + callback: Callable[[U, T], AwaitableOrValue[U]], + values: Collection[T], + initial_value: AwaitableOrValue[U], + is_awaitable: Callable[[Any], bool] = default_is_awaitable, +) -> AwaitableOrValue[U]: + """Reduce the given potentially awaitable values using a callback function. + + Similar to functools.reduce(), however the reducing callback may return + an awaitable, in which case reduction will continue after each promise resolves. + + If the callback does not return an awaitable, then this function will also not + return an awaitable. 
+ """ + accumulator: AwaitableOrValue[U] = initial_value + for value in values: + if is_awaitable(accumulator): + + async def async_callback( + current_accumulator: Awaitable[U], current_value: T + ) -> U: + result: AwaitableOrValue[U] = callback( + await current_accumulator, current_value + ) + return await result if is_awaitable(result) else result # type: ignore + + accumulator = async_callback(cast("Awaitable[U]", accumulator), value) + else: + accumulator = callback(cast("U", accumulator), value) + return accumulator diff --git a/src/graphql/pyutils/awaitable_or_value.py b/src/graphql/pyutils/awaitable_or_value.py index b497a787..7348db9b 100644 --- a/src/graphql/pyutils/awaitable_or_value.py +++ b/src/graphql/pyutils/awaitable_or_value.py @@ -1,8 +1,18 @@ +"""Awaitable or value type""" + +from __future__ import annotations + from typing import Awaitable, TypeVar, Union +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["AwaitableOrValue"] T = TypeVar("T") -AwaitableOrValue = Union[Awaitable[T], T] +AwaitableOrValue: TypeAlias = Union[Awaitable[T], T] diff --git a/src/graphql/pyutils/cached_property.py b/src/graphql/pyutils/cached_property.py index ddb33725..fcd49a10 100644 --- a/src/graphql/pyutils/cached_property.py +++ b/src/graphql/pyutils/cached_property.py @@ -1,4 +1,8 @@ -from typing import Any, Callable, TYPE_CHECKING +"""Cached properties""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Callable if TYPE_CHECKING: standard_cached_property = None @@ -21,7 +25,7 @@ class CachedProperty: """ def __init__(self, func: Callable) -> None: - self.__doc__ = getattr(func, "__doc__") + self.__doc__ = func.__doc__ self.func = func def __get__(self, obj: object, cls: type) -> Any: diff --git a/src/graphql/pyutils/convert_case.py b/src/graphql/pyutils/convert_case.py index 1fe0300f..4b211e27 100644 --- a/src/graphql/pyutils/convert_case.py +++ b/src/graphql/pyutils/convert_case.py @@ -1,3 +1,5 @@ +"""Conversion between camel and snake case""" + # uses code from https://github.com/daveoncode/python-string-utils import re diff --git a/src/graphql/pyutils/description.py b/src/graphql/pyutils/description.py index ccb858ea..9d43a86d 100644 --- a/src/graphql/pyutils/description.py +++ b/src/graphql/pyutils/description.py @@ -1,4 +1,8 @@ -from typing import Any, Tuple, Union +"""Human-readable descriptions""" + +from __future__ import annotations + +from typing import Any __all__ = [ "Description", @@ -9,7 +13,7 @@ class Description: - """Type checker for human readable descriptions. + """Type checker for human-readable descriptions. By default, only ordinary strings are accepted as descriptions, but you can register() other classes that will also be allowed, @@ -17,17 +21,19 @@ class Description: If you register(object), any object will be allowed as description. """ - bases: Union[type, Tuple[type, ...]] = str + bases: type | tuple[type, ...] = str @classmethod def isinstance(cls, obj: Any) -> bool: + """Check whether this is an instance of a description.""" return isinstance(obj, cls.bases) @classmethod def register(cls, base: type) -> None: """Register a class that shall be accepted as a description.""" if not isinstance(base, type): - raise TypeError("Only types can be registered.") + msg = "Only types can be registered." 
+            raise TypeError(msg)
         if base is object:
             cls.bases = object
         elif cls.bases is object:
@@ -42,9 +48,10 @@ def register(cls, base: type) -> None:
     def unregister(cls, base: type) -> None:
         """Unregister a class that shall no more be accepted as a description."""
         if not isinstance(base, type):
-            raise TypeError("Only types can be unregistered.")
+            msg = "Only types can be unregistered."
+            raise TypeError(msg)
         if isinstance(cls.bases, tuple):
-            if base in cls.bases:
+            if base in cls.bases:  # pragma: no branch
                 cls.bases = tuple(b for b in cls.bases if b is not base)
                 if not cls.bases:
                     cls.bases = object
diff --git a/src/graphql/pyutils/did_you_mean.py b/src/graphql/pyutils/did_you_mean.py
index 88ae6201..ae2022b5 100644
--- a/src/graphql/pyutils/did_you_mean.py
+++ b/src/graphql/pyutils/did_you_mean.py
@@ -1,28 +1,25 @@
-from typing import Optional, Sequence
+"""Generating suggestions"""
+
+from __future__ import annotations
+
+from typing import Sequence
+
+from .format_list import or_list

 __all__ = ["did_you_mean"]

 MAX_LENGTH = 5


-def did_you_mean(suggestions: Sequence[str], sub_message: Optional[str] = None) -> str:
+def did_you_mean(suggestions: Sequence[str], sub_message: str | None = None) -> str:
     """Given [ A, B, C ] return ' Did you mean A, B, or C?'"""
     if not suggestions or not MAX_LENGTH:
         return ""
-    parts = [" Did you mean "]
+    message = " Did you mean "
     if sub_message:
-        parts.extend([sub_message, " "])
+        message += sub_message + " "
     suggestions = suggestions[:MAX_LENGTH]
-    n = len(suggestions)
-    if n == 1:
-        parts.append(f"'{suggestions[0]}'?")
-    elif n == 2:
-        parts.append(f"'{suggestions[0]}' or '{suggestions[1]}'?")
-    else:
-        parts.extend(
-            [
-                ", ".join(f"'{s}'" for s in suggestions[:-1]),
-                f", or '{suggestions[-1]}'?",
-            ]
-        )
-    return "".join(parts)
+    suggestion_list = or_list(
+        [f"'{suggestion}'" for suggestion in suggestions]
+    )
+    return message + suggestion_list + "?"
diff --git a/src/graphql/pyutils/format_list.py b/src/graphql/pyutils/format_list.py
new file mode 100644
index 00000000..368e7ae0
--- /dev/null
+++ b/src/graphql/pyutils/format_list.py
@@ -0,0 +1,33 @@
+"""List formatting"""
+
+from __future__ import annotations
+
+from typing import Sequence
+
+__all__ = ["and_list", "or_list"]
+
+
+def or_list(items: Sequence[str]) -> str:
+    """Given [ A, B, C ] return 'A, B, or C'."""
+    return format_list("or", items)
+
+
+def and_list(items: Sequence[str]) -> str:
+    """Given [ A, B, C ] return 'A, B, and C'."""
+    return format_list("and", items)
+
+
+def format_list(conjunction: str, items: Sequence[str]) -> str:
+    """Given [ A, B, C ] return 'A, B, (conjunction) C'"""
+    if not items:
+        msg = "Missing list items to be formatted."
+        raise ValueError(msg)
+
+    n = len(items)
+    if n == 1:
+        return items[0]
+    if n == 2:
+        return f"{items[0]} {conjunction} {items[1]}"
+
+    *all_but_last, last_item = items
+    return f"{', '.join(all_but_last)}, {conjunction} {last_item}"
diff --git a/src/graphql/pyutils/frozen_dict.py b/src/graphql/pyutils/frozen_dict.py
deleted file mode 100644
index 93283596..00000000
--- a/src/graphql/pyutils/frozen_dict.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from copy import deepcopy
-from typing import Dict, TypeVar
-
-from .frozen_error import FrozenError
-
-__all__ = ["FrozenDict"]
-
-KT = TypeVar("KT")
-VT = TypeVar("VT", covariant=True)
-
-
-class FrozenDict(Dict[KT, VT]):
-    """Dictionary that can only be read, but not changed.
-
-    .. deprecated:: 3.2
-        Use dicts and the Mapping type instead. Will be removed in v3.3.
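The deprecation note above is the whole migration story: read-only mappings become plain dicts that are merely typed as read-only at the interface, e.g. (a sketch):

    from typing import Mapping

    # previously: FrozenDict({"schema": "schema_definition", ...})
    method_names: Mapping[str, str] = {"schema": "schema_definition"}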
- """ - - def __delitem__(self, key): - raise FrozenError - - def __setitem__(self, key, value): - raise FrozenError - - def __iadd__(self, value): - raise FrozenError - - def __hash__(self): - return hash(tuple(self.items())) - - def __copy__(self) -> "FrozenDict": - return FrozenDict(self) - - copy = __copy__ - - def __deepcopy__(self, memo: Dict) -> "FrozenDict": - return FrozenDict({k: deepcopy(v, memo) for k, v in self.items()}) - - def clear(self): - raise FrozenError - - def pop(self, key, default=None): - raise FrozenError - - def popitem(self): - raise FrozenError - - def setdefault(self, key, default=None): - raise FrozenError - - def update(self, other=None): - raise FrozenError diff --git a/src/graphql/pyutils/frozen_error.py b/src/graphql/pyutils/frozen_error.py index 01c02d15..76b359a9 100644 --- a/src/graphql/pyutils/frozen_error.py +++ b/src/graphql/pyutils/frozen_error.py @@ -1,3 +1,5 @@ +"""Error when changing immutable values""" + __all__ = ["FrozenError"] diff --git a/src/graphql/pyutils/frozen_list.py b/src/graphql/pyutils/frozen_list.py deleted file mode 100644 index 01ead7c4..00000000 --- a/src/graphql/pyutils/frozen_list.py +++ /dev/null @@ -1,70 +0,0 @@ -from copy import deepcopy -from typing import Dict, List, TypeVar - -from .frozen_error import FrozenError - -__all__ = ["FrozenList"] - - -T = TypeVar("T", covariant=True) - - -class FrozenList(List[T]): - """List that can only be read, but not changed. - - .. deprecated:: 3.2 - Use tuples or lists and the Collection type instead. Will be removed in v3.3. - """ - - def __delitem__(self, key): - raise FrozenError - - def __setitem__(self, key, value): - raise FrozenError - - def __add__(self, value): - if isinstance(value, tuple): - value = list(value) - return list.__add__(self, value) - - def __iadd__(self, value): - raise FrozenError - - def __mul__(self, value): - return list.__mul__(self, value) - - def __imul__(self, value): - raise FrozenError - - def __hash__(self): - return hash(tuple(self)) - - def __copy__(self) -> "FrozenList": - return FrozenList(self) - - def __deepcopy__(self, memo: Dict) -> "FrozenList": - return FrozenList(deepcopy(value, memo) for value in self) - - def append(self, x): - raise FrozenError - - def extend(self, iterable): - raise FrozenError - - def insert(self, i, x): - raise FrozenError - - def remove(self, x): - raise FrozenError - - def pop(self, i=None): - raise FrozenError - - def clear(self): - raise FrozenError - - def sort(self, *, key=None, reverse=False): - raise FrozenError - - def reverse(self): - raise FrozenError diff --git a/src/graphql/pyutils/group_by.py b/src/graphql/pyutils/group_by.py index e9cd0ba3..60c77b30 100644 --- a/src/graphql/pyutils/group_by.py +++ b/src/graphql/pyutils/group_by.py @@ -1,5 +1,9 @@ +"""Grouping function""" + +from __future__ import annotations + from collections import defaultdict -from typing import Callable, Collection, Dict, List, TypeVar +from typing import Callable, Collection, TypeVar __all__ = ["group_by"] @@ -7,9 +11,9 @@ T = TypeVar("T") -def group_by(items: Collection[T], key_fn: Callable[[T], K]) -> Dict[K, List[T]]: +def group_by(items: Collection[T], key_fn: Callable[[T], K]) -> dict[K, list[T]]: """Group an unsorted collection of items by a key derived via a function.""" - result: Dict[K, List[T]] = defaultdict(list) + result: dict[K, list[T]] = defaultdict(list) for item in items: key = key_fn(item) result[key].append(item) diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index 
88a96738..1a13936b 100644 --- a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -1,4 +1,8 @@ -from typing import cast, Any, TypeVar +"""Identity function""" + +from __future__ import annotations + +from typing import Any, TypeVar, cast from .undefined import Undefined @@ -7,7 +11,9 @@ T = TypeVar("T") +DEFAULT_VALUE = cast("Any", Undefined) + -def identity_func(x: T = cast(Any, Undefined), *_args: Any) -> T: +def identity_func(x: T = DEFAULT_VALUE, *_args: Any) -> T: """Return the first received argument.""" return x diff --git a/src/graphql/pyutils/inspect.py b/src/graphql/pyutils/inspect.py index 8fc99dce..ed4920be 100644 --- a/src/graphql/pyutils/inspect.py +++ b/src/graphql/pyutils/inspect.py @@ -1,15 +1,19 @@ +"""Value inspection for error messages""" + +from __future__ import annotations + from inspect import ( + isasyncgen, + isasyncgenfunction, isclass, - ismethod, + iscoroutine, + iscoroutinefunction, isfunction, - isgeneratorfunction, isgenerator, - iscoroutinefunction, - iscoroutine, - isasyncgenfunction, - isasyncgen, + isgeneratorfunction, + ismethod, ) -from typing import Any, List +from typing import Any from .undefined import Undefined @@ -34,7 +38,7 @@ def inspect(value: Any) -> str: return inspect_recursive(value, []) -def inspect_recursive(value: Any, seen_values: List) -> str: +def inspect_recursive(value: Any, seen_values: list) -> str: if value is None or value is Undefined or isinstance(value, (bool, float, complex)): return repr(value) if isinstance(value, (int, str, bytes, bytearray)): @@ -83,20 +87,18 @@ def inspect_recursive(value: Any, seen_values: List) -> str: if isinstance(value, frozenset): return f"frozenset({{{s}}})" return f"[{s}]" - else: - # handle collections that are nested too deep - if isinstance(value, (list, tuple, dict, set, frozenset)): - if not value: - return repr(value) - if isinstance(value, list): - return "[...]" - if isinstance(value, tuple): - return "(...)" - if isinstance(value, dict): - return "{...}" - if isinstance(value, set): - return "set(...)" - return "frozenset(...)" + elif isinstance(value, (list, tuple, dict, set, frozenset)): + if not value: + return repr(value) + if isinstance(value, list): + return "[...]" + if isinstance(value, tuple): + return "(...)" + if isinstance(value, dict): + return "{...}" + if isinstance(value, set): + return "set(...)" + return "frozenset(...)" if isinstance(value, Exception): type_ = "exception" value = type(value) @@ -140,7 +142,7 @@ def inspect_recursive(value: Any, seen_values: List) -> str: try: name = type(value).__name__ if not name or "<" in name or ">" in name: - raise AttributeError + raise AttributeError # noqa: TRY301 except AttributeError: return "" else: @@ -148,7 +150,7 @@ def inspect_recursive(value: Any, seen_values: List) -> str: try: name = value.__name__ if not name or "<" in name or ">" in name: - raise AttributeError + raise AttributeError # noqa: TRY301 except AttributeError: return f"<{type_}>" else: @@ -164,7 +166,7 @@ def trunc_str(s: str) -> str: return s -def trunc_list(s: List) -> List: +def trunc_list(s: list) -> list: """Truncate lists to maximum length.""" if len(s) > max_list_size: i = max_list_size // 2 diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index 80c3be3f..158bcd40 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -1,24 +1,36 @@ +"""Check whether objects are awaitable""" + +from __future__ import annotations + import inspect -from 
typing import Any from types import CoroutineType, GeneratorType +from typing import Any, Awaitable + +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + __all__ = ["is_awaitable"] CO_ITERABLE_COROUTINE = inspect.CO_ITERABLE_COROUTINE -def is_awaitable(value: Any) -> bool: - """Return true if object can be passed to an ``await`` expression. +def is_awaitable(value: Any) -> TypeGuard[Awaitable]: + """Return True if object can be passed to an ``await`` expression. - Instead of testing if the object is an instance of abc.Awaitable, it checks - the existence of an `__await__` attribute. This is much faster. + Instead of testing whether the object is an instance of abc.Awaitable, we + check the existence of an `__await__` attribute. This is much faster. """ return ( # check for coroutine objects isinstance(value, CoroutineType) # check for old-style generator based coroutine objects - or isinstance(value, GeneratorType) - and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) + or ( + isinstance(value, GeneratorType) # for Python < 3.11 + and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) + ) # check for other awaitables (e.g. futures) or hasattr(value, "__await__") ) diff --git a/src/graphql/pyutils/is_iterable.py b/src/graphql/pyutils/is_iterable.py index cffbc51c..3ec027bb 100644 --- a/src/graphql/pyutils/is_iterable.py +++ b/src/graphql/pyutils/is_iterable.py @@ -1,30 +1,38 @@ -from typing import ( - Any, - ByteString, - Collection, - Iterable, - Mapping, - Text, - ValuesView, -) +"""Check whether objects are iterable""" + +from __future__ import annotations + +from array import array +from typing import Any, Collection, Iterable, Mapping, ValuesView + +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + __all__ = ["is_collection", "is_iterable"] -collection_types: Any = Collection +collection_types: Any = [Collection] if not isinstance({}.values(), Collection): # Python < 3.7.2 - collection_types = (Collection, ValuesView) + collection_types.append(ValuesView) +if not issubclass(array, Collection): # PyPy <= 7.3.9 + collection_types.append(array) +collection_types = ( + collection_types[0] if len(collection_types) == 1 else tuple(collection_types) +) iterable_types: Any = Iterable -not_iterable_types: Any = (ByteString, Mapping, Text) +not_iterable_types: Any = (bytearray, bytes, str, memoryview, Mapping) -def is_collection(value: Any) -> bool: +def is_collection(value: Any) -> TypeGuard[Collection]: """Check if value is a collection, but not a string or a mapping.""" return isinstance(value, collection_types) and not isinstance( value, not_iterable_types ) -def is_iterable(value: Any) -> bool: +def is_iterable(value: Any) -> TypeGuard[Iterable]: """Check if value is an iterable, but not a string or a mapping.""" return isinstance(value, iterable_types) and not isinstance( value, not_iterable_types diff --git a/src/graphql/pyutils/merge_kwargs.py b/src/graphql/pyutils/merge_kwargs.py index 5b7ac202..21144524 100644 --- a/src/graphql/pyutils/merge_kwargs.py +++ b/src/graphql/pyutils/merge_kwargs.py @@ -1,8 +1,12 @@ -from typing import cast, Any, Dict, TypeVar +"""Merge arguments""" + +from __future__ import annotations + +from typing import Any, Dict, TypeVar, cast T = TypeVar("T") def merge_kwargs(base_dict: T, **kwargs: Any) -> T: """Return arbitrary typed dictionary with some keyword args merged in.""" - return cast(T, {**cast(Dict, base_dict), 
**kwargs})
+    return cast("T", {**cast("Dict", base_dict), **kwargs})
diff --git a/src/graphql/pyutils/natural_compare.py b/src/graphql/pyutils/natural_compare.py
index 6af02038..9c357cc6 100644
--- a/src/graphql/pyutils/natural_compare.py
+++ b/src/graphql/pyutils/natural_compare.py
@@ -1,6 +1,8 @@
-import re
-from typing import Tuple
+"""Natural sort order"""
+
+from __future__ import annotations

+import re
 from itertools import cycle

 __all__ = ["natural_comparison_key"]
@@ -8,7 +10,7 @@
 _re_digits = re.compile(r"(\d+)")


-def natural_comparison_key(key: str) -> Tuple:
+def natural_comparison_key(key: str) -> tuple:
     """Comparison key function for sorting strings by natural sort order.

     See: https://en.wikipedia.org/wiki/Natural_sort_order
diff --git a/src/graphql/pyutils/path.py b/src/graphql/pyutils/path.py
index c928d296..cc2202c4 100644
--- a/src/graphql/pyutils/path.py
+++ b/src/graphql/pyutils/path.py
@@ -1,4 +1,8 @@
-from typing import Any, List, NamedTuple, Optional, Union
+"""Path of indices"""
+
+from __future__ import annotations
+
+from typing import NamedTuple

 __all__ = ["Path"]

@@ -6,22 +10,22 @@
 class Path(NamedTuple):
     """A generic path of string or integer indices"""

-    prev: Any  # Optional['Path'] (python/mypy/issues/731)
+    prev: Path | None
     """path with the previous indices"""

-    key: Union[str, int]
+    key: str | int
     """current index in the path (string or integer)"""

-    typename: Optional[str]
+    typename: str | None
     """name of the parent type to avoid path ambiguity"""

-    def add_key(self, key: Union[str, int], typename: Optional[str] = None) -> "Path":
+    def add_key(self, key: str | int, typename: str | None = None) -> Path:
         """Return a new Path containing the given key."""
         return Path(self, key, typename)

-    def as_list(self) -> List[Union[str, int]]:
+    def as_list(self) -> list[str | int]:
         """Return a list of the path keys."""
-        flattened: List[Union[str, int]] = []
+        flattened: list[str | int] = []
         append = flattened.append
-        curr: Path = self
+        curr: Path | None = self
         while curr:
             append(curr.key)
             curr = curr.prev
diff --git a/src/graphql/pyutils/print_path_list.py b/src/graphql/pyutils/print_path_list.py
index 125829b0..37dca741 100644
--- a/src/graphql/pyutils/print_path_list.py
+++ b/src/graphql/pyutils/print_path_list.py
@@ -1,6 +1,10 @@
-from typing import Collection, Union
+"""Path printing"""

+from __future__ import annotations

-def print_path_list(path: Collection[Union[str, int]]) -> str:
+from typing import Collection
+
+
+def print_path_list(path: Collection[str | int]) -> str:
     """Build a string describing the path."""
     return "".join(f"[{key}]" if isinstance(key, int) else f".{key}" for key in path)
diff --git a/src/graphql/pyutils/ref_map.py b/src/graphql/pyutils/ref_map.py
new file mode 100644
index 00000000..0cffd533
--- /dev/null
+++ b/src/graphql/pyutils/ref_map.py
@@ -0,0 +1,79 @@
+"""A Map class that works similarly to JavaScript's Map."""
+
+from __future__ import annotations
+
+from collections.abc import MutableMapping
+
+try:
+    MutableMapping[str, int]
+except TypeError:  # Python < 3.9
+    from typing import MutableMapping
+from typing import Any, Iterable, Iterator, TypeVar
+
+__all__ = ["RefMap"]
+
+K = TypeVar("K")
+V = TypeVar("V")
+
+
+class RefMap(MutableMapping[K, V]):
+    """A dictionary-like object that allows mutable objects as keys.
+
+    This class keeps the insertion order like a normal dictionary.
+
+    Note that the implementation is limited to what is needed internally.
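What keying by reference means in practice, once the implementation that follows is in place (a sketch):

    from graphql.pyutils import RefMap

    key1, key2 = [1], [1]  # equal but distinct (and unhashable) objects
    refs = RefMap([(key1, "first")])
    refs[key2] = "second"
    assert len(refs) == 2  # keyed by identity, not equality
    assert refs[key1] == "first"
    assert list(refs.values()) == ["first", "second"]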
+ """ + + _map: dict[int, tuple[K, V]] + + def __init__(self, items: Iterable[tuple[K, V]] | None = None) -> None: + super().__init__() + self._map = {} + if items: + self.update(items) + + def __setitem__(self, key: K, value: V) -> None: + self._map[id(key)] = (key, value) + + def __getitem__(self, key: K) -> Any: + return self._map[id(key)][1] + + def __delitem__(self, key: K) -> None: + del self._map[id(key)] + + def __contains__(self, key: Any) -> bool: + return id(key) in self._map + + def __len__(self) -> int: + return len(self._map) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({list(self.items())!r})" + + def get(self, key: Any, default: Any = None) -> Any: + """Get the mapped value for the given key.""" + try: + return self._map[id(key)][1] + except KeyError: + return default + + def __iter__(self) -> Iterator[K]: + return self.keys() + + def keys(self) -> Iterator[K]: # type: ignore + """Return an iterator over the keys of the map.""" + return (item[0] for item in self._map.values()) + + def values(self) -> Iterator[V]: # type: ignore + """Return an iterator over the values of the map.""" + return (item[1] for item in self._map.values()) + + def items(self) -> Iterator[tuple[K, V]]: # type: ignore + """Return an iterator over the key/value-pairs of the map.""" + return self._map.values() # type: ignore + + def update(self, items: Iterable[tuple[K, V]] | None = None) -> None: # type: ignore + """Update the map with the given key/value-pairs.""" + if items: + for key, value in items: + self[key] = value diff --git a/src/graphql/pyutils/ref_set.py b/src/graphql/pyutils/ref_set.py new file mode 100644 index 00000000..731c021d --- /dev/null +++ b/src/graphql/pyutils/ref_set.py @@ -0,0 +1,67 @@ +"""A Set class that work similar to JavaScript.""" + +from __future__ import annotations + +from collections.abc import MutableSet + +try: + MutableSet[int] +except TypeError: # Python < 3.9 + from typing import MutableSet +from contextlib import suppress +from typing import Any, Iterable, Iterator, TypeVar + +from .ref_map import RefMap + +__all__ = ["RefSet"] + + +T = TypeVar("T") + + +class RefSet(MutableSet[T]): + """A set like object that allows mutable objects as elements. + + This class keeps the insertion order unlike a normal set. + + Note that the implementation is limited to what is needed internally. 
+ """ + + _map: RefMap[T, None] + + def __init__(self, values: Iterable[T] | None = None) -> None: + super().__init__() + self._map = RefMap() + if values: + self.update(values) + + def __contains__(self, key: Any) -> bool: + return key in self._map + + def __iter__(self) -> Iterator[T]: + return iter(self._map) + + def __len__(self) -> int: + return len(self._map) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({list(self)!r})" + + def add(self, value: T) -> None: + """Add the given item to the set.""" + self._map[value] = None + + def remove(self, value: T) -> None: + """Remove the given item from the set.""" + del self._map[value] + + def discard(self, value: T) -> None: + """Remove the given item from the set if it exists.""" + with suppress(KeyError): + self.remove(value) + + def update(self, values: Iterable[T] | None = None) -> None: + """Update the set with the given items.""" + if values: + for item in values: + self.add(item) diff --git a/src/graphql/pyutils/simple_pub_sub.py b/src/graphql/pyutils/simple_pub_sub.py index 6b1ba050..3e88d3b8 100644 --- a/src/graphql/pyutils/simple_pub_sub.py +++ b/src/graphql/pyutils/simple_pub_sub.py @@ -1,12 +1,11 @@ -from asyncio import Future, Queue, ensure_future, sleep -from inspect import isawaitable -from typing import Any, AsyncIterator, Callable, Optional, Set +"""Simple public-subscribe system""" -try: - from asyncio import get_running_loop -except ImportError: - from asyncio import get_event_loop as get_running_loop # Python < 3.7 +from __future__ import annotations +from asyncio import Future, Queue, create_task, get_running_loop, sleep +from typing import Any, AsyncIterator, Callable + +from .is_awaitable import is_awaitable __all__ = ["SimplePubSub", "SimplePubSubIterator"] @@ -19,7 +18,7 @@ class SimplePubSub: Useful for mocking a PubSub system for tests. 
""" - subscribers: Set[Callable] + subscribers: set[Callable] def __init__(self) -> None: self.subscribers = set() @@ -28,18 +27,19 @@ def emit(self, event: Any) -> bool: """Emit an event.""" for subscriber in self.subscribers: result = subscriber(event) - if isawaitable(result): - ensure_future(result) + if is_awaitable(result): + create_task(result) # type: ignore # noqa: RUF006 return bool(self.subscribers) - def get_subscriber( - self, transform: Optional[Callable] = None - ) -> "SimplePubSubIterator": + def get_subscriber(self, transform: Callable | None = None) -> SimplePubSubIterator: + """Return subscriber iterator""" return SimplePubSubIterator(self, transform) class SimplePubSubIterator(AsyncIterator): - def __init__(self, pubsub: SimplePubSub, transform: Optional[Callable]) -> None: + """Async iterator used for subscriptions.""" + + def __init__(self, pubsub: SimplePubSub, transform: Callable | None) -> None: self.pubsub = pubsub self.transform = transform self.pull_queue: Queue[Future] = Queue() @@ -47,7 +47,7 @@ def __init__(self, pubsub: SimplePubSub, transform: Optional[Callable]) -> None: self.listening = True pubsub.subscribers.add(self.push_value) - def __aiter__(self) -> "SimplePubSubIterator": + def __aiter__(self) -> SimplePubSubIterator: return self async def __anext__(self) -> Any: @@ -61,10 +61,12 @@ async def __anext__(self) -> Any: return future async def aclose(self) -> None: + """Close the iterator.""" if self.listening: await self.empty_queue() async def empty_queue(self) -> None: + """Empty the queue.""" self.listening = False self.pubsub.subscribers.remove(self.push_value) while not self.pull_queue.empty(): @@ -74,6 +76,7 @@ async def empty_queue(self) -> None: await self.push_queue.get() async def push_value(self, event: Any) -> None: + """Push a new value.""" value = event if self.transform is None else self.transform(event) if self.pull_queue.empty(): await self.push_queue.put(value) diff --git a/src/graphql/pyutils/suggestion_list.py b/src/graphql/pyutils/suggestion_list.py index 0020b3ce..35240c77 100644 --- a/src/graphql/pyutils/suggestion_list.py +++ b/src/graphql/pyutils/suggestion_list.py @@ -1,11 +1,15 @@ -from typing import Collection, Optional, List +"""List with suggestions""" + +from __future__ import annotations + +from typing import Collection from .natural_compare import natural_comparison_key __all__ = ["suggestion_list"] -def suggestion_list(input_: str, options: Collection[str]) -> List[str]: +def suggestion_list(input_: str, options: Collection[str]) -> list[str]: """Get list with suggestions for a given input. 
Given an invalid input string and list of valid options, returns a filtered list @@ -42,10 +46,10 @@ class LexicalDistance: _input: str _input_lower_case: str - _input_list: List[int] - _rows: List[List[int]] + _input_list: list[int] + _rows: list[list[int]] - def __init__(self, input_: str): + def __init__(self, input_: str) -> None: self._input = input_ self._input_lower_case = input_.lower() row_size = len(input_) + 1 @@ -53,7 +57,7 @@ def __init__(self, input_: str): self._rows = [[0] * row_size, [0] * row_size, [0] * row_size] - def measure(self, option: str, threshold: int) -> Optional[int]: + def measure(self, option: str, threshold: int) -> int | None: if self._input == option: return 0 @@ -95,8 +99,7 @@ def measure(self, option: str, threshold: int) -> Optional[int]: double_diagonal_cell = rows[(i - 2) % 3][j - 2] current_cell = min(current_cell, double_diagonal_cell + 1) - if current_cell < smallest_cell: - smallest_cell = current_cell + smallest_cell = min(current_cell, smallest_cell) current_row[j] = current_cell diff --git a/src/graphql/pyutils/undefined.py b/src/graphql/pyutils/undefined.py index 73dc5314..10e2c69e 100644 --- a/src/graphql/pyutils/undefined.py +++ b/src/graphql/pyutils/undefined.py @@ -1,11 +1,28 @@ -from typing import Any +"""The Undefined value""" + +from __future__ import annotations + +import warnings __all__ = ["Undefined", "UndefinedType"] -class UndefinedType(ValueError): +class UndefinedType: """Auxiliary class for creating the Undefined singleton.""" + _instance: UndefinedType | None = None + + def __new__(cls) -> UndefinedType: + """Create the Undefined singleton.""" + if cls._instance is None: + cls._instance = super().__new__(cls) + else: + warnings.warn("Redefinition of 'Undefined'", RuntimeWarning, stacklevel=2) + return cls._instance + + def __reduce__(self) -> str: + return "Undefined" + def __repr__(self) -> str: return "Undefined" @@ -17,10 +34,10 @@ def __hash__(self) -> int: def __bool__(self) -> bool: return False - def __eq__(self, other: Any) -> bool: - return other is Undefined + def __eq__(self, other: object) -> bool: + return other is Undefined or other is None - def __ne__(self, other: Any) -> bool: + def __ne__(self, other: object) -> bool: return not self == other diff --git a/src/graphql/subscription/__init__.py b/src/graphql/subscription/__init__.py deleted file mode 100644 index f0c90910..00000000 --- a/src/graphql/subscription/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -"""GraphQL Subscription - -The :mod:`graphql.subscription` package is responsible for subscribing to updates -on specific data. - -.. deprecated:: 3.2 - This package has been deprecated with its exported functions integrated into the - :mod:`graphql.execution` package, to better conform with the terminology of the - GraphQL specification. For backwards compatibility, the :mod:`graphql.subscription` - package currently re-exports the moved functions from the :mod:`graphql.execution` - package. In v3.3, the :mod:`graphql.subscription` package will be dropped entirely. 
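# Two quick sketches for the modules above. suggestion_list() filters and
# sorts options by lexical distance to the (misspelled) input:
import pickle

from graphql.pyutils import Undefined, suggestion_list

assert suggestion_list("stirng", ["string", "int", "float"]) == ["string"]

# Undefined stays a falsy singleton; with the patched __eq__ it now also
# compares equal to None, and via __reduce__ it pickles back to itself:
assert not Undefined and Undefined != 0
assert Undefined == None  # noqa: E711 - deliberate, per __eq__ above
assert pickle.loads(pickle.dumps(Undefined)) is Undefined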
-""" - -from ..execution import subscribe, create_source_event_stream, MapAsyncIterator - -__all__ = ["subscribe", "create_source_event_stream", "MapAsyncIterator"] diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index 6a86c0f7..8c41bd28 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -82,6 +82,8 @@ GraphQLAbstractType, GraphQLWrappingType, GraphQLNullableType, + GraphQLNullableInputType, + GraphQLNullableOutputType, GraphQLNamedType, GraphQLNamedInputType, GraphQLNamedOutputType, @@ -96,6 +98,7 @@ GraphQLFieldMap, GraphQLInputField, GraphQLInputFieldMap, + GraphQLInputFieldOutType, GraphQLScalarSerializer, GraphQLScalarValueParser, GraphQLScalarLiteralParser, @@ -130,8 +133,11 @@ specified_directives, GraphQLIncludeDirective, GraphQLSkipDirective, + GraphQLDeferDirective, + GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, # Keyword Args GraphQLDirectiveKwargs, # Constant Deprecation Reason @@ -171,128 +177,134 @@ from .validate import validate_schema, assert_valid_schema __all__ = [ - "is_schema", - "assert_schema", - "assert_name", - "assert_enum_value_name", - "GraphQLSchema", - "GraphQLSchemaKwargs", - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", - "GraphQLScalarType", - "GraphQLObjectType", - "GraphQLInterfaceType", - "GraphQLUnionType", - "GraphQLEnumType", - "GraphQLInputObjectType", - "GraphQLInputType", - "GraphQLArgument", - "GraphQLList", - "GraphQLNonNull", - "GraphQLType", - "GraphQLInputType", - "GraphQLOutputType", - "GraphQLLeafType", - "GraphQLCompositeType", + "DEFAULT_DEPRECATION_REASON", + "GRAPHQL_MAX_INT", + "GRAPHQL_MIN_INT", "GraphQLAbstractType", - "GraphQLWrappingType", - "GraphQLNullableType", - "GraphQLNamedType", - "GraphQLNamedInputType", - "GraphQLNamedOutputType", - "Thunk", - "ThunkCollection", - "ThunkMapping", "GraphQLArgument", + "GraphQLArgument", + "GraphQLArgumentKwargs", "GraphQLArgumentMap", + "GraphQLBoolean", + "GraphQLCompositeType", + "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", + "GraphQLDirective", + "GraphQLDirectiveKwargs", + "GraphQLEnumType", + "GraphQLEnumTypeKwargs", "GraphQLEnumValue", + "GraphQLEnumValueKwargs", "GraphQLEnumValueMap", "GraphQLField", + "GraphQLFieldKwargs", "GraphQLFieldMap", + "GraphQLFieldResolver", + "GraphQLFloat", + "GraphQLID", + "GraphQLIncludeDirective", "GraphQLInputField", - "GraphQLInputFieldMap", - "GraphQLScalarSerializer", - "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLArgumentKwargs", - "GraphQLEnumTypeKwargs", - "GraphQLEnumValueKwargs", - "GraphQLFieldKwargs", "GraphQLInputFieldKwargs", + "GraphQLInputFieldMap", + "GraphQLInputFieldOutType", + 
"GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", + "GraphQLInputType", + "GraphQLInputType", + "GraphQLInt", + "GraphQLInterfaceType", "GraphQLInterfaceTypeKwargs", + "GraphQLIsTypeOfFn", + "GraphQLLeafType", + "GraphQLList", + "GraphQLNamedInputType", + "GraphQLNamedOutputType", + "GraphQLNamedType", "GraphQLNamedTypeKwargs", + "GraphQLNonNull", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", + "GraphQLNullableType", + "GraphQLObjectType", "GraphQLObjectTypeKwargs", - "GraphQLScalarTypeKwargs", - "GraphQLUnionTypeKwargs", - "GraphQLFieldResolver", - "GraphQLTypeResolver", - "GraphQLIsTypeOfFn", + "GraphQLOneOfDirective", + "GraphQLOutputType", "GraphQLResolveInfo", - "ResponsePath", - "is_directive", - "assert_directive", - "is_specified_directive", - "specified_directives", - "GraphQLDirective", - "GraphQLIncludeDirective", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", + "GraphQLScalarType", + "GraphQLScalarTypeKwargs", + "GraphQLScalarValueParser", + "GraphQLSchema", + "GraphQLSchemaKwargs", "GraphQLSkipDirective", - "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", - "GraphQLDirectiveKwargs", - "DEFAULT_DEPRECATION_REASON", - "is_specified_scalar_type", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", + "GraphQLStreamDirective", "GraphQLString", - "GraphQLBoolean", - "GraphQLID", - "GRAPHQL_MAX_INT", - "GRAPHQL_MIN_INT", - "is_introspection_type", - "introspection_types", - "TypeKind", + "GraphQLType", + "GraphQLTypeResolver", + "GraphQLUnionType", + "GraphQLUnionTypeKwargs", + "GraphQLWrappingType", + "ResponsePath", "SchemaMetaFieldDef", + "Thunk", + "ThunkCollection", + "ThunkMapping", + "TypeKind", "TypeMetaFieldDef", "TypeNameMetaFieldDef", - "validate_schema", + "assert_abstract_type", + "assert_composite_type", + "assert_directive", + "assert_enum_type", + "assert_enum_value_name", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_name", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_schema", + "assert_type", + "assert_union_type", "assert_valid_schema", + "assert_wrapping_type", + "get_named_type", + "get_nullable_type", + "introspection_types", + "is_abstract_type", + "is_composite_type", + "is_directive", + "is_enum_type", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_introspection_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_schema", + "is_specified_directive", + "is_specified_scalar_type", + "is_type", + "is_union_type", + "is_wrapping_type", + "resolve_thunk", + "specified_directives", + "specified_scalar_types", + "validate_schema", ] diff --git a/src/graphql/type/assert_name.py b/src/graphql/type/assert_name.py index 0ab6b2e7..1a8f7689 100644 --- a/src/graphql/type/assert_name.py +++ b/src/graphql/type/assert_name.py @@ -1,23 +1,28 @@ +"""Assertions for naming conventions""" + from ..error import GraphQLError -from ..language.character_classes import is_name_start, is_name_continue +from ..language.character_classes import is_name_continue, is_name_start -__all__ = ["assert_name", "assert_enum_value_name"] +__all__ = ["assert_enum_value_name", "assert_name"] def assert_name(name: str) -> str: """Uphold the spec 
rules about naming.""" if name is None: - raise TypeError("Must provide name.") + msg = "Must provide name." + raise TypeError(msg) if not isinstance(name, str): - raise TypeError("Expected name to be a string.") + msg = "Expected name to be a string." + raise TypeError(msg) if not name: - raise GraphQLError("Expected name to be a non-empty string.") + msg = "Expected name to be a non-empty string." + raise GraphQLError(msg) if not all(is_name_continue(char) for char in name[1:]): - raise GraphQLError( - f"Names must only contain [_a-zA-Z0-9] but {name!r} does not." - ) + msg = f"Names must only contain [_a-zA-Z0-9] but {name!r} does not." + raise GraphQLError(msg) if not is_name_start(name[0]): - raise GraphQLError(f"Names must start with [_a-zA-Z] but {name!r} does not.") + msg = f"Names must start with [_a-zA-Z] but {name!r} does not." + raise GraphQLError(msg) return name @@ -25,5 +30,6 @@ def assert_enum_value_name(name: str) -> str: """Uphold the spec rules about naming enum values.""" assert_name(name) if name in {"true", "false", "null"}: - raise GraphQLError(f"Enum values cannot be named: {name}.") + msg = f"Enum values cannot be named: {name}." + raise GraphQLError(msg) return name diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index dbe03ada..c334488d 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1,28 +1,40 @@ -from enum import Enum +"""GraphQL type definitions.""" + +from __future__ import annotations + from typing import ( + TYPE_CHECKING, Any, Callable, Collection, Dict, Generic, - List, Mapping, NamedTuple, Optional, - Tuple, - TYPE_CHECKING, - Type, TypeVar, Union, cast, overload, ) +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict +try: + from typing import TypeAlias, TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias, TypeGuard + +if TYPE_CHECKING: + from enum import Enum + from ..error import GraphQLError from ..language import ( EnumTypeDefinitionNode, - EnumValueDefinitionNode, EnumTypeExtensionNode, + EnumValueDefinitionNode, EnumValueNode, FieldDefinitionNode, FieldNode, @@ -47,65 +59,20 @@ from ..pyutils import ( AwaitableOrValue, Path, + Undefined, cached_property, did_you_mean, inspect, - is_collection, - is_description, suggestion_list, - Undefined, ) from ..utilities.value_from_ast_untyped import value_from_ast_untyped -from .assert_name import assert_name, assert_enum_value_name - -try: - from typing import TypedDict -except ImportError: # Python < 3.8 - from typing_extensions import TypedDict +from .assert_name import assert_enum_value_name, assert_name if TYPE_CHECKING: - from .schema import GraphQLSchema # noqa: F401 + from .schema import GraphQLSchema + __all__ = [ - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - 
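# assert_name sketch: behavior is unchanged by the msg-variable refactoring
# above; invalid names still raise GraphQLError with the same messages:
from graphql import GraphQLError
from graphql.type import assert_enum_value_name, assert_name

assert assert_name("_validName1") == "_validName1"
try:
    assert_name("3invalid")
except GraphQLError as error:
    print(error.message)  # Names must start with [_a-zA-Z] but '3invalid' does not.
try:
    assert_enum_value_name("null")
except GraphQLError as error:
    print(error.message)  # Enum values cannot be named: null.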
"assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", "GraphQLAbstractType", "GraphQLArgument", "GraphQLArgumentKwargs", @@ -123,6 +90,7 @@ "GraphQLInputField", "GraphQLInputFieldKwargs", "GraphQLInputFieldMap", + "GraphQLInputFieldOutType", "GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", "GraphQLInputType", @@ -131,21 +99,23 @@ "GraphQLIsTypeOfFn", "GraphQLLeafType", "GraphQLList", - "GraphQLNamedType", - "GraphQLNamedTypeKwargs", "GraphQLNamedInputType", "GraphQLNamedOutputType", - "GraphQLNullableType", + "GraphQLNamedType", + "GraphQLNamedTypeKwargs", "GraphQLNonNull", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", + "GraphQLNullableType", + "GraphQLObjectType", + "GraphQLObjectTypeKwargs", + "GraphQLOutputType", "GraphQLResolveInfo", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", "GraphQLScalarType", "GraphQLScalarTypeKwargs", - "GraphQLScalarSerializer", "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLObjectType", - "GraphQLObjectTypeKwargs", - "GraphQLOutputType", "GraphQLType", "GraphQLTypeResolver", "GraphQLUnionType", @@ -154,6 +124,45 @@ "Thunk", "ThunkCollection", "ThunkMapping", + "assert_abstract_type", + "assert_composite_type", + "assert_enum_type", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_type", + "assert_union_type", + "assert_wrapping_type", + "get_named_type", + "get_nullable_type", + "is_abstract_type", + "is_composite_type", + "is_enum_type", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_type", + "is_union_type", + "is_wrapping_type", + "resolve_thunk", ] @@ -161,109 +170,112 @@ class GraphQLType: """Base class for all GraphQL types""" # Note: We don't use slots for GraphQLType objects because memory considerations - # are not really important for the schema definition and it would make caching + # are not really important for the schema definition, and it would make caching # properties slower or more complicated. # There are predicates for each kind of GraphQL type. -def is_type(type_: Any) -> bool: +def is_type(type_: Any) -> TypeGuard[GraphQLType]: + """Check whether this is a GraphQL type.""" return isinstance(type_, GraphQLType) def assert_type(type_: Any) -> GraphQLType: + """Assert that this is a GraphQL type.""" if not is_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL type.") - return cast(GraphQLType, type_) + msg = f"Expected {type_} to be a GraphQL type." + raise TypeError(msg) + return type_ # These types wrap and modify other types -GT = TypeVar("GT", bound=GraphQLType) +GT_co = TypeVar("GT_co", bound=GraphQLType, covariant=True) -class GraphQLWrappingType(GraphQLType, Generic[GT]): +class GraphQLWrappingType(GraphQLType, Generic[GT_co]): """Base class for all GraphQL wrapping types""" - of_type: GT + of_type: GT_co - def __init__(self, type_: GT) -> None: - if not is_type(type_): - raise TypeError( - f"Can only create a wrapper for a GraphQLType, but got: {type_}." 
- ) + def __init__(self, type_: GT_co) -> None: self.of_type = type_ def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.of_type!r}>" -def is_wrapping_type(type_: Any) -> bool: +def is_wrapping_type(type_: Any) -> TypeGuard[GraphQLWrappingType]: + """Check whether this is a GraphQL wrapping type.""" return isinstance(type_, GraphQLWrappingType) def assert_wrapping_type(type_: Any) -> GraphQLWrappingType: + """Assert that this is a GraphQL wrapping type.""" if not is_wrapping_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL wrapping type.") - return cast(GraphQLWrappingType, type_) + msg = f"Expected {type_} to be a GraphQL wrapping type." + raise TypeError(msg) + return type_ class GraphQLNamedTypeKwargs(TypedDict, total=False): + """Arguments for GraphQL named types""" + name: str - description: Optional[str] - extensions: Dict[str, Any] + description: str | None + extensions: dict[str, Any] # unfortunately, we cannot make the following more specific, because they are # used by subclasses with different node types and typed dicts cannot be refined - ast_node: Optional[Any] - extension_ast_nodes: Tuple[Any, ...] + ast_node: Any | None + extension_ast_nodes: tuple[Any, ...] class GraphQLNamedType(GraphQLType): """Base class for all GraphQL named types""" name: str - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[TypeDefinitionNode] - extension_ast_nodes: Tuple[TypeExtensionNode, ...] + description: str | None + extensions: dict[str, Any] + ast_node: TypeDefinitionNode | None + extension_ast_nodes: tuple[TypeExtensionNode, ...] + + reserved_types: Mapping[str, GraphQLNamedType] = {} + + def __new__(cls, name: str, *_args: Any, **_kwargs: Any) -> GraphQLNamedType: + """Create a GraphQL named type.""" + if name in cls.reserved_types: + msg = f"Redefinition of reserved type {name!r}" + raise TypeError(msg) + return super().__new__(cls) + + def __reduce__(self) -> tuple[Callable, tuple]: + return self._get_instance, (self.name, tuple(self.to_kwargs().items())) + + @classmethod + def _get_instance(cls, name: str, args: tuple) -> GraphQLNamedType: + try: + return cls.reserved_types[name] + except KeyError: + return cls(**dict(args)) # pyright: ignore def __init__( self, name: str, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[TypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[TypeExtensionNode]] = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: TypeDefinitionNode | None = None, + extension_ast_nodes: Collection[TypeExtensionNode] | None = None, ) -> None: assert_name(name) - if description is not None and not is_description(description): - raise TypeError("The description must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError(f"{name} extensions must be a dictionary with string keys.") - if ast_node and not isinstance(ast_node, TypeDefinitionNode): - raise TypeError(f"{name} AST node must be a TypeDefinitionNode.") - if extension_ast_nodes: - if not is_collection(extension_ast_nodes) or not all( - isinstance(node, TypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." 
- ) - if not isinstance(extension_ast_nodes, tuple): - extension_ast_nodes = tuple(extension_ast_nodes) - else: - extension_ast_nodes = () self.name = name self.description = description - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node - self.extension_ast_nodes = extension_ast_nodes + self.extension_ast_nodes = ( + tuple(extension_ast_nodes) if extension_ast_nodes else () + ) def __repr__(self) -> str: return f"<{self.__class__.__name__} {self.name!r}>" @@ -272,6 +284,7 @@ def __str__(self) -> str: return self.name def to_kwargs(self) -> GraphQLNamedTypeKwargs: + """Get corresponding arguments.""" return GraphQLNamedTypeKwargs( name=self.name, description=self.description, @@ -280,15 +293,15 @@ def to_kwargs(self) -> GraphQLNamedTypeKwargs: extension_ast_nodes=self.extension_ast_nodes, ) - def __copy__(self) -> "GraphQLNamedType": # pragma: no cover + def __copy__(self) -> GraphQLNamedType: # pragma: no cover return self.__class__(**self.to_kwargs()) T = TypeVar("T") -ThunkCollection = Union[Callable[[], Collection[T]], Collection[T]] -ThunkMapping = Union[Callable[[], Mapping[str, T]], Mapping[str, T]] -Thunk = Union[Callable[[], T], T] +ThunkCollection: TypeAlias = Union[Callable[[], Collection[T]], Collection[T]] +ThunkMapping: TypeAlias = Union[Callable[[], Mapping[str, T]], Mapping[str, T]] +Thunk: TypeAlias = Union[Callable[[], T], T] def resolve_thunk(thunk: Thunk[T]) -> T: @@ -300,16 +313,20 @@ def resolve_thunk(thunk: Thunk[T]) -> T: return thunk() if callable(thunk) else thunk -GraphQLScalarSerializer = Callable[[Any], Any] -GraphQLScalarValueParser = Callable[[Any], Any] -GraphQLScalarLiteralParser = Callable[[ValueNode, Optional[Dict[str, Any]]], Any] +GraphQLScalarSerializer: TypeAlias = Callable[[Any], Any] +GraphQLScalarValueParser: TypeAlias = Callable[[Any], Any] +GraphQLScalarLiteralParser: TypeAlias = Callable[ + [ValueNode, Optional[Dict[str, Any]]], Any +] class GraphQLScalarTypeKwargs(GraphQLNamedTypeKwargs, total=False): - serialize: Optional[GraphQLScalarSerializer] - parse_value: Optional[GraphQLScalarValueParser] - parse_literal: Optional[GraphQLScalarLiteralParser] - specified_by_url: Optional[str] + """Arguments for GraphQL scalar types""" + + serialize: GraphQLScalarSerializer | None + parse_value: GraphQLScalarValueParser | None + parse_literal: GraphQLScalarLiteralParser | None + specified_by_url: str | None class GraphQLScalarType(GraphQLNamedType): @@ -340,21 +357,21 @@ def serialize_odd(value: Any) -> int: """ - specified_by_url: Optional[str] - ast_node: Optional[ScalarTypeDefinitionNode] - extension_ast_nodes: Tuple[ScalarTypeExtensionNode, ...] + specified_by_url: str | None + ast_node: ScalarTypeDefinitionNode | None + extension_ast_nodes: tuple[ScalarTypeExtensionNode, ...] 
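# Reserved-name sketch for the new GraphQLNamedType.__new__ guard above.
# The library registers the specified scalars and introspection types in
# reserved_types; here we register our own entry to keep the demo
# self-contained (do not overwrite reserved_types in real code):
from graphql import GraphQLScalarType

Known = GraphQLScalarType("Known")
GraphQLScalarType.reserved_types = {"Known": Known}  # demo only
try:
    GraphQLScalarType("Known")
except TypeError as error:
    print(error)  # Redefinition of reserved type 'Known'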
def __init__( self, name: str, - serialize: Optional[GraphQLScalarSerializer] = None, - parse_value: Optional[GraphQLScalarValueParser] = None, - parse_literal: Optional[GraphQLScalarLiteralParser] = None, - description: Optional[str] = None, - specified_by_url: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[ScalarTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[ScalarTypeExtensionNode]] = None, + serialize: GraphQLScalarSerializer | None = None, + parse_value: GraphQLScalarValueParser | None = None, + parse_literal: GraphQLScalarLiteralParser | None = None, + description: str | None = None, + specified_by_url: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: ScalarTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[ScalarTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -363,41 +380,18 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if specified_by_url is not None and not isinstance(specified_by_url, str): - raise TypeError( - f"{name} must provide 'specified_by_url' as a string," - f" but got: {inspect(specified_by_url)}." - ) - if serialize is not None and not callable(serialize): - raise TypeError( - f"{name} must provide 'serialize' as a function." - " If this custom Scalar is also used as an input type," - " ensure 'parse_value' and 'parse_literal' functions" - " are also provided." - ) - if parse_literal is not None and ( - not callable(parse_literal) - or (parse_value is None or not callable(parse_value)) - ): - raise TypeError( - f"{name} must provide" - " both 'parse_value' and 'parse_literal' as functions." - ) - if ast_node and not isinstance(ast_node, ScalarTypeDefinitionNode): - raise TypeError(f"{name} AST node must be a ScalarTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, ScalarTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of ScalarTypeExtensionNode instances." - ) + if serialize is not None: self.serialize = serialize # type: ignore if parse_value is not None: self.parse_value = parse_value # type: ignore if parse_literal is not None: self.parse_literal = parse_literal # type: ignore + if parse_literal is not None and parse_value is None: + msg = ( + f"{name} must provide both 'parse_value' and 'parse_literal' functions." + ) + raise TypeError(msg) self.specified_by_url = specified_by_url def __repr__(self) -> str: @@ -425,7 +419,7 @@ def parse_value(value: Any) -> Any: return value def parse_literal( - self, node: ValueNode, variables: Optional[Dict[str, Any]] = None + self, node: ValueNode, variables: dict[str, Any] | None = None ) -> Any: """Parses an externally provided literal value to use as an input. 
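# Custom scalar sketch matching the slimmed-down __init__ above; serialize_odd
# is taken from the class docstring, and only the parse_literal-without-
# parse_value combination is still rejected eagerly:
from typing import Any

from graphql import GraphQLScalarType

def serialize_odd(value: Any) -> int:
    if value % 2:
        return value
    raise ValueError(f"Value is not odd: {value}")

OddType = GraphQLScalarType("Odd", serialize=serialize_odd)
assert OddType.serialize(3) == 3

try:
    GraphQLScalarType("Bad", parse_literal=lambda node, _vars=None: None)
except TypeError as error:
    print(error)  # Bad must provide both 'parse_value' and 'parse_literal' functions.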
@@ -435,9 +429,10 @@ def parse_literal( return self.parse_value(value_from_ast_untyped(node, variables)) def to_kwargs(self) -> GraphQLScalarTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLScalarTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLScalarTypeKwargs( + super().to_kwargs(), # type: ignore serialize=None if self.serialize is GraphQLScalarType.serialize else self.serialize, @@ -451,101 +446,78 @@ def to_kwargs(self) -> GraphQLScalarTypeKwargs: specified_by_url=self.specified_by_url, ) - def __copy__(self) -> "GraphQLScalarType": # pragma: no cover + def __copy__(self) -> GraphQLScalarType: # pragma: no cover return self.__class__(**self.to_kwargs()) -def is_scalar_type(type_: Any) -> bool: +def is_scalar_type(type_: Any) -> TypeGuard[GraphQLScalarType]: + """Check whether this is a GraphQL scalar type.""" return isinstance(type_, GraphQLScalarType) def assert_scalar_type(type_: Any) -> GraphQLScalarType: + """Assert that this is a GraphQL scalar type.""" if not is_scalar_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Scalar type.") - return cast(GraphQLScalarType, type_) + msg = f"Expected {type_} to be a GraphQL Scalar type." + raise TypeError(msg) + return type_ -GraphQLArgumentMap = Dict[str, "GraphQLArgument"] +GraphQLArgumentMap: TypeAlias = Dict[str, "GraphQLArgument"] class GraphQLFieldKwargs(TypedDict, total=False): - type_: "GraphQLOutputType" - args: Optional[GraphQLArgumentMap] - resolve: Optional["GraphQLFieldResolver"] - subscribe: Optional["GraphQLFieldResolver"] - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[FieldDefinitionNode] + """Arguments for GraphQL fields""" + + type_: GraphQLOutputType + args: GraphQLArgumentMap | None + resolve: GraphQLFieldResolver | None + subscribe: GraphQLFieldResolver | None + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: FieldDefinitionNode | None class GraphQLField: """Definition of a GraphQL field""" - type: "GraphQLOutputType" + type: GraphQLOutputType args: GraphQLArgumentMap - resolve: Optional["GraphQLFieldResolver"] - subscribe: Optional["GraphQLFieldResolver"] - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[FieldDefinitionNode] + resolve: GraphQLFieldResolver | None + subscribe: GraphQLFieldResolver | None + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: FieldDefinitionNode | None def __init__( self, - type_: "GraphQLOutputType", - args: Optional[GraphQLArgumentMap] = None, - resolve: Optional["GraphQLFieldResolver"] = None, - subscribe: Optional["GraphQLFieldResolver"] = None, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[FieldDefinitionNode] = None, + type_: GraphQLOutputType, + args: GraphQLArgumentMap | None = None, + resolve: GraphQLFieldResolver | None = None, + subscribe: GraphQLFieldResolver | None = None, + description: str | None = None, + deprecation_reason: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: FieldDefinitionNode | None = None, ) -> None: - if not is_output_type(type_): - raise TypeError("Field type must be an output type.") - if args is None: - args = {} - elif not isinstance(args, dict): - raise TypeError("Field args must be a dict with argument names as 
keys.") - elif not all( - isinstance(value, GraphQLArgument) or is_input_type(value) - for value in args.values() - ): - raise TypeError( - "Field args must be GraphQLArguments or input type objects." - ) - else: + if args: args = { assert_name(name): value if isinstance(value, GraphQLArgument) - else GraphQLArgument(cast(GraphQLInputType, value)) + else GraphQLArgument(cast("GraphQLInputType", value)) for name, value in args.items() } - if resolve is not None and not callable(resolve): - raise TypeError( - "Field resolver must be a function if provided, " - f" but got: {inspect(resolve)}." - ) - if description is not None and not is_description(description): - raise TypeError("The description must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError("The deprecation reason must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError("Field extensions must be a dictionary with string keys.") - if ast_node and not isinstance(ast_node, FieldDefinitionNode): - raise TypeError("Field AST node must be a FieldDefinitionNode.") + else: + args = {} self.type = type_ self.args = args or {} self.resolve = resolve self.subscribe = subscribe self.description = description self.deprecation_reason = deprecation_reason - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __repr__(self) -> str: @@ -554,7 +526,7 @@ def __repr__(self) -> str: def __str__(self) -> str: return f"Field: {self.type}" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLField) and self.type == other.type @@ -566,6 +538,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLFieldKwargs: + """Get corresponding arguments.""" return GraphQLFieldKwargs( type_=self.type, args=self.args.copy() if self.args else None, @@ -577,114 +550,130 @@ def to_kwargs(self) -> GraphQLFieldKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLField": # pragma: no cover + def __copy__(self) -> GraphQLField: # pragma: no cover return self.__class__(**self.to_kwargs()) -class GraphQLResolveInfo(NamedTuple): - """Collection of information passed to the resolvers. +TContext = TypeVar("TContext") # pylint: disable=invalid-name + +try: - This is always passed as the first argument to the resolvers. + class GraphQLResolveInfo(NamedTuple, Generic[TContext]): # pyright: ignore + """Collection of information passed to the resolvers. - Note that contrary to the JavaScript implementation, the context (commonly used to - represent an authenticated user, or request-specific caches) is included here and - not passed as an additional argument. - """ + This is always passed as the first argument to the resolvers. - field_name: str - field_nodes: List[FieldNode] - return_type: "GraphQLOutputType" - parent_type: "GraphQLObjectType" - path: Path - schema: "GraphQLSchema" - fragments: Dict[str, FragmentDefinitionNode] - root_value: Any - operation: OperationDefinitionNode - variable_values: Dict[str, Any] - context: Any - is_awaitable: Callable[[Any], bool] + Note that contrary to the JavaScript implementation, the context (commonly used + to represent an authenticated user, or request-specific caches) is included here + and not passed as an additional argument. 
+ """ + + field_name: str + field_nodes: list[FieldNode] + return_type: GraphQLOutputType + parent_type: GraphQLObjectType + path: Path + schema: GraphQLSchema + fragments: dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: dict[str, Any] + context: TContext + is_awaitable: Callable[[Any], bool] +except TypeError as error: # pragma: no cover + if "Multiple inheritance with NamedTuple is not supported" not in str(error): + raise # only catch expected error for Python 3.9 and 3.10 + + class GraphQLResolveInfo(NamedTuple): # type: ignore[no-redef] + """Collection of information passed to the resolvers. + + This is always passed as the first argument to the resolvers. + + Note that contrary to the JavaScript implementation, the context (commonly used + to represent an authenticated user, or request-specific caches) is included here + and not passed as an additional argument. + """ + + field_name: str + field_nodes: list[FieldNode] + return_type: GraphQLOutputType + parent_type: GraphQLObjectType + path: Path + schema: GraphQLSchema + fragments: dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: dict[str, Any] + context: Any + is_awaitable: Callable[[Any], bool] # Note: Contrary to the Javascript implementation of GraphQLFieldResolver, # the context is passed as part of the GraphQLResolveInfo and any arguments # are passed individually as keyword arguments. -GraphQLFieldResolverWithoutArgs = Callable[[Any, GraphQLResolveInfo], Any] +GraphQLFieldResolverWithoutArgs: TypeAlias = Callable[[Any, GraphQLResolveInfo], Any] # Unfortunately there is currently no syntax to indicate optional or keyword # arguments in Python, so we also allow any other Callable as a workaround: -GraphQLFieldResolver = Callable[..., Any] +GraphQLFieldResolver: TypeAlias = Callable[..., Any] # Note: Contrary to the Javascript implementation of GraphQLTypeResolver, # the context is passed as part of the GraphQLResolveInfo: -GraphQLTypeResolver = Callable[ +GraphQLTypeResolver: TypeAlias = Callable[ [Any, GraphQLResolveInfo, "GraphQLAbstractType"], AwaitableOrValue[Optional[str]], ] # Note: Contrary to the Javascript implementation of GraphQLIsTypeOfFn, # the context is passed as part of the GraphQLResolveInfo: -GraphQLIsTypeOfFn = Callable[[Any, GraphQLResolveInfo], AwaitableOrValue[bool]] +GraphQLIsTypeOfFn: TypeAlias = Callable[ + [Any, GraphQLResolveInfo], AwaitableOrValue[bool] +] -GraphQLFieldMap = Dict[str, GraphQLField] +GraphQLFieldMap: TypeAlias = Dict[str, GraphQLField] class GraphQLArgumentKwargs(TypedDict, total=False): - type_: "GraphQLInputType" + """Python arguments for GraphQL arguments""" + + type_: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None class GraphQLArgument: """Definition of a GraphQL argument""" - type: "GraphQLInputType" + type: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] # for transforming names (extension of GraphQL.js) - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None # for transforming 
names (extension of GraphQL.js) + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None def __init__( self, - type_: "GraphQLInputType", + type_: GraphQLInputType, default_value: Any = Undefined, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - out_name: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InputValueDefinitionNode] = None, + description: str | None = None, + deprecation_reason: str | None = None, + out_name: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InputValueDefinitionNode | None = None, ) -> None: - if not is_input_type(type_): - raise TypeError("Argument type must be a GraphQL input type.") - if description is not None and not is_description(description): - raise TypeError("Argument description must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError("Argument deprecation reason must be a string.") - if out_name is not None and not isinstance(out_name, str): - raise TypeError("Argument out name must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError( - "Argument extensions must be a dictionary with string keys." - ) - if ast_node and not isinstance(ast_node, InputValueDefinitionNode): - raise TypeError("Argument AST node must be an InputValueDefinitionNode.") self.type = type_ self.default_value = default_value self.description = description self.deprecation_reason = deprecation_reason self.out_name = out_name - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLArgument) and self.type == other.type @@ -696,6 +685,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLArgumentKwargs: + """Get corresponding arguments.""" return GraphQLArgumentKwargs( type_=self.type, default_value=self.default_value, @@ -706,19 +696,21 @@ def to_kwargs(self) -> GraphQLArgumentKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLArgument": # pragma: no cover + def __copy__(self) -> GraphQLArgument: # pragma: no cover return self.__class__(**self.to_kwargs()) def is_required_argument(arg: GraphQLArgument) -> bool: + """Check whether the argument is required.""" return is_non_null_type(arg.type) and arg.default_value is Undefined class GraphQLObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL object types""" fields: GraphQLFieldMap - interfaces: Tuple["GraphQLInterfaceType", ...] - is_type_of: Optional[GraphQLIsTypeOfFn] + interfaces: tuple[GraphQLInterfaceType, ...] + is_type_of: GraphQLIsTypeOfFn | None class GraphQLObjectType(GraphQLNamedType): @@ -749,20 +741,20 @@ class GraphQLObjectType(GraphQLNamedType): """ - is_type_of: Optional[GraphQLIsTypeOfFn] - ast_node: Optional[ObjectTypeDefinitionNode] - extension_ast_nodes: Tuple[ObjectTypeExtensionNode, ...] + is_type_of: GraphQLIsTypeOfFn | None + ast_node: ObjectTypeDefinitionNode | None + extension_ast_nodes: tuple[ObjectTypeExtensionNode, ...] 
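# Object-type sketch: fields may still be passed as a thunk that is resolved
# lazily (and cached) on first access, per the cached_property further below:
from graphql import GraphQLField, GraphQLObjectType, GraphQLString

QueryType = GraphQLObjectType(
    "Query", lambda: {"hello": GraphQLField(GraphQLString)}
)
assert list(QueryType.fields) == ["hello"]  # thunk resolved here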
def __init__( self, name: str, fields: ThunkMapping[GraphQLField], - interfaces: Optional[ThunkCollection["GraphQLInterfaceType"]] = None, - is_type_of: Optional[GraphQLIsTypeOfFn] = None, - extensions: Optional[Dict[str, Any]] = None, - description: Optional[str] = None, - ast_node: Optional[ObjectTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[ObjectTypeExtensionNode]] = None, + interfaces: ThunkCollection[GraphQLInterfaceType] | None = None, + is_type_of: GraphQLIsTypeOfFn | None = None, + extensions: dict[str, Any] | None = None, + description: str | None = None, + ast_node: ObjectTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[ObjectTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -771,34 +763,21 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if is_type_of is not None and not callable(is_type_of): - raise TypeError( - f"{name} must provide 'is_type_of' as a function," - f" but got: {inspect(is_type_of)}." - ) - if ast_node and not isinstance(ast_node, ObjectTypeDefinitionNode): - raise TypeError(f"{name} AST node must be an ObjectTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, ObjectTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of ObjectTypeExtensionNode instances." - ) self._fields = fields self._interfaces = interfaces self.is_type_of = is_type_of def to_kwargs(self) -> GraphQLObjectTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLObjectTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLObjectTypeKwargs( + super().to_kwargs(), # type: ignore fields=self.fields.copy(), interfaces=self.interfaces, is_type_of=self.is_type_of, ) - def __copy__(self) -> "GraphQLObjectType": # pragma: no cover + def __copy__(self) -> GraphQLObjectType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -808,64 +787,48 @@ def fields(self) -> GraphQLFieldMap: fields = resolve_thunk(self._fields) except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} fields cannot be resolved. {error}") from error - if not isinstance(fields, Mapping) or not all( - isinstance(key, str) for key in fields - ): - raise TypeError( - f"{self.name} fields must be specified" - " as a mapping with field names as keys." - ) - if not all( - isinstance(value, GraphQLField) or is_output_type(value) - for value in fields.values() - ): - raise TypeError( - f"{self.name} fields must be GraphQLField or output type objects." - ) + msg = f"{self.name} fields cannot be resolved. {error}" + raise cls(msg) from error return { assert_name(name): value if isinstance(value, GraphQLField) - else GraphQLField(value) # type: ignore + else GraphQLField(value) for name, value in fields.items() } @cached_property - def interfaces(self) -> Tuple["GraphQLInterfaceType", ...]: + def interfaces(self) -> tuple[GraphQLInterfaceType, ...]: """Get provided interfaces.""" try: - interfaces: Collection["GraphQLInterfaceType"] = resolve_thunk( + interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} interfaces cannot be resolved. 
{error}") from error - if interfaces is None: - interfaces = () - elif not is_collection(interfaces) or not all( - isinstance(value, GraphQLInterfaceType) for value in interfaces - ): - raise TypeError( - f"{self.name} interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - return tuple(interfaces) + msg = f"{self.name} interfaces cannot be resolved. {error}" + raise cls(msg) from error + return tuple(interfaces) if interfaces else () -def is_object_type(type_: Any) -> bool: +def is_object_type(type_: Any) -> TypeGuard[GraphQLObjectType]: + """Check whether this is a graphql object type""" return isinstance(type_, GraphQLObjectType) def assert_object_type(type_: Any) -> GraphQLObjectType: + """Assume that this is a graphql object type""" if not is_object_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Object type.") - return cast(GraphQLObjectType, type_) + msg = f"Expected {type_} to be a GraphQL Object type." + raise TypeError(msg) + return type_ class GraphQLInterfaceTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL interface types""" + fields: GraphQLFieldMap - interfaces: Tuple["GraphQLInterfaceType", ...] - resolve_type: Optional[GraphQLTypeResolver] + interfaces: tuple[GraphQLInterfaceType, ...] + resolve_type: GraphQLTypeResolver | None class GraphQLInterfaceType(GraphQLNamedType): @@ -883,20 +846,20 @@ class GraphQLInterfaceType(GraphQLNamedType): }) """ - resolve_type: Optional[GraphQLTypeResolver] - ast_node: Optional[InterfaceTypeDefinitionNode] - extension_ast_nodes: Tuple[InterfaceTypeExtensionNode, ...] + resolve_type: GraphQLTypeResolver | None + ast_node: InterfaceTypeDefinitionNode | None + extension_ast_nodes: tuple[InterfaceTypeExtensionNode, ...] def __init__( self, name: str, fields: ThunkMapping[GraphQLField], - interfaces: Optional[ThunkCollection["GraphQLInterfaceType"]] = None, - resolve_type: Optional[GraphQLTypeResolver] = None, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InterfaceTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[InterfaceTypeExtensionNode]] = None, + interfaces: ThunkCollection[GraphQLInterfaceType] | None = None, + resolve_type: GraphQLTypeResolver | None = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InterfaceTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[InterfaceTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -905,34 +868,21 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if resolve_type is not None and not callable(resolve_type): - raise TypeError( - f"{name} must provide 'resolve_type' as a function," - f" but got: {inspect(resolve_type)}." - ) - if ast_node and not isinstance(ast_node, InterfaceTypeDefinitionNode): - raise TypeError(f"{name} AST node must be an InterfaceTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, InterfaceTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of InterfaceTypeExtensionNode instances." 
- ) self._fields = fields self._interfaces = interfaces self.resolve_type = resolve_type def to_kwargs(self) -> GraphQLInterfaceTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLInterfaceTypeKwargs( # type: ignore - super().to_kwargs(), + return GraphQLInterfaceTypeKwargs( + super().to_kwargs(), # type: ignore fields=self.fields.copy(), interfaces=self.interfaces, resolve_type=self.resolve_type, ) - def __copy__(self) -> "GraphQLInterfaceType": # pragma: no cover + def __copy__(self) -> GraphQLInterfaceType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property @@ -942,63 +892,47 @@ def fields(self) -> GraphQLFieldMap: fields = resolve_thunk(self._fields) except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} fields cannot be resolved. {error}") from error - if not isinstance(fields, Mapping) or not all( - isinstance(key, str) for key in fields - ): - raise TypeError( - f"{self.name} fields must be specified" - " as a mapping with field names as keys." - ) - if not all( - isinstance(value, GraphQLField) or is_output_type(value) - for value in fields.values() - ): - raise TypeError( - f"{self.name} fields must be GraphQLField or output type objects." - ) + msg = f"{self.name} fields cannot be resolved. {error}" + raise cls(msg) from error return { assert_name(name): value if isinstance(value, GraphQLField) - else GraphQLField(value) # type: ignore + else GraphQLField(value) for name, value in fields.items() } @cached_property - def interfaces(self) -> Tuple["GraphQLInterfaceType", ...]: + def interfaces(self) -> tuple[GraphQLInterfaceType, ...]: """Get provided interfaces.""" try: - interfaces: Collection["GraphQLInterfaceType"] = resolve_thunk( + interfaces: Collection[GraphQLInterfaceType] = resolve_thunk( self._interfaces # type: ignore ) except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} interfaces cannot be resolved. {error}") from error - if interfaces is None: - interfaces = () - elif not is_collection(interfaces) or not all( - isinstance(value, GraphQLInterfaceType) for value in interfaces - ): - raise TypeError( - f"{self.name} interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - return tuple(interfaces) + msg = f"{self.name} interfaces cannot be resolved. {error}" + raise cls(msg) from error + return tuple(interfaces) if interfaces else () -def is_interface_type(type_: Any) -> bool: +def is_interface_type(type_: Any) -> TypeGuard[GraphQLInterfaceType]: + """Check whether this is a GraphQL interface type.""" return isinstance(type_, GraphQLInterfaceType) def assert_interface_type(type_: Any) -> GraphQLInterfaceType: + """Assert that this is a GraphQL interface type.""" if not is_interface_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Interface type.") - return cast(GraphQLInterfaceType, type_) + msg = f"Expected {type_} to be a GraphQL Interface type." + raise TypeError(msg) + return type_ class GraphQLUnionTypeKwargs(GraphQLNamedTypeKwargs, total=False): - types: Tuple[GraphQLObjectType, ...] - resolve_type: Optional[GraphQLTypeResolver] + """Arguments for GraphQL union types""" + + types: tuple[GraphQLObjectType, ...] 
+ resolve_type: GraphQLTypeResolver | None class GraphQLUnionType(GraphQLNamedType): @@ -1019,19 +953,19 @@ def resolve_type(obj, _info, _type): PetType = GraphQLUnionType('Pet', [DogType, CatType], resolve_type) """ - resolve_type: Optional[GraphQLTypeResolver] - ast_node: Optional[UnionTypeDefinitionNode] - extension_ast_nodes: Tuple[UnionTypeExtensionNode, ...] + resolve_type: GraphQLTypeResolver | None + ast_node: UnionTypeDefinitionNode | None + extension_ast_nodes: tuple[UnionTypeExtensionNode, ...] def __init__( self, name: str, types: ThunkCollection[GraphQLObjectType], - resolve_type: Optional[GraphQLTypeResolver] = None, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[UnionTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[UnionTypeExtensionNode]] = None, + resolve_type: GraphQLTypeResolver | None = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: UnionTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[UnionTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -1040,68 +974,54 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if resolve_type is not None and not callable(resolve_type): - raise TypeError( - f"{name} must provide 'resolve_type' as a function," - f" but got: {inspect(resolve_type)}." - ) - if ast_node and not isinstance(ast_node, UnionTypeDefinitionNode): - raise TypeError(f"{name} AST node must be a UnionTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, UnionTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of UnionTypeExtensionNode instances." - ) self._types = types self.resolve_type = resolve_type def to_kwargs(self) -> GraphQLUnionTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLUnionTypeKwargs( # type: ignore - super().to_kwargs(), types=self.types, resolve_type=self.resolve_type + return GraphQLUnionTypeKwargs( + super().to_kwargs(), # type: ignore + types=self.types, + resolve_type=self.resolve_type, ) - def __copy__(self) -> "GraphQLUnionType": # pragma: no cover + def __copy__(self) -> GraphQLUnionType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property - def types(self) -> Tuple[GraphQLObjectType, ...]: + def types(self) -> tuple[GraphQLObjectType, ...]: """Get provided types.""" try: types: Collection[GraphQLObjectType] = resolve_thunk(self._types) except Exception as error: cls = GraphQLError if isinstance(error, GraphQLError) else TypeError - raise cls(f"{self.name} types cannot be resolved. {error}") from error - if types is None: - types = () - elif not is_collection(types) or not all( - isinstance(value, GraphQLObjectType) for value in types - ): - raise TypeError( - f"{self.name} types must be specified" - " as a collection of GraphQLObjectType instances." - ) - return tuple(types) + msg = f"{self.name} types cannot be resolved. 
{error}" + raise cls(msg) from error + return tuple(types) if types else () -def is_union_type(type_: Any) -> bool: +def is_union_type(type_: Any) -> TypeGuard[GraphQLUnionType]: + """Check whether this is a GraphQL union type.""" return isinstance(type_, GraphQLUnionType) def assert_union_type(type_: Any) -> GraphQLUnionType: + """Assert that this is a GraphQL union type.""" if not is_union_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Union type.") - return cast(GraphQLUnionType, type_) + msg = f"Expected {type_} to be a GraphQL Union type." + raise TypeError(msg) + return type_ -GraphQLEnumValueMap = Dict[str, "GraphQLEnumValue"] +GraphQLEnumValueMap: TypeAlias = Dict[str, "GraphQLEnumValue"] class GraphQLEnumTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL enum types""" + values: GraphQLEnumValueMap - names_as_values: Optional[bool] + names_as_values: bool | None class GraphQLEnumType(GraphQLNamedType): @@ -1139,18 +1059,18 @@ class RGBEnum(enum.Enum): """ values: GraphQLEnumValueMap - ast_node: Optional[EnumTypeDefinitionNode] - extension_ast_nodes: Tuple[EnumTypeExtensionNode, ...] + ast_node: EnumTypeDefinitionNode | None + extension_ast_nodes: tuple[EnumTypeExtensionNode, ...] def __init__( self, name: str, - values: Union[GraphQLEnumValueMap, Mapping[str, Any], Type[Enum]], - names_as_values: Optional[bool] = False, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[EnumTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[EnumTypeExtensionNode]] = None, + values: GraphQLEnumValueMap | Mapping[str, Any] | type[Enum], + names_as_values: bool | None = False, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: EnumTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[EnumTypeExtensionNode] | None = None, ) -> None: super().__init__( name=name, @@ -1160,22 +1080,22 @@ def __init__( extension_ast_nodes=extension_ast_nodes, ) try: # check for enum - values = cast(Enum, values).__members__ # type: ignore + values = cast("Enum", values).__members__ # type: ignore except AttributeError: if not isinstance(values, Mapping) or not all( isinstance(name, str) for name in values ): try: - # noinspection PyTypeChecker - values = dict(values) # type: ignore - except (TypeError, ValueError): - raise TypeError( + values = dict(values) # pyright: ignore + except (TypeError, ValueError) as error: + msg = ( f"{name} values must be an Enum or a mapping" " with value names as keys." ) - values = cast(Dict[str, Any], values) + raise TypeError(msg) from error + values = cast("Dict[str, Any]", values) else: - values = cast(Dict[str, Enum], values) + values = cast("Dict[str, Enum]", values) if names_as_values is False: values = {key: value.value for key, value in values.items()} elif names_as_values is True: @@ -1186,30 +1106,23 @@ def __init__( else GraphQLEnumValue(value) for key, value in values.items() } - if ast_node and not isinstance(ast_node, EnumTypeDefinitionNode): - raise TypeError(f"{name} AST node must be an EnumTypeDefinitionNode.") - if extension_ast_nodes and not all( - isinstance(node, EnumTypeExtensionNode) for node in extension_ast_nodes - ): - raise TypeError( - f"{name} extension AST nodes must be specified" - " as a collection of EnumTypeExtensionNode instances." 
- ) self.values = values def to_kwargs(self) -> GraphQLEnumTypeKwargs: + """Get corresponding arguments.""" # noinspection PyArgumentList - return GraphQLEnumTypeKwargs( # type: ignore - super().to_kwargs(), values=self.values.copy() + return GraphQLEnumTypeKwargs( + super().to_kwargs(), # type: ignore + values=self.values.copy(), ) - def __copy__(self) -> "GraphQLEnumType": # pragma: no cover + def __copy__(self) -> GraphQLEnumType: # pragma: no cover return self.__class__(**self.to_kwargs()) @cached_property - def _value_lookup(self) -> Dict[Any, str]: + def _value_lookup(self) -> dict[Any, str]: # use first value or name as lookup - lookup: Dict[Any, str] = {} + lookup: dict[Any, str] = {} for name, enum_value in self.values.items(): value = enum_value.value if value is None or value is Undefined: @@ -1222,6 +1135,7 @@ def _value_lookup(self) -> Dict[Any, str]: return lookup def serialize(self, output_value: Any) -> str: + """Serialize an output value.""" try: return self._value_lookup[output_value] except KeyError: # hashable value not found @@ -1230,111 +1144,105 @@ def serialize(self, output_value: Any) -> str: for enum_name, enum_value in self.values.items(): if enum_value.value == output_value: return enum_name - raise GraphQLError( - f"Enum '{self.name}' cannot represent value: {inspect(output_value)}" - ) + msg = f"Enum '{self.name}' cannot represent value: {inspect(output_value)}" + raise GraphQLError(msg) def parse_value(self, input_value: str) -> Any: + """Parse an enum value.""" if isinstance(input_value, str): try: enum_value = self.values[input_value] - except KeyError: - raise GraphQLError( + except KeyError as error: + msg = ( f"Value '{input_value}' does not exist in '{self.name}' enum." + did_you_mean_enum_value(self, input_value) ) + raise GraphQLError(msg) from error return enum_value.value value_str = inspect(input_value) - raise GraphQLError( + msg = ( f"Enum '{self.name}' cannot represent non-string value: {value_str}." + did_you_mean_enum_value(self, value_str) ) + raise GraphQLError(msg) def parse_literal( - self, value_node: ValueNode, _variables: Optional[Dict[str, Any]] = None + self, value_node: ValueNode, _variables: dict[str, Any] | None = None ) -> Any: + """Parse literal value.""" # Note: variables will be resolved before calling this method. if isinstance(value_node, EnumValueNode): try: enum_value = self.values[value_node.value] - except KeyError: + except KeyError as error: value_str = print_ast(value_node) - raise GraphQLError( + msg = ( f"Value '{value_str}' does not exist in '{self.name}' enum." - + did_you_mean_enum_value(self, value_str), - value_node, + + did_you_mean_enum_value(self, value_str) ) + raise GraphQLError(msg, value_node) from error return enum_value.value value_str = print_ast(value_node) - raise GraphQLError( + msg = ( f"Enum '{self.name}' cannot represent non-enum value: {value_str}." - + did_you_mean_enum_value(self, value_str), - value_node, + + did_you_mean_enum_value(self, value_str) ) + raise GraphQLError(msg, value_node) -def is_enum_type(type_: Any) -> bool: +def is_enum_type(type_: Any) -> TypeGuard[GraphQLEnumType]: + """Check whether this is a GraphQL enum type.""" return isinstance(type_, GraphQLEnumType) def assert_enum_type(type_: Any) -> GraphQLEnumType: + """Assert that this is a GraphQL enum type.""" if not is_enum_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL Enum type.") - return cast(GraphQLEnumType, type_) + msg = f"Expected {type_} to be a GraphQL Enum type." 
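# Editor's note: an illustrative sketch (not part of the patch) of how the
# enum machinery above behaves; `Color` and `ColorType` are hypothetical names.
# With the default names_as_values=False, the Python Enum *values* become the
# internal representation; names_as_values=None keeps the members themselves.
from enum import Enum

from graphql import GraphQLEnumType


class Color(Enum):
    RED = 1
    GREEN = 2


ColorType = GraphQLEnumType("Color", Color)  # internal values are 1 and 2
assert ColorType.serialize(2) == "GREEN"
assert ColorType.parse_value("RED") == 1

MemberColorType = GraphQLEnumType("Color", Color, names_as_values=None)
assert MemberColorType.serialize(Color.RED) == "RED"
assert MemberColorType.parse_value("GREEN") is Color.GREEN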
+ raise TypeError(msg) + return type_ def did_you_mean_enum_value(enum_type: GraphQLEnumType, unknown_value_str: str) -> str: + """Return suggestions for enum value.""" suggested_values = suggestion_list(unknown_value_str, enum_type.values) return did_you_mean(suggested_values, "the enum value") class GraphQLEnumValueKwargs(TypedDict, total=False): + """Arguments for GraphQL enum values""" + value: Any - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[EnumValueDefinitionNode] + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: EnumValueDefinitionNode | None class GraphQLEnumValue: + """A GraphQL enum value.""" value: Any - description: Optional[str] - deprecation_reason: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[EnumValueDefinitionNode] + description: str | None + deprecation_reason: str | None + extensions: dict[str, Any] + ast_node: EnumValueDefinitionNode | None def __init__( self, value: Any = None, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[EnumValueDefinitionNode] = None, + description: str | None = None, + deprecation_reason: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: EnumValueDefinitionNode | None = None, ) -> None: - if description is not None and not is_description(description): - raise TypeError("The description of the enum value must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError( - "The deprecation reason for the enum value must be a string." - ) - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError( - "Enum value extensions must be a dictionary with string keys." 
- ) - if ast_node and not isinstance(ast_node, EnumValueDefinitionNode): - raise TypeError("AST node must be an EnumValueDefinitionNode.") self.value = value self.description = description self.deprecation_reason = deprecation_reason - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLEnumValue) and self.value == other.value @@ -1344,6 +1252,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLEnumValueKwargs: + """Get corresponding arguments.""" return GraphQLEnumValueKwargs( value=self.value, description=self.description, @@ -1352,17 +1261,20 @@ def to_kwargs(self) -> GraphQLEnumValueKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLEnumValue": # pragma: no cover + def __copy__(self) -> GraphQLEnumValue: # pragma: no cover return self.__class__(**self.to_kwargs()) -GraphQLInputFieldMap = Dict[str, "GraphQLInputField"] +GraphQLInputFieldMap: TypeAlias = Dict[str, "GraphQLInputField"] GraphQLInputFieldOutType = Callable[[Dict[str, Any]], Any] class GraphQLInputObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): + """Arguments for GraphQL input object types""" + fields: GraphQLInputFieldMap - out_type: Optional[GraphQLInputFieldOutType] + out_type: GraphQLInputFieldOutType | None + is_one_of: bool class GraphQLInputObjectType(GraphQLNamedType): @@ -1375,7 +1287,7 @@ class GraphQLInputObjectType(GraphQLNamedType): Example:: - NonNullFloat = GraphQLNonNull(GraphQLFloat()) + NonNullFloat = GraphQLNonNull(GraphQLFloat) class GeoPoint(GraphQLInputObjectType): name = 'GeoPoint' @@ -1383,25 +1295,27 @@ class GeoPoint(GraphQLInputObjectType): 'lat': GraphQLInputField(NonNullFloat), 'lon': GraphQLInputField(NonNullFloat), 'alt': GraphQLInputField( - GraphQLFloat(), default_value=0) + GraphQLFloat, default_value=0) } The outbound values will be Python dictionaries by default, but you can have them converted to other types by specifying an ``out_type`` function or class. """ - ast_node: Optional[InputObjectTypeDefinitionNode] - extension_ast_nodes: Tuple[InputObjectTypeExtensionNode, ...] + ast_node: InputObjectTypeDefinitionNode | None + extension_ast_nodes: tuple[InputObjectTypeExtensionNode, ...] + is_one_of: bool def __init__( self, name: str, - fields: ThunkMapping["GraphQLInputField"], - description: Optional[str] = None, - out_type: Optional[GraphQLInputFieldOutType] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InputObjectTypeDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[InputObjectTypeExtensionNode]] = None, + fields: ThunkMapping[GraphQLInputField], + description: str | None = None, + out_type: GraphQLInputFieldOutType | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InputObjectTypeDefinitionNode | None = None, + extension_ast_nodes: Collection[InputObjectTypeExtensionNode] | None = None, + is_one_of: bool = False, ) -> None: super().__init__( name=name, @@ -1410,26 +1324,13 @@ def __init__( ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) - if out_type is not None and not callable(out_type): - raise TypeError(f"The out type for {name} must be a function or a class.") - if ast_node and not isinstance(ast_node, InputObjectTypeDefinitionNode): - raise TypeError( - f"{name} AST node must be an InputObjectTypeDefinitionNode." 
-            )
-        if extension_ast_nodes and not all(
-            isinstance(node, InputObjectTypeExtensionNode)
-            for node in extension_ast_nodes
-        ):
-            raise TypeError(
-                f"{name} extension AST nodes must be specified"
-                " as a collection of InputObjectTypeExtensionNode instances."
-            )
         self._fields = fields
         if out_type is not None:
             self.out_type = out_type  # type: ignore
+        self.is_one_of = is_one_of

     @staticmethod
-    def out_type(value: Dict[str, Any]) -> Any:
+    def out_type(value: dict[str, Any]) -> Any:
         """Transform outbound values (this is an extension of GraphQL.js).

         This default implementation passes values unaltered as dictionaries.
@@ -1437,16 +1338,18 @@ def out_type(value: Dict[str, Any]) -> Any:
         return value

     def to_kwargs(self) -> GraphQLInputObjectTypeKwargs:
+        """Get corresponding arguments."""
         # noinspection PyArgumentList
-        return GraphQLInputObjectTypeKwargs(  # type: ignore
-            super().to_kwargs(),
+        return GraphQLInputObjectTypeKwargs(
+            super().to_kwargs(),  # type: ignore
             fields=self.fields.copy(),
             out_type=None
             if self.out_type is GraphQLInputObjectType.out_type
             else self.out_type,
+            is_one_of=self.is_one_of,
         )

-    def __copy__(self) -> "GraphQLInputObjectType":  # pragma: no cover
+    def __copy__(self) -> GraphQLInputObjectType:  # pragma: no cover
         return self.__class__(**self.to_kwargs())

     @cached_property
@@ -1456,98 +1359,71 @@ def fields(self) -> GraphQLInputFieldMap:
             fields = resolve_thunk(self._fields)
         except Exception as error:
             cls = GraphQLError if isinstance(error, GraphQLError) else TypeError
-            raise cls(f"{self.name} fields cannot be resolved. {error}") from error
-        if not isinstance(fields, Mapping) or not all(
-            isinstance(key, str) for key in fields
-        ):
-            raise TypeError(
-                f"{self.name} fields must be specified"
-                " as a mapping with field names as keys."
-            )
-        if not all(
-            isinstance(value, GraphQLInputField) or is_input_type(value)
-            for value in fields.values()
-        ):
-            raise TypeError(
-                f"{self.name} fields must be"
-                " GraphQLInputField or input type objects."
-            )
+            msg = f"{self.name} fields cannot be resolved. {error}"
+            raise cls(msg) from error
         return {
             assert_name(name): value
             if isinstance(value, GraphQLInputField)
-            else GraphQLInputField(value)  # type: ignore
+            else GraphQLInputField(value)
            for name, value in fields.items()
        }


-def is_input_object_type(type_: Any) -> bool:
+def is_input_object_type(type_: Any) -> TypeGuard[GraphQLInputObjectType]:
+    """Check whether this is a GraphQL input object type."""
     return isinstance(type_, GraphQLInputObjectType)


 def assert_input_object_type(type_: Any) -> GraphQLInputObjectType:
+    """Assert that this is a GraphQL input object type."""
     if not is_input_object_type(type_):
-        raise TypeError(f"Expected {type_} to be a GraphQL Input Object type.")
-    return cast(GraphQLInputObjectType, type_)
+        msg = f"Expected {type_} to be a GraphQL Input Object type."
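# Editor's note: an illustrative sketch (not part of the patch) of the
# out_type extension handled above; `GeoPoint` and `GeoPointInput` are made-up
# names. out_type converts the coerced argument dict into any Python object.
from typing import NamedTuple

from graphql import (
    GraphQLFloat,
    GraphQLInputField,
    GraphQLInputObjectType,
    GraphQLNonNull,
)


class GeoPoint(NamedTuple):
    lat: float
    lon: float


GeoPointInput = GraphQLInputObjectType(
    "GeoPointInput",
    {
        "lat": GraphQLInputField(GraphQLNonNull(GraphQLFloat)),
        "lon": GraphQLInputField(GraphQLNonNull(GraphQLFloat)),
    },
    out_type=lambda value: GeoPoint(**value),
)

# The executor passes the coerced input dict through out_type:
assert GeoPointInput.out_type({"lat": 1.5, "lon": 2.5}) == GeoPoint(1.5, 2.5)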
+ raise TypeError(msg) + return type_ class GraphQLInputFieldKwargs(TypedDict, total=False): - type_: "GraphQLInputType" + """Arguments for GraphQL input fields""" + + type_: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None class GraphQLInputField: """Definition of a GraphQL input field""" - type: "GraphQLInputType" + type: GraphQLInputType default_value: Any - description: Optional[str] - deprecation_reason: Optional[str] - out_name: Optional[str] # for transforming names (extension of GraphQL.js) - extensions: Dict[str, Any] - ast_node: Optional[InputValueDefinitionNode] + description: str | None + deprecation_reason: str | None + out_name: str | None # for transforming names (extension of GraphQL.js) + extensions: dict[str, Any] + ast_node: InputValueDefinitionNode | None def __init__( self, - type_: "GraphQLInputType", + type_: GraphQLInputType, default_value: Any = Undefined, - description: Optional[str] = None, - deprecation_reason: Optional[str] = None, - out_name: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[InputValueDefinitionNode] = None, + description: str | None = None, + deprecation_reason: str | None = None, + out_name: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: InputValueDefinitionNode | None = None, ) -> None: - if not is_input_type(type_): - raise TypeError("Input field type must be a GraphQL input type.") - if description is not None and not is_description(description): - raise TypeError("Input field description must be a string.") - if deprecation_reason is not None and not is_description(deprecation_reason): - raise TypeError("Input field deprecation reason must be a string.") - if out_name is not None and not isinstance(out_name, str): - raise TypeError("Input field out name must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError( - "Input field extensions must be a dictionary with string keys." 
-            )
-        if ast_node and not isinstance(ast_node, InputValueDefinitionNode):
-            raise TypeError("Input field AST node must be an InputValueDefinitionNode.")
         self.type = type_
         self.default_value = default_value
         self.description = description
         self.deprecation_reason = deprecation_reason
         self.out_name = out_name
-        self.extensions = extensions
+        self.extensions = extensions or {}
         self.ast_node = ast_node

-    def __eq__(self, other: Any) -> bool:
+    def __eq__(self, other: object) -> bool:
         return self is other or (
             isinstance(other, GraphQLInputField)
             and self.type == other.type
@@ -1559,6 +1435,7 @@ def __eq__(self, other: Any) -> bool:
         )

     def to_kwargs(self) -> GraphQLInputFieldKwargs:
+        """Get corresponding arguments."""
         return GraphQLInputFieldKwargs(
             type_=self.type,
             default_value=self.default_value,
@@ -1569,18 +1446,19 @@ def to_kwargs(self) -> GraphQLInputFieldKwargs:
             ast_node=self.ast_node,
         )

-    def __copy__(self) -> "GraphQLInputField":  # pragma: no cover
+    def __copy__(self) -> GraphQLInputField:  # pragma: no cover
         return self.__class__(**self.to_kwargs())


 def is_required_input_field(field: GraphQLInputField) -> bool:
+    """Check whether this input field is required."""
     return is_non_null_type(field.type) and field.default_value is Undefined


 # Wrapper types


-class GraphQLList(Generic[GT], GraphQLWrappingType[GT]):
+class GraphQLList(GraphQLWrappingType[GT_co]):
     """List Type Wrapper

     A list is a wrapping type which points to another type. Lists are often created
@@ -1599,27 +1477,30 @@ def fields(self):
         }
     """

-    def __init__(self, type_: GT) -> None:
+    def __init__(self, type_: GT_co) -> None:
         super().__init__(type_=type_)

     def __str__(self) -> str:
         return f"[{self.of_type}]"


-def is_list_type(type_: Any) -> bool:
+def is_list_type(type_: Any) -> TypeGuard[GraphQLList]:
+    """Check whether this is a GraphQL list type."""
     return isinstance(type_, GraphQLList)


 def assert_list_type(type_: Any) -> GraphQLList:
+    """Assert that this is a GraphQL list type."""
     if not is_list_type(type_):
-        raise TypeError(f"Expected {type_} to be a GraphQL List type.")
-    return cast(GraphQLList, type_)
+        msg = f"Expected {type_} to be a GraphQL List type."
+        raise TypeError(msg)
+    return type_


-GNT = TypeVar("GNT", bound="GraphQLNullableType")
+GNT_co = TypeVar("GNT_co", bound="GraphQLNullableType", covariant=True)


-class GraphQLNonNull(GraphQLWrappingType[GNT], Generic[GNT]):
+class GraphQLNonNull(GraphQLWrappingType[GNT_co]):
     """Non-Null Type Wrapper

     A non-null is a wrapping type which points to another type. Non-null types enforce
@@ -1633,37 +1514,22 @@ class GraphQLNonNull(GraphQLWrappingType[GNT], Generic[GNT]):
         class RowType(GraphQLObjectType):
             name = 'Row'
             fields = {
-                'id': GraphQLField(GraphQLNonNull(GraphQLString()))
+                'id': GraphQLField(GraphQLNonNull(GraphQLString))
             }

     Note: the enforcement of non-nullability occurs within the executor.
     """

-    def __init__(self, type_: GNT):
+    def __init__(self, type_: GNT_co) -> None:
         super().__init__(type_=type_)
-        if isinstance(type_, GraphQLNonNull):
-            raise TypeError(
-                "Can only create NonNull of a Nullable GraphQLType but got:"
-                f" {type_}."
-            )

     def __str__(self) -> str:
         return f"{self.of_type}!"


-def is_non_null_type(type_: Any) -> bool:
-    return isinstance(type_, GraphQLNonNull)
-
-
-def assert_non_null_type(type_: Any) -> GraphQLNonNull:
-    if not is_non_null_type(type_):
-        raise TypeError(f"Expected {type_} to be a GraphQL Non-Null type.")
-    return cast(GraphQLNonNull, type_)
-
-
 # These types can all accept null as a value.
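# Editor's note: an illustrative sketch (not part of the patch) of the wrapper
# types above. The runtime check against double-wrapping a NonNull was dropped
# in this patch; the covariant GNT_co bound now flags that mistake statically.
from graphql import GraphQLList, GraphQLNonNull, GraphQLString

# "[String!]!" in GraphQL schema notation:
names = GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLString)))
assert str(names) == "[String!]!"
assert names.of_type.of_type.of_type is GraphQLString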
-graphql_nullable_types = ( +GraphQLNullableType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, @@ -1671,114 +1537,144 @@ def assert_non_null_type(type_: Any) -> GraphQLNonNull: GraphQLEnumType, GraphQLInputObjectType, GraphQLList, -) +] -GraphQLNullableType = Union[ +# These types may be used as input types for arguments and directives. + +GraphQLNullableInputType: TypeAlias = Union[ GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, GraphQLEnumType, GraphQLInputObjectType, + # actually GraphQLList[GraphQLInputType], but we can't recurse GraphQLList, ] +GraphQLInputType: TypeAlias = Union[ + GraphQLNullableInputType, GraphQLNonNull[GraphQLNullableInputType] +] -def is_nullable_type(type_: Any) -> bool: - return isinstance(type_, graphql_nullable_types) +# These types may be used as output types as the result of fields. +GraphQLNullableOutputType: TypeAlias = Union[ + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, + GraphQLEnumType, + # actually GraphQLList[GraphQLOutputType], but we can't recurse + GraphQLList, +] -def assert_nullable_type(type_: Any) -> GraphQLNullableType: - if not is_nullable_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL nullable type.") - return cast(GraphQLNullableType, type_) +GraphQLOutputType: TypeAlias = Union[ + GraphQLNullableOutputType, GraphQLNonNull[GraphQLNullableOutputType] +] -@overload -def get_nullable_type(type_: None) -> None: - ... +# Predicates and Assertions -@overload -def get_nullable_type(type_: GraphQLNullableType) -> GraphQLNullableType: - ... +def is_input_type(type_: Any) -> TypeGuard[GraphQLInputType]: + """Check whether this is a GraphQL input type.""" + return isinstance( + type_, (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) + ) or (isinstance(type_, GraphQLWrappingType) and is_input_type(type_.of_type)) -@overload -def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: - ... +def assert_input_type(type_: Any) -> GraphQLInputType: + """Assert that this is a GraphQL input type.""" + if not is_input_type(type_): + msg = f"Expected {type_} to be a GraphQL input type." + raise TypeError(msg) + return type_ -def get_nullable_type( - type_: Optional[Union[GraphQLNullableType, GraphQLNonNull]] -) -> Optional[GraphQLNullableType]: - """Unwrap possible non-null type""" - if is_non_null_type(type_): - type_ = cast(GraphQLNonNull, type_) - type_ = type_.of_type - return cast(Optional[GraphQLNullableType], type_) +def is_output_type(type_: Any) -> TypeGuard[GraphQLOutputType]: + """Check whether this is a GraphQL output type.""" + return isinstance( + type_, + ( + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, + GraphQLEnumType, + ), + ) or (isinstance(type_, GraphQLWrappingType) and is_output_type(type_.of_type)) -# These types may be used as input types for arguments and directives. +def assert_output_type(type_: Any) -> GraphQLOutputType: + """Assert that this is a GraphQL output type.""" + if not is_output_type(type_): + msg = f"Expected {type_} to be a GraphQL output type." 
+ raise TypeError(msg) + return type_ -graphql_input_types = (GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType) -GraphQLInputType = Union[ - GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType, GraphQLWrappingType -] +def is_non_null_type(type_: Any) -> TypeGuard[GraphQLNonNull]: + """Check whether this is a non-null GraphQL type.""" + return isinstance(type_, GraphQLNonNull) -def is_input_type(type_: Any) -> bool: - return isinstance(type_, graphql_input_types) or ( - isinstance(type_, GraphQLWrappingType) and is_input_type(type_.of_type) +def assert_non_null_type(type_: Any) -> GraphQLNonNull: + """Assert that this is a non-null GraphQL type.""" + if not is_non_null_type(type_): + msg = f"Expected {type_} to be a GraphQL Non-Null type." + raise TypeError(msg) + return type_ + + +def is_nullable_type(type_: Any) -> TypeGuard[GraphQLNullableType]: + """Check whether this is a nullable GraphQL type.""" + return isinstance( + type_, + ( + GraphQLScalarType, + GraphQLObjectType, + GraphQLInterfaceType, + GraphQLUnionType, + GraphQLEnumType, + GraphQLInputObjectType, + GraphQLList, + ), ) -def assert_input_type(type_: Any) -> GraphQLInputType: - if not is_input_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL input type.") - return cast(GraphQLInputType, type_) +def assert_nullable_type(type_: Any) -> GraphQLNullableType: + """Assert that this is a nullable GraphQL type.""" + if not is_nullable_type(type_): + msg = f"Expected {type_} to be a GraphQL nullable type." + raise TypeError(msg) + return type_ -# These types may be used as output types as the result of fields. +@overload +def get_nullable_type(type_: None) -> None: ... -graphql_output_types = ( - GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, - GraphQLEnumType, -) -GraphQLOutputType = Union[ - GraphQLScalarType, - GraphQLObjectType, - GraphQLInterfaceType, - GraphQLUnionType, - GraphQLEnumType, - GraphQLWrappingType, -] +@overload +def get_nullable_type(type_: GraphQLNullableType) -> GraphQLNullableType: ... -def is_output_type(type_: Any) -> bool: - return isinstance(type_, graphql_output_types) or ( - isinstance(type_, GraphQLWrappingType) and is_output_type(type_.of_type) - ) +@overload +def get_nullable_type(type_: GraphQLNonNull) -> GraphQLNullableType: ... -def assert_output_type(type_: Any) -> GraphQLOutputType: - if not is_output_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL output type.") - return cast(GraphQLOutputType, type_) +def get_nullable_type( + type_: GraphQLNullableType | GraphQLNonNull | None, +) -> GraphQLNullableType | None: + """Unwrap possible non-null type""" + if is_non_null_type(type_): + type_ = type_.of_type + return cast("Optional[GraphQLNullableType]", type_) # These named types do not include modifiers like List or NonNull. 
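# Editor's note: an illustrative sketch (not part of the patch). The TypeGuard
# return types above let static checkers narrow after a predicate call, which
# is why the new assert_* helpers can `return type_` without a cast().
from graphql import GraphQLNonNull, GraphQLString, get_nullable_type, is_non_null_type

type_ = GraphQLNonNull(GraphQLString)
if is_non_null_type(type_):
    # type checkers now know type_ is a GraphQLNonNull, so .of_type is safe
    assert type_.of_type is GraphQLString
assert get_nullable_type(type_) is GraphQLString  # strips one NonNull
assert get_nullable_type(GraphQLString) is GraphQLString  # no-op otherwise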
-GraphQLNamedInputType = Union[ +GraphQLNamedInputType: TypeAlias = Union[ GraphQLScalarType, GraphQLEnumType, GraphQLInputObjectType ] -GraphQLNamedOutputType = Union[ +GraphQLNamedOutputType: TypeAlias = Union[ GraphQLScalarType, GraphQLObjectType, GraphQLInterfaceType, @@ -1787,83 +1683,90 @@ def assert_output_type(type_: Any) -> GraphQLOutputType: ] -def is_named_type(type_: Any) -> bool: +def is_named_type(type_: Any) -> TypeGuard[GraphQLNamedType]: + """Check whether this is a named GraphQL type.""" return isinstance(type_, GraphQLNamedType) def assert_named_type(type_: Any) -> GraphQLNamedType: + """Assert that this is a named GraphQL type.""" if not is_named_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL named type.") - return cast(GraphQLNamedType, type_) + msg = f"Expected {type_} to be a GraphQL named type." + raise TypeError(msg) + return type_ @overload -def get_named_type(type_: None) -> None: - ... +def get_named_type(type_: None) -> None: ... @overload -def get_named_type(type_: GraphQLType) -> GraphQLNamedType: - ... +def get_named_type(type_: GraphQLType) -> GraphQLNamedType: ... -def get_named_type(type_: Optional[GraphQLType]) -> Optional[GraphQLNamedType]: +def get_named_type(type_: GraphQLType | None) -> GraphQLNamedType | None: """Unwrap possible wrapping type""" if type_: unwrapped_type = type_ while is_wrapping_type(unwrapped_type): - unwrapped_type = cast(GraphQLWrappingType, unwrapped_type) unwrapped_type = unwrapped_type.of_type - return cast(GraphQLNamedType, unwrapped_type) + return cast("GraphQLNamedType", unwrapped_type) return None # These types may describe types which may be leaf values. -graphql_leaf_types = (GraphQLScalarType, GraphQLEnumType) - -GraphQLLeafType = Union[GraphQLScalarType, GraphQLEnumType] +GraphQLLeafType: TypeAlias = Union[GraphQLScalarType, GraphQLEnumType] -def is_leaf_type(type_: Any) -> bool: - return isinstance(type_, graphql_leaf_types) +def is_leaf_type(type_: Any) -> TypeGuard[GraphQLLeafType]: + """Check whether this is a GraphQL leaf type.""" + return isinstance(type_, (GraphQLScalarType, GraphQLEnumType)) def assert_leaf_type(type_: Any) -> GraphQLLeafType: + """Assert that this is a GraphQL leaf type.""" if not is_leaf_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL leaf type.") - return cast(GraphQLLeafType, type_) + msg = f"Expected {type_} to be a GraphQL leaf type." + raise TypeError(msg) + return type_ # These types may describe the parent context of a selection set. -graphql_composite_types = (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) - -GraphQLCompositeType = Union[GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType] +GraphQLCompositeType: TypeAlias = Union[ + GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType +] -def is_composite_type(type_: Any) -> bool: - return isinstance(type_, graphql_composite_types) +def is_composite_type(type_: Any) -> TypeGuard[GraphQLCompositeType]: + """Check whether this is a GraphQL composite type.""" + return isinstance( + type_, (GraphQLObjectType, GraphQLInterfaceType, GraphQLUnionType) + ) -def assert_composite_type(type_: Any) -> GraphQLType: +def assert_composite_type(type_: Any) -> GraphQLCompositeType: + """Assert that this is a GraphQL composite type.""" if not is_composite_type(type_): - raise TypeError(f"Expected {type_} to be a GraphQL composite type.") - return cast(GraphQLType, type_) + msg = f"Expected {type_} to be a GraphQL composite type." 
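# Editor's note: an illustrative sketch (not part of the patch) contrasting
# get_named_type() with get_nullable_type(): the former strips *all* List and
# NonNull wrappers down to the underlying named type.
from graphql import (
    GraphQLList,
    GraphQLNonNull,
    GraphQLString,
    get_named_type,
    is_leaf_type,
)

wrapped = GraphQLNonNull(GraphQLList(GraphQLNonNull(GraphQLString)))
named = get_named_type(wrapped)
assert named is GraphQLString
assert is_leaf_type(named)  # scalar and enum types are the leaf types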
+        raise TypeError(msg)
+    return type_


 # These types may describe abstract types.

-graphql_abstract_types = (GraphQLInterfaceType, GraphQLUnionType)
-
-GraphQLAbstractType = Union[GraphQLInterfaceType, GraphQLUnionType]
+GraphQLAbstractType: TypeAlias = Union[GraphQLInterfaceType, GraphQLUnionType]


-def is_abstract_type(type_: Any) -> bool:
-    return isinstance(type_, graphql_abstract_types)
+def is_abstract_type(type_: Any) -> TypeGuard[GraphQLAbstractType]:
+    """Check whether this is a GraphQL abstract type."""
+    return isinstance(type_, (GraphQLInterfaceType, GraphQLUnionType))


 def assert_abstract_type(type_: Any) -> GraphQLAbstractType:
+    """Assert that this is a GraphQL abstract type."""
     if not is_abstract_type(type_):
-        raise TypeError(f"Expected {type_} to be a GraphQL composite type.")
-    return cast(GraphQLAbstractType, type_)
+        msg = f"Expected {type_} to be a GraphQL abstract type."
+        raise TypeError(msg)
+    return type_
diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py
index 310968d1..ecd201c2 100644
--- a/src/graphql/type/directives.py
+++ b/src/graphql/type/directives.py
@@ -1,40 +1,52 @@
-from typing import Any, Collection, Dict, Optional, Tuple, cast
+"""GraphQL directives"""

-from ..language import ast, DirectiveLocation
-from ..pyutils import inspect, is_description
+from __future__ import annotations
+
+from typing import Any, Collection, cast
+
+from ..language import DirectiveLocation, ast
+from ..pyutils import inspect
 from .assert_name import assert_name
-from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull, is_input_type
-from .scalars import GraphQLBoolean, GraphQLString
+from .definition import GraphQLArgument, GraphQLInputType, GraphQLNonNull
+from .scalars import GraphQLBoolean, GraphQLInt, GraphQLString

 try:
     from typing import TypedDict
 except ImportError:  # Python < 3.8
     from typing_extensions import TypedDict
+try:
+    from typing import TypeGuard
+except ImportError:  # Python < 3.10
+    from typing_extensions import TypeGuard

 __all__ = [
-    "is_directive",
-    "assert_directive",
-    "is_specified_directive",
-    "specified_directives",
+    "DEFAULT_DEPRECATION_REASON",
+    "DirectiveLocation",
+    "GraphQLDeferDirective",
+    "GraphQLDeprecatedDirective",
     "GraphQLDirective",
     "GraphQLDirectiveKwargs",
     "GraphQLIncludeDirective",
+    "GraphQLOneOfDirective",
     "GraphQLSkipDirective",
-    "GraphQLDeprecatedDirective",
     "GraphQLSpecifiedByDirective",
-    "DirectiveLocation",
-    "DEFAULT_DEPRECATION_REASON",
+    "GraphQLStreamDirective",
+    "assert_directive",
+    "is_directive",
+    "is_specified_directive",
+    "specified_directives",
 ]


 class GraphQLDirectiveKwargs(TypedDict, total=False):
+    """Arguments for GraphQL directives"""
+
     name: str
-    locations: Tuple[DirectiveLocation, ...]
-    args: Dict[str, GraphQLArgument]
+    locations: tuple[DirectiveLocation, ...]
+    args: dict[str, GraphQLArgument]
     is_repeatable: bool
-    description: Optional[str]
-    extensions: Dict[str, Any]
-    ast_node: Optional[ast.DirectiveDefinitionNode]
+    description: str | None
+    extensions: dict[str, Any]
+    ast_node: ast.DirectiveDefinitionNode | None


 class GraphQLDirective:
@@ -45,74 +57,52 @@ class GraphQLDirective:
     """

     name: str
-    locations: Tuple[DirectiveLocation, ...]
+    locations: tuple[DirectiveLocation, ...]
is_repeatable: bool - args: Dict[str, GraphQLArgument] - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.DirectiveDefinitionNode] + args: dict[str, GraphQLArgument] + description: str | None + extensions: dict[str, Any] + ast_node: ast.DirectiveDefinitionNode | None def __init__( self, name: str, locations: Collection[DirectiveLocation], - args: Optional[Dict[str, GraphQLArgument]] = None, + args: dict[str, GraphQLArgument] | None = None, is_repeatable: bool = False, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[ast.DirectiveDefinitionNode] = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: ast.DirectiveDefinitionNode | None = None, ) -> None: assert_name(name) try: locations = tuple( value if isinstance(value, DirectiveLocation) - else DirectiveLocation[cast(str, value)] + else DirectiveLocation[cast("str", value)] for value in locations ) - except (KeyError, TypeError): - raise TypeError( + except (KeyError, TypeError) as error: + msg = ( f"{name} locations must be specified" " as a collection of DirectiveLocation enum values." ) - if args is None: - args = {} - elif not isinstance(args, dict) or not all( - isinstance(key, str) for key in args - ): - raise TypeError(f"{name} args must be a dict with argument names as keys.") - elif not all( - isinstance(value, GraphQLArgument) or is_input_type(value) - for value in args.values() - ): - raise TypeError( - f"{name} args must be GraphQLArgument or input type objects." - ) - else: + raise TypeError(msg) from error + if args: args = { assert_name(name): value if isinstance(value, GraphQLArgument) - else GraphQLArgument(cast(GraphQLInputType, value)) + else GraphQLArgument(cast("GraphQLInputType", value)) for name, value in args.items() } - if not isinstance(is_repeatable, bool): - raise TypeError(f"{name} is_repeatable flag must be True or False.") - if ast_node and not isinstance(ast_node, ast.DirectiveDefinitionNode): - raise TypeError(f"{name} AST node must be a DirectiveDefinitionNode.") - if description is not None and not is_description(description): - raise TypeError(f"{name} description must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError(f"{name} extensions must be a dictionary with string keys.") + else: + args = {} self.name = name self.locations = locations self.args = args self.is_repeatable = is_repeatable self.description = description - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node def __str__(self) -> str: @@ -121,7 +111,7 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"<{self.__class__.__name__}({self})>" - def __eq__(self, other: Any) -> bool: + def __eq__(self, other: object) -> bool: return self is other or ( isinstance(other, GraphQLDirective) and self.name == other.name @@ -133,6 +123,7 @@ def __eq__(self, other: Any) -> bool: ) def to_kwargs(self) -> GraphQLDirectiveKwargs: + """Get corresponding arguments.""" return GraphQLDirectiveKwargs( name=self.name, locations=self.locations, @@ -143,19 +134,21 @@ def to_kwargs(self) -> GraphQLDirectiveKwargs: ast_node=self.ast_node, ) - def __copy__(self) -> "GraphQLDirective": # pragma: no cover + def __copy__(self) -> GraphQLDirective: # pragma: no cover return self.__class__(**self.to_kwargs()) -def is_directive(directive: Any) -> bool: - """Test if 
the given value is a GraphQL directive.""" +def is_directive(directive: Any) -> TypeGuard[GraphQLDirective]: + """Check whether this is a GraphQL directive.""" return isinstance(directive, GraphQLDirective) def assert_directive(directive: Any) -> GraphQLDirective: + """Assert that this is a GraphQL directive.""" if not is_directive(directive): - raise TypeError(f"Expected {inspect(directive)} to be a GraphQL directive.") - return cast(GraphQLDirective, directive) + msg = f"Expected {inspect(directive)} to be a GraphQL directive." + raise TypeError(msg) + return directive # Used to conditionally include fields or fragments. @@ -175,7 +168,6 @@ def assert_directive(directive: Any) -> GraphQLDirective: " only when the `if` argument is true.", ) - # Used to conditionally skip (exclude) fields or fragments: GraphQLSkipDirective = GraphQLDirective( name="skip", @@ -193,6 +185,42 @@ def assert_directive(directive: Any) -> GraphQLDirective: " when the `if` argument is true.", ) +# Used to conditionally defer fragments: +GraphQLDeferDirective = GraphQLDirective( + name="defer", + description="Directs the executor to defer this fragment" + " when the `if` argument is true or undefined.", + locations=[DirectiveLocation.FRAGMENT_SPREAD, DirectiveLocation.INLINE_FRAGMENT], + args={ + "if": GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description="Deferred when true or undefined.", + default_value=True, + ), + "label": GraphQLArgument(GraphQLString, description="Unique name"), + }, +) + +# Used to conditionally stream list fields: +GraphQLStreamDirective = GraphQLDirective( + name="stream", + description="Directs the executor to stream plural fields" + " when the `if` argument is true or undefined.", + locations=[DirectiveLocation.FIELD], + args={ + "if": GraphQLArgument( + GraphQLNonNull(GraphQLBoolean), + description="Stream when true or undefined.", + default_value=True, + ), + "label": GraphQLArgument(GraphQLString, description="Unique name"), + "initialCount": GraphQLArgument( + GraphQLInt, + description="Number of items to return immediately", + default_value=0, + ), + }, +) # Constant string used for default reason for a deprecation: DEFAULT_DEPRECATION_REASON = "No longer supported" @@ -220,25 +248,34 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Marks an element of a GraphQL schema as no longer supported.", ) -# Used to provide a URL for specifying the behaviour of custom scalar definitions: +# Used to provide a URL for specifying the behavior of custom scalar definitions: GraphQLSpecifiedByDirective = GraphQLDirective( name="specifiedBy", locations=[DirectiveLocation.SCALAR], args={ "url": GraphQLArgument( GraphQLNonNull(GraphQLString), - description="The URL that specifies the behaviour of this scalar.", + description="The URL that specifies the behavior of this scalar.", ) }, - description="Exposes a URL that specifies the behaviour of this scalar.", + description="Exposes a URL that specifies the behavior of this scalar.", ) +# Used to indicate an Input Object is a OneOf Input Object. +GraphQLOneOfDirective = GraphQLDirective( + name="oneOf", + locations=[DirectiveLocation.INPUT_OBJECT], + args={}, + description="Indicates exactly one field must be supplied" + " and this field must not be `null`.", +) -specified_directives: Tuple[GraphQLDirective, ...] = ( +specified_directives: tuple[GraphQLDirective, ...] 
= ( GraphQLIncludeDirective, GraphQLSkipDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, ) """A tuple with all directives from the GraphQL specification""" diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 123bf8f9..313c3679 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -1,11 +1,18 @@ +"""GraphQL introspection""" + +from __future__ import annotations + from enum import Enum from typing import Mapping +from ..language import DirectiveLocation, print_ast +from ..pyutils import inspect from .definition import ( GraphQLArgument, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLFieldMap, GraphQLList, GraphQLNamedType, GraphQLNonNull, @@ -20,8 +27,6 @@ is_scalar_type, is_union_type, ) -from ..language import DirectiveLocation, print_ast -from ..pyutils import inspect from .scalars import GraphQLBoolean, GraphQLString __all__ = [ @@ -34,88 +39,105 @@ ] -__Schema: GraphQLObjectType = GraphQLObjectType( +class SchemaFields(GraphQLFieldMap): + def __new__(cls): + return { + "description": GraphQLField(GraphQLString, resolve=cls.description), + "types": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_Type))), + resolve=cls.types, + description="A list of all types supported by this server.", + ), + "queryType": GraphQLField( + GraphQLNonNull(_Type), + resolve=cls.query_type, + description="The type that query operations will be rooted at.", + ), + "mutationType": GraphQLField( + _Type, + resolve=cls.mutation_type, + description="If this server supports mutation, the type that" + " mutation operations will be rooted at.", + ), + "subscriptionType": GraphQLField( + _Type, + resolve=cls.subscription_type, + description="If this server supports subscription, the type that" + " subscription operations will be rooted at.", + ), + "directives": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_Directive))), + resolve=cls.directives, + description="A list of all directives supported by this server.", + ), + } + + @staticmethod + def description(schema, _info): + return schema.description + + @staticmethod + def types(schema, _info): + return schema.type_map.values() + + @staticmethod + def query_type(schema, _info): + return schema.query_type + + @staticmethod + def mutation_type(schema, _info): + return schema.mutation_type + + @staticmethod + def subscription_type(schema, _info): + return schema.subscription_type + + @staticmethod + def directives(schema, _info): + return schema.directives + + +_Schema: GraphQLObjectType = GraphQLObjectType( name="__Schema", description="A GraphQL Schema defines the capabilities of a GraphQL" " server. 
It exposes all available types and directives" " on the server, as well as the entry points for query," " mutation, and subscription operations.", - fields=lambda: { - "description": GraphQLField( - GraphQLString, resolve=lambda schema, _info: schema.description - ), - "types": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__Type))), - resolve=lambda schema, _info: schema.type_map.values(), - description="A list of all types supported by this server.", - ), - "queryType": GraphQLField( - GraphQLNonNull(__Type), - resolve=lambda schema, _info: schema.query_type, - description="The type that query operations will be rooted at.", - ), - "mutationType": GraphQLField( - __Type, - resolve=lambda schema, _info: schema.mutation_type, - description="If this server supports mutation, the type that" - " mutation operations will be rooted at.", - ), - "subscriptionType": GraphQLField( - __Type, - resolve=lambda schema, _info: schema.subscription_type, - description="If this server support subscription, the type that" - " subscription operations will be rooted at.", - ), - "directives": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__Directive))), - resolve=lambda schema, _info: schema.directives, - description="A list of all directives supported by this server.", - ), - }, + fields=SchemaFields, ) -__Directive: GraphQLObjectType = GraphQLObjectType( - name="__Directive", - description="A Directive provides a way to describe alternate runtime" - " execution and type validation behavior in a GraphQL" - " document.\n\nIn some cases, you need to provide options" - " to alter GraphQL's execution behavior in ways field" - " arguments will not suffice, such as conditionally including" - " or skipping a field. Directives provide this by describing" - " additional information to the executor.", - fields=lambda: { - # Note: The fields onOperation, onFragment and onField are deprecated - "name": GraphQLField( - GraphQLNonNull(GraphQLString), - resolve=DirectiveResolvers.name, - ), - "description": GraphQLField( - GraphQLString, - resolve=DirectiveResolvers.description, - ), - "isRepeatable": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=DirectiveResolvers.is_repeatable, - ), - "locations": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__DirectiveLocation))), - resolve=DirectiveResolvers.locations, - ), - "args": GraphQLField( - GraphQLNonNull(GraphQLList(GraphQLNonNull(__InputValue))), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=DirectiveResolvers.args, - ), - }, -) - +class DirectiveFields(GraphQLFieldMap): + def __new__(cls): + return { + # Note: The fields onOperation, onFragment and onField are deprecated + "name": GraphQLField( + GraphQLNonNull(GraphQLString), + resolve=cls.name, + ), + "description": GraphQLField( + GraphQLString, + resolve=cls.description, + ), + "isRepeatable": GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=cls.is_repeatable, + ), + "locations": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_DirectiveLocation))), + resolve=cls.locations, + ), + "args": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_InputValue))), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.args, + ), + } -class DirectiveResolvers: @staticmethod def name(directive, _info): return directive.name @@ -143,7 +165,20 @@ def args(directive, _info, includeDeprecated=False): ) -__DirectiveLocation: GraphQLEnumType = 
GraphQLEnumType( +_Directive: GraphQLObjectType = GraphQLObjectType( + name="__Directive", + description="A Directive provides a way to describe alternate runtime" + " execution and type validation behavior in a GraphQL" + " document.\n\nIn some cases, you need to provide options" + " to alter GraphQL's execution behavior in ways field" + " arguments will not suffice, such as conditionally including" + " or skipping a field. Directives provide this by describing" + " additional information to the executor.", + fields=DirectiveFields, +) + + +_DirectiveLocation: GraphQLEnumType = GraphQLEnumType( name="__DirectiveLocation", description="A Directive can be adjacent to many parts of the GraphQL" " language, a __DirectiveLocation describes one such possible" @@ -228,65 +263,51 @@ def args(directive, _info, includeDeprecated=False): ) -__Type: GraphQLObjectType = GraphQLObjectType( - name="__Type", - description="The fundamental unit of any GraphQL Schema is the type." - " There are many kinds of types in GraphQL as represented" - " by the `__TypeKind` enum.\n\nDepending on the kind of a" - " type, certain fields describe information about that type." - " Scalar types provide no information beyond a name, description" - " and optional `specifiedByURL`, while Enum types provide their values." - " Object and Interface types provide the fields they describe." - " Abstract types, Union and Interface, provide the Object" - " types possible at runtime. List and NonNull types compose" - " other types.", - fields=lambda: { - "kind": GraphQLField(GraphQLNonNull(__TypeKind), resolve=TypeResolvers.kind), - "name": GraphQLField(GraphQLString, resolve=TypeResolvers.name), - "description": GraphQLField(GraphQLString, resolve=TypeResolvers.description), - "specifiedByURL": GraphQLField( - GraphQLString, resolve=TypeResolvers.specified_by_url - ), - "fields": GraphQLField( - GraphQLList(GraphQLNonNull(__Field)), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=TypeResolvers.fields, - ), - "interfaces": GraphQLField( - GraphQLList(GraphQLNonNull(__Type)), resolve=TypeResolvers.interfaces - ), - "possibleTypes": GraphQLField( - GraphQLList(GraphQLNonNull(__Type)), - resolve=TypeResolvers.possible_types, - ), - "enumValues": GraphQLField( - GraphQLList(GraphQLNonNull(__EnumValue)), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=TypeResolvers.enum_values, - ), - "inputFields": GraphQLField( - GraphQLList(GraphQLNonNull(__InputValue)), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=TypeResolvers.input_fields, - ), - "ofType": GraphQLField(__Type, resolve=TypeResolvers.of_type), - }, -) - +class TypeFields(GraphQLFieldMap): + def __new__(cls): + return { + "kind": GraphQLField(GraphQLNonNull(_TypeKind), resolve=cls.kind), + "name": GraphQLField(GraphQLString, resolve=cls.name), + "description": GraphQLField(GraphQLString, resolve=cls.description), + "specifiedByURL": GraphQLField(GraphQLString, resolve=cls.specified_by_url), + "fields": GraphQLField( + GraphQLList(GraphQLNonNull(_Field)), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.fields, + ), + "interfaces": GraphQLField( + GraphQLList(GraphQLNonNull(_Type)), resolve=cls.interfaces + ), + "possibleTypes": GraphQLField( + GraphQLList(GraphQLNonNull(_Type)), + resolve=cls.possible_types, + ), + "enumValues": GraphQLField( + 
GraphQLList(GraphQLNonNull(_EnumValue)), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.enum_values, + ), + "inputFields": GraphQLField( + GraphQLList(GraphQLNonNull(_InputValue)), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.input_fields, + ), + "ofType": GraphQLField(_Type, resolve=cls.of_type), + "isOneOf": GraphQLField(GraphQLBoolean, resolve=cls.is_one_of), + } -class TypeResolvers: @staticmethod def kind(type_, _info): if is_scalar_type(type_): @@ -307,7 +328,8 @@ def kind(type_, _info): return TypeKind.NON_NULL # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") # pragma: no cover + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover @staticmethod def name(type_, _info): @@ -324,83 +346,102 @@ def specified_by_url(type_, _info): # noinspection PyPep8Naming @staticmethod def fields(type_, _info, includeDeprecated=False): - if is_object_type(type_) or is_interface_type(type_): - items = type_.fields.items() - return ( - list(items) - if includeDeprecated - else [item for item in items if item[1].deprecation_reason is None] - ) + if not (is_object_type(type_) or is_interface_type(type_)): + return None + items = type_.fields.items() + return ( + list(items) + if includeDeprecated + else [item for item in items if item[1].deprecation_reason is None] + ) @staticmethod def interfaces(type_, _info): - if is_object_type(type_) or is_interface_type(type_): - return type_.interfaces + return ( + type_.interfaces + if is_object_type(type_) or is_interface_type(type_) + else None + ) @staticmethod def possible_types(type_, info): - if is_abstract_type(type_): - return info.schema.get_possible_types(type_) + return ( + info.schema.get_possible_types(type_) if is_abstract_type(type_) else None + ) # noinspection PyPep8Naming @staticmethod def enum_values(type_, _info, includeDeprecated=False): - if is_enum_type(type_): - items = type_.values.items() - return ( - items - if includeDeprecated - else [item for item in items if item[1].deprecation_reason is None] - ) + if not is_enum_type(type_): + return None + items = type_.values.items() + return ( + items + if includeDeprecated + else [item for item in items if item[1].deprecation_reason is None] + ) # noinspection PyPep8Naming @staticmethod def input_fields(type_, _info, includeDeprecated=False): - if is_input_object_type(type_): - items = type_.fields.items() - return ( - items - if includeDeprecated - else [item for item in items if item[1].deprecation_reason is None] - ) + if not is_input_object_type(type_): + return None + items = type_.fields.items() + return ( + items + if includeDeprecated + else [item for item in items if item[1].deprecation_reason is None] + ) @staticmethod def of_type(type_, _info): return getattr(type_, "of_type", None) + @staticmethod + def is_one_of(type_, _info): + return type_.is_one_of if is_input_object_type(type_) else None -__Field: GraphQLObjectType = GraphQLObjectType( - name="__Field", - description="Object and Interface types are described by a list of Fields," - " each of which has a name, potentially a list of arguments," - " and a return type.", - fields=lambda: { - "name": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=FieldResolvers.name - ), - "description": GraphQLField(GraphQLString, resolve=FieldResolvers.description), - "args": GraphQLField( - 
GraphQLNonNull(GraphQLList(GraphQLNonNull(__InputValue))), - args={ - "includeDeprecated": GraphQLArgument( - GraphQLBoolean, default_value=False - ) - }, - resolve=FieldResolvers.args, - ), - "type": GraphQLField(GraphQLNonNull(__Type), resolve=FieldResolvers.type), - "isDeprecated": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=FieldResolvers.is_deprecated, - ), - "deprecationReason": GraphQLField( - GraphQLString, resolve=FieldResolvers.deprecation_reason - ), - }, + +_Type: GraphQLObjectType = GraphQLObjectType( + name="__Type", + description="The fundamental unit of any GraphQL Schema is the type." + " There are many kinds of types in GraphQL as represented" + " by the `__TypeKind` enum.\n\nDepending on the kind of a" + " type, certain fields describe information about that type." + " Scalar types provide no information beyond a name, description" + " and optional `specifiedByURL`, while Enum types provide their values." + " Object and Interface types provide the fields they describe." + " Abstract types, Union and Interface, provide the Object" + " types possible at runtime. List and NonNull types compose" + " other types.", + fields=TypeFields, ) -class FieldResolvers: +class FieldFields(GraphQLFieldMap): + def __new__(cls): + return { + "name": GraphQLField(GraphQLNonNull(GraphQLString), resolve=cls.name), + "description": GraphQLField(GraphQLString, resolve=cls.description), + "args": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(_InputValue))), + args={ + "includeDeprecated": GraphQLArgument( + GraphQLBoolean, default_value=False + ) + }, + resolve=cls.args, + ), + "type": GraphQLField(GraphQLNonNull(_Type), resolve=cls.type), + "isDeprecated": GraphQLField( + GraphQLNonNull(GraphQLBoolean), + resolve=cls.is_deprecated, + ), + "deprecationReason": GraphQLField( + GraphQLString, resolve=cls.deprecation_reason + ), + } + @staticmethod def name(item, _info): return item[0] @@ -432,39 +473,38 @@ def deprecation_reason(item, _info): return item[1].deprecation_reason -__InputValue: GraphQLObjectType = GraphQLObjectType( - name="__InputValue", - description="Arguments provided to Fields or Directives and the input" - " fields of an InputObject are represented as Input Values" - " which describe their type and optionally a default value.", - fields=lambda: { - "name": GraphQLField( - GraphQLNonNull(GraphQLString), resolve=InputValueFieldResolvers.name - ), - "description": GraphQLField( - GraphQLString, resolve=InputValueFieldResolvers.description - ), - "type": GraphQLField( - GraphQLNonNull(__Type), resolve=InputValueFieldResolvers.type - ), - "defaultValue": GraphQLField( - GraphQLString, - description="A GraphQL-formatted string representing" - " the default value for this input value.", - resolve=InputValueFieldResolvers.default_value, - ), - "isDeprecated": GraphQLField( - GraphQLNonNull(GraphQLBoolean), - resolve=InputValueFieldResolvers.is_deprecated, - ), - "deprecationReason": GraphQLField( - GraphQLString, resolve=InputValueFieldResolvers.deprecation_reason - ), - }, +_Field: GraphQLObjectType = GraphQLObjectType( + name="__Field", + description="Object and Interface types are described by a list of Fields," + " each of which has a name, potentially a list of arguments," + " and a return type.", + fields=FieldFields, ) -class InputValueFieldResolvers: +class InputValueFields(GraphQLFieldMap): + def __new__(cls): + return { + "name": GraphQLField(GraphQLNonNull(GraphQLString), resolve=cls.name), + "description": GraphQLField( + GraphQLString, 
resolve=cls.description
+            ),
+            "type": GraphQLField(GraphQLNonNull(_Type), resolve=cls.type),
+            "defaultValue": GraphQLField(
+                GraphQLString,
+                description="A GraphQL-formatted string representing"
+                " the default value for this input value.",
+                resolve=cls.default_value,
+            ),
+            "isDeprecated": GraphQLField(
+                GraphQLNonNull(GraphQLBoolean),
+                resolve=cls.is_deprecated,
+            ),
+            "deprecationReason": GraphQLField(
+                GraphQLString, resolve=cls.deprecation_reason
+            ),
+        }
+
     @staticmethod
     def name(item, _info):
         return item[0]
@@ -494,31 +534,63 @@ def deprecation_reason(item, _info):
         return item[1].deprecation_reason
 
 
-__EnumValue: GraphQLObjectType = GraphQLObjectType(
+_InputValue: GraphQLObjectType = GraphQLObjectType(
+    name="__InputValue",
+    description="Arguments provided to Fields or Directives and the input"
+    " fields of an InputObject are represented as Input Values"
+    " which describe their type and optionally a default value.",
+    fields=InputValueFields,
+)
+
+
+class EnumValueFields(GraphQLFieldMap):
+    def __new__(cls):
+        return {
+            "name": GraphQLField(
+                GraphQLNonNull(GraphQLString), resolve=cls.name
+            ),
+            "description": GraphQLField(
+                GraphQLString, resolve=cls.description
+            ),
+            "isDeprecated": GraphQLField(
+                GraphQLNonNull(GraphQLBoolean),
+                resolve=cls.is_deprecated,
+            ),
+            "deprecationReason": GraphQLField(
+                GraphQLString, resolve=cls.deprecation_reason
+            ),
+        }
+
+    @staticmethod
+    def name(item, _info):
+        return item[0]
+
+    @staticmethod
+    def description(item, _info):
+        return item[1].description
+
+    @staticmethod
+    def is_deprecated(item, _info):
+        return item[1].deprecation_reason is not None
+
+    @staticmethod
+    def deprecation_reason(item, _info):
+        return item[1].deprecation_reason
+
+
+_EnumValue: GraphQLObjectType = GraphQLObjectType(
     name="__EnumValue",
     description="One possible value for a given Enum. Enum values are unique"
     " values, not a placeholder for a string or numeric value."
     " However an Enum value is returned in a JSON response as a"
     " string.",
-    fields=lambda: {
-        "name": GraphQLField(
-            GraphQLNonNull(GraphQLString), resolve=lambda item, _info: item[0]
-        ),
-        "description": GraphQLField(
-            GraphQLString, resolve=lambda item, _info: item[1].description
-        ),
-        "isDeprecated": GraphQLField(
-            GraphQLNonNull(GraphQLBoolean),
-            resolve=lambda item, _info: item[1].deprecation_reason is not None,
-        ),
-        "deprecationReason": GraphQLField(
-            GraphQLString, resolve=lambda item, _info: item[1].deprecation_reason
-        ),
-    },
+    fields=EnumValueFields,
 )
 
 
 class TypeKind(Enum):
+    """Kinds of types"""
+
     SCALAR = "scalar"
     OBJECT = "object"
     INTERFACE = "interface"
@@ -529,7 +601,7 @@ class TypeKind(Enum):
     NON_NULL = "non-null"
 
 
-__TypeKind: GraphQLEnumType = GraphQLEnumType(
+_TypeKind: GraphQLEnumType = GraphQLEnumType(
    name="__TypeKind",
    description="An enum describing what kind of type a given `__Type` is.",
    values={
@@ -567,26 +639,39 @@ class TypeKind(Enum):
         ),
         "NON_NULL": GraphQLEnumValue(
             TypeKind.NON_NULL,
-            description="Indicates this type is a non-null."
-            " `ofType` is a valid field.",
+            description="Indicates this type is a non-null. 
`ofType` is a valid field.", ), }, ) +class MetaFields: + @staticmethod + def schema(_source, info): + return info.schema + + @staticmethod + def type(_source, info, **args): + return info.schema.get_type(args["name"]) + + @staticmethod + def type_name(_source, info, **_args): + return info.parent_type.name + + SchemaMetaFieldDef = GraphQLField( - GraphQLNonNull(__Schema), # name = '__schema' + GraphQLNonNull(_Schema), # name = '__schema' description="Access the current type schema of this server.", args={}, - resolve=lambda _source, info: info.schema, + resolve=MetaFields.schema, ) TypeMetaFieldDef = GraphQLField( - __Type, # name = '__type' + _Type, # name = '__type' description="Request the type information of a single type.", args={"name": GraphQLArgument(GraphQLNonNull(GraphQLString))}, - resolve=lambda _source, info, **args: info.schema.get_type(args["name"]), + resolve=MetaFields.type, ) @@ -594,21 +679,21 @@ class TypeKind(Enum): GraphQLNonNull(GraphQLString), # name='__typename' description="The name of the current Object type at runtime.", args={}, - resolve=lambda _source, info, **_args: info.parent_type.name, + resolve=MetaFields.type_name, ) # Since double underscore names are subject to name mangling in Python, # the introspection classes are best imported via this dictionary: introspection_types: Mapping[str, GraphQLNamedType] = { # treat as read-only - "__Schema": __Schema, - "__Directive": __Directive, - "__DirectiveLocation": __DirectiveLocation, - "__Type": __Type, - "__Field": __Field, - "__InputValue": __InputValue, - "__EnumValue": __EnumValue, - "__TypeKind": __TypeKind, + "__Schema": _Schema, + "__Directive": _Directive, + "__DirectiveLocation": _DirectiveLocation, + "__Type": _Type, + "__Field": _Field, + "__InputValue": _InputValue, + "__EnumValue": _EnumValue, + "__TypeKind": _TypeKind, } """A mapping containing all introspection types with their names as keys""" @@ -616,3 +701,7 @@ class TypeKind(Enum): def is_introspection_type(type_: GraphQLNamedType) -> bool: """Check whether the given named GraphQL type is an introspection type.""" return type_.name in introspection_types + + +# register the introspection types to avoid redefinition +GraphQLNamedType.reserved_types.update(introspection_types) # type: ignore diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 2609d095..d35e6e26 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -1,8 +1,11 @@ +"""GraphQL scalar types""" + +from __future__ import annotations + from math import isfinite from typing import Any, Mapping from ..error import GraphQLError -from ..pyutils import inspect from ..language.ast import ( BooleanValueNode, FloatValueNode, @@ -11,18 +14,24 @@ ValueNode, ) from ..language.printer import print_ast +from ..pyutils import inspect from .definition import GraphQLNamedType, GraphQLScalarType +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + __all__ = [ - "is_specified_scalar_type", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", - "GraphQLString", - "GraphQLBoolean", - "GraphQLID", "GRAPHQL_MAX_INT", "GRAPHQL_MIN_INT", + "GraphQLBoolean", + "GraphQLFloat", + "GraphQLID", + "GraphQLInt", + "GraphQLString", + "is_specified_scalar_type", + "specified_scalar_types", ] # As per the GraphQL Spec, Integers are only treated as valid @@ -48,21 +57,20 @@ def serialize_int(output_value: Any) -> int: elif isinstance(output_value, float): num = int(output_value) if num != 
output_value: - raise ValueError + raise ValueError # noqa: TRY301 elif not output_value and isinstance(output_value, str): output_value = "" - raise ValueError + raise ValueError # noqa: TRY301 else: num = int(output_value) # raises ValueError if not an integer - except (OverflowError, ValueError, TypeError): - raise GraphQLError( - "Int cannot represent non-integer value: " + inspect(output_value) - ) + except (OverflowError, ValueError, TypeError) as error: + msg = "Int cannot represent non-integer value: " + inspect(output_value) + raise GraphQLError(msg) from error if not GRAPHQL_MIN_INT <= num <= GRAPHQL_MAX_INT: - raise GraphQLError( - "Int cannot represent non 32-bit signed integer value: " - + inspect(output_value) + msg = "Int cannot represent non 32-bit signed integer value: " + inspect( + output_value ) + raise GraphQLError(msg) return num @@ -74,31 +82,27 @@ def coerce_int(input_value: Any) -> int: and isfinite(input_value) and int(input_value) == input_value ): - raise GraphQLError( - "Int cannot represent non-integer value: " + inspect(input_value) - ) + msg = "Int cannot represent non-integer value: " + inspect(input_value) + raise GraphQLError(msg) if not GRAPHQL_MIN_INT <= input_value <= GRAPHQL_MAX_INT: - raise GraphQLError( - "Int cannot represent non 32-bit signed integer value: " - + inspect(input_value) + msg = "Int cannot represent non 32-bit signed integer value: " + inspect( + input_value ) + raise GraphQLError(msg) return int(input_value) def parse_int_literal(value_node: ValueNode, _variables: Any = None) -> int: """Parse an integer value node in the AST.""" if not isinstance(value_node, IntValueNode): - raise GraphQLError( - "Int cannot represent non-integer value: " + print_ast(value_node), - value_node, - ) + msg = "Int cannot represent non-integer value: " + print_ast(value_node) + raise GraphQLError(msg, value_node) num = int(value_node.value) if not GRAPHQL_MIN_INT <= num <= GRAPHQL_MAX_INT: - raise GraphQLError( - "Int cannot represent non 32-bit signed integer value: " - + print_ast(value_node), - value_node, + msg = "Int cannot represent non 32-bit signed integer value: " + print_ast( + value_node ) + raise GraphQLError(msg, value_node) return num @@ -119,14 +123,13 @@ def serialize_float(output_value: Any) -> float: try: if not output_value and isinstance(output_value, str): output_value = "" - raise ValueError + raise ValueError # noqa: TRY301 num = output_value if isinstance(output_value, float) else float(output_value) if not isfinite(num): - raise ValueError - except (ValueError, TypeError): - raise GraphQLError( - "Float cannot represent non numeric value: " + inspect(output_value) - ) + raise ValueError # noqa: TRY301 + except (ValueError, TypeError) as error: + msg = "Float cannot represent non numeric value: " + inspect(output_value) + raise GraphQLError(msg) from error return num @@ -303,7 +306,6 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: parse_literal=parse_id_literal, ) - specified_scalar_types: Mapping[str, GraphQLScalarType] = { type_.name: type_ for type_ in ( @@ -313,9 +315,13 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: GraphQLBoolean, GraphQLID, ) -} +} # pyright: ignore -def is_specified_scalar_type(type_: GraphQLNamedType) -> bool: +def is_specified_scalar_type(type_: GraphQLNamedType) -> TypeGuard[GraphQLScalarType]: """Check whether the given named GraphQL type is a specified scalar type.""" return type_.name in specified_scalar_types + + +# register the scalar 
types to avoid redefinition +GraphQLNamedType.reserved_types.update(specified_scalar_types) # type: ignore diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 321659c5..f8ab756b 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -1,29 +1,31 @@ +"""GraphQL schemas""" + +from __future__ import annotations + from copy import copy, deepcopy from typing import ( + TYPE_CHECKING, Any, Collection, Dict, - List, NamedTuple, - Optional, - Set, - Tuple, - Union, cast, ) -from ..error import GraphQLError -from ..language import ast, OperationType -from ..pyutils import inspect, is_collection, is_description +if TYPE_CHECKING: + from ..error import GraphQLError + from ..language import OperationType, ast + +from ..pyutils import inspect from .definition import ( GraphQLAbstractType, + GraphQLCompositeType, + GraphQLField, + GraphQLInputType, GraphQLInterfaceType, - GraphQLInputObjectType, GraphQLNamedType, GraphQLObjectType, - GraphQLUnionType, GraphQLType, - GraphQLWrappingType, get_named_type, is_input_object_type, is_interface_type, @@ -31,36 +33,45 @@ is_union_type, is_wrapping_type, ) -from .directives import GraphQLDirective, specified_directives, is_directive -from .introspection import introspection_types +from .directives import GraphQLDirective, is_directive, specified_directives +from .introspection import ( + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, + introspection_types, +) try: from typing import TypedDict except ImportError: # Python < 3.8 from typing_extensions import TypedDict +try: + from typing import TypeAlias, TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias, TypeGuard -__all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "is_schema", "assert_schema"] - +__all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "assert_schema", "is_schema"] -TypeMap = Dict[str, GraphQLNamedType] +TypeMap: TypeAlias = Dict[str, GraphQLNamedType] class InterfaceImplementations(NamedTuple): - - objects: List[GraphQLObjectType] - interfaces: List[GraphQLInterfaceType] + objects: list[GraphQLObjectType] + interfaces: list[GraphQLInterfaceType] class GraphQLSchemaKwargs(TypedDict, total=False): - query: Optional[GraphQLObjectType] - mutation: Optional[GraphQLObjectType] - subscription: Optional[GraphQLObjectType] - types: Optional[Tuple[GraphQLNamedType, ...]] - directives: Tuple[GraphQLDirective, ...] - description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.SchemaDefinitionNode] - extension_ast_nodes: Tuple[ast.SchemaExtensionNode, ...] + """Arguments for GraphQL schemas""" + + query: GraphQLObjectType | None + mutation: GraphQLObjectType | None + subscription: GraphQLObjectType | None + types: tuple[GraphQLNamedType, ...] | None + directives: tuple[GraphQLDirective, ...] + description: str | None + extensions: dict[str, Any] + ast_node: ast.SchemaDefinitionNode | None + extension_ast_nodes: tuple[ast.SchemaExtensionNode, ...] assume_valid: bool @@ -114,31 +125,31 @@ class GraphQLSchema: directives=specified_directives + [my_custom_directive]) """ - query_type: Optional[GraphQLObjectType] - mutation_type: Optional[GraphQLObjectType] - subscription_type: Optional[GraphQLObjectType] + query_type: GraphQLObjectType | None + mutation_type: GraphQLObjectType | None + subscription_type: GraphQLObjectType | None type_map: TypeMap - directives: Tuple[GraphQLDirective, ...] 
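
An aside for reviewers: the `GraphQLNamedType.reserved_types` registration in the scalars hunk above is what keeps user code from shadowing the built-in scalars. A minimal sketch of the intended effect, assuming that redefining a reserved name raises a `TypeError` (the exact error text is not part of this diff):

```python
from graphql.type import GraphQLScalarType

# "String" is one of the names registered in reserved_types above,
# so redefining it should now be refused at construction time.
try:
    GraphQLScalarType(name="String")
except TypeError as error:
    print(error)  # assumed: redefinition of a reserved type is rejected
```
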
- description: Optional[str] - extensions: Dict[str, Any] - ast_node: Optional[ast.SchemaDefinitionNode] - extension_ast_nodes: Tuple[ast.SchemaExtensionNode, ...] + directives: tuple[GraphQLDirective, ...] + description: str | None + extensions: dict[str, Any] + ast_node: ast.SchemaDefinitionNode | None + extension_ast_nodes: tuple[ast.SchemaExtensionNode, ...] - _implementations_map: Dict[str, InterfaceImplementations] - _sub_type_map: Dict[str, Set[str]] - _validation_errors: Optional[List[GraphQLError]] + _implementations_map: dict[str, InterfaceImplementations] + _sub_type_map: dict[str, set[str]] + _validation_errors: list[GraphQLError] | None def __init__( self, - query: Optional[GraphQLObjectType] = None, - mutation: Optional[GraphQLObjectType] = None, - subscription: Optional[GraphQLObjectType] = None, - types: Optional[Collection[GraphQLNamedType]] = None, - directives: Optional[Collection[GraphQLDirective]] = None, - description: Optional[str] = None, - extensions: Optional[Dict[str, Any]] = None, - ast_node: Optional[ast.SchemaDefinitionNode] = None, - extension_ast_nodes: Optional[Collection[ast.SchemaExtensionNode]] = None, + query: GraphQLObjectType | None = None, + mutation: GraphQLObjectType | None = None, + subscription: GraphQLObjectType | None = None, + types: Collection[GraphQLNamedType] | None = None, + directives: Collection[GraphQLDirective] | None = None, + description: str | None = None, + extensions: dict[str, Any] | None = None, + ast_node: ast.SchemaDefinitionNode | None = None, + extension_ast_nodes: Collection[ast.SchemaExtensionNode] | None = None, assume_valid: bool = False, ) -> None: """Initialize GraphQL schema. @@ -148,65 +159,22 @@ def __init__( """ self._validation_errors = [] if assume_valid else None - # Check for common mistakes during construction to produce clear and early - # error messages, but we leave the specific tests for the validation. - if query and not isinstance(query, GraphQLType): - raise TypeError("Expected query to be a GraphQL type.") - if mutation and not isinstance(mutation, GraphQLType): - raise TypeError("Expected mutation to be a GraphQL type.") - if subscription and not isinstance(subscription, GraphQLType): - raise TypeError("Expected subscription to be a GraphQL type.") - if types is None: - types = [] - else: - if not is_collection(types) or not all( - isinstance(type_, GraphQLType) for type_ in types - ): - raise TypeError( - "Schema types must be specified as a collection of GraphQL types." - ) - if directives is not None: - # noinspection PyUnresolvedReferences - if not is_collection(directives): - raise TypeError("Schema directives must be a collection.") - if not isinstance(directives, tuple): - directives = tuple(directives) - if description is not None and not is_description(description): - raise TypeError("Schema description must be a string.") - if extensions is None: - extensions = {} - elif not isinstance(extensions, dict) or not all( - isinstance(key, str) for key in extensions - ): - raise TypeError("Schema extensions must be a dictionary with string keys.") - if ast_node and not isinstance(ast_node, ast.SchemaDefinitionNode): - raise TypeError("Schema AST node must be a SchemaDefinitionNode.") - if extension_ast_nodes: - if not is_collection(extension_ast_nodes) or not all( - isinstance(node, ast.SchemaExtensionNode) - for node in extension_ast_nodes - ): - raise TypeError( - "Schema extension AST nodes must be specified" - " as a collection of SchemaExtensionNode instances." 
- ) - if not isinstance(extension_ast_nodes, tuple): - extension_ast_nodes = tuple(extension_ast_nodes) - else: - extension_ast_nodes = () - self.description = description - self.extensions = extensions + self.extensions = extensions or {} self.ast_node = ast_node - self.extension_ast_nodes = extension_ast_nodes + self.extension_ast_nodes = ( + tuple(extension_ast_nodes) if extension_ast_nodes else () + ) self.query_type = query self.mutation_type = mutation self.subscription_type = subscription # Provide specified directives (e.g. @include and @skip) by default - self.directives = specified_directives if directives is None else directives + self.directives = ( + specified_directives if directives is None else tuple(directives) + ) - # To preserve order of user-provided types, we add first to add them to - # the set of "collected" types, so `collect_referenced_types` ignore them. + # To preserve order of user-provided types, we first add them to the set + # of "collected" types, so `collect_referenced_types` ignores them. if types: all_referenced_types = TypeSet.with_initial_types(types) collect_referenced_types = all_referenced_types.collect_referenced_types @@ -241,7 +209,7 @@ def __init__( self._sub_type_map = {} # Keep track of all implementations by interface name. - implementations_map: Dict[str, InterfaceImplementations] = {} + implementations_map: dict[str, InterfaceImplementations] = {} self._implementations_map = implementations_map for named_type in all_referenced_types: @@ -250,47 +218,47 @@ def __init__( type_name = getattr(named_type, "name", None) if not type_name: - raise TypeError( + msg = ( "One of the provided types for building the Schema" - " is missing a name.", + " is missing a name." ) + raise TypeError(msg) if type_name in type_map: - raise TypeError( + msg = ( "Schema must contain uniquely named types" f" but contains multiple types named '{type_name}'." ) + raise TypeError(msg) + type_map[type_name] = named_type if is_interface_type(named_type): - named_type = cast(GraphQLInterfaceType, named_type) # Store implementations by interface. for iface in named_type.interfaces: if is_interface_type(iface): - iface = cast(GraphQLInterfaceType, iface) if iface.name in implementations_map: implementations = implementations_map[iface.name] else: - implementations = implementations_map[ - iface.name - ] = InterfaceImplementations(objects=[], interfaces=[]) + implementations = implementations_map[iface.name] = ( + InterfaceImplementations(objects=[], interfaces=[]) + ) implementations.interfaces.append(named_type) elif is_object_type(named_type): - named_type = cast(GraphQLObjectType, named_type) # Store implementations by objects. 
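
For reviewers, the uniqueness check earlier in this hunk fires for any two distinct type objects that share one name; a runnable sketch with made-up `Foo` types:

```python
from graphql.type import (
    GraphQLField,
    GraphQLObjectType,
    GraphQLSchema,
    GraphQLString,
)

# Two different Object types that both claim the name "Foo":
foo_a = GraphQLObjectType("Foo", {"a": GraphQLField(GraphQLString)})
foo_b = GraphQLObjectType("Foo", {"b": GraphQLField(GraphQLString)})
query = GraphQLObjectType(
    "Query", {"a": GraphQLField(foo_a), "b": GraphQLField(foo_b)}
)

try:
    GraphQLSchema(query)
except TypeError as error:
    # Schema must contain uniquely named types
    # but contains multiple types named 'Foo'.
    print(error)
```
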
for iface in named_type.interfaces: if is_interface_type(iface): - iface = cast(GraphQLInterfaceType, iface) if iface.name in implementations_map: implementations = implementations_map[iface.name] else: - implementations = implementations_map[ - iface.name - ] = InterfaceImplementations(objects=[], interfaces=[]) + implementations = implementations_map[iface.name] = ( + InterfaceImplementations(objects=[], interfaces=[]) + ) implementations.objects.append(named_type) def to_kwargs(self) -> GraphQLSchemaKwargs: + """Get corresponding arguments.""" return GraphQLSchemaKwargs( query=self.query_type, mutation=self.mutation_type, @@ -304,14 +272,14 @@ def to_kwargs(self) -> GraphQLSchemaKwargs: assume_valid=self._validation_errors is not None, ) - def __copy__(self) -> "GraphQLSchema": # pragma: no cover + def __copy__(self) -> GraphQLSchema: # pragma: no cover return self.__class__(**self.to_kwargs()) - def __deepcopy__(self, memo_: Dict) -> "GraphQLSchema": + def __deepcopy__(self, memo_: dict) -> GraphQLSchema: from ..type import ( is_introspection_type, - is_specified_scalar_type, is_specified_directive, + is_specified_scalar_type, ) type_map: TypeMap = { @@ -326,12 +294,15 @@ def __deepcopy__(self, memo_: Dict) -> "GraphQLSchema": directive if is_specified_directive(directive) else copy(directive) for directive in self.directives ] + for directive in directives: + remap_directive(directive, type_map) return self.__class__( - self.query_type and cast(GraphQLObjectType, type_map[self.query_type.name]), + self.query_type + and cast("GraphQLObjectType", type_map[self.query_type.name]), self.mutation_type - and cast(GraphQLObjectType, type_map[self.mutation_type.name]), + and cast("GraphQLObjectType", type_map[self.mutation_type.name]), self.subscription_type - and cast(GraphQLObjectType, type_map[self.subscription_type.name]), + and cast("GraphQLObjectType", type_map[self.subscription_type.name]), types, directives, self.description, @@ -341,27 +312,30 @@ def __deepcopy__(self, memo_: Dict) -> "GraphQLSchema": assume_valid=True, ) - def get_root_type(self, operation: OperationType) -> Optional[GraphQLObjectType]: + def get_root_type(self, operation: OperationType) -> GraphQLObjectType | None: + """Get the root type.""" return getattr(self, f"{operation.value}_type") - def get_type(self, name: str) -> Optional[GraphQLNamedType]: + def get_type(self, name: str) -> GraphQLNamedType | None: + """Get the type with the given name.""" return self.type_map.get(name) def get_possible_types( self, abstract_type: GraphQLAbstractType - ) -> List[GraphQLObjectType]: + ) -> list[GraphQLObjectType]: """Get list of all possible concrete types for given abstract type.""" return ( - cast(GraphQLUnionType, abstract_type).types + abstract_type.types if is_union_type(abstract_type) else self.get_implementations( - cast(GraphQLInterfaceType, abstract_type) + cast("GraphQLInterfaceType", abstract_type) ).objects ) def get_implementations( self, interface_type: GraphQLInterfaceType ) -> InterfaceImplementations: + """Get implementations for the given interface type.""" return self._implementations_map.get( interface_type.name, InterfaceImplementations(objects=[], interfaces=[]) ) @@ -377,11 +351,11 @@ def is_sub_type( types = set() add = types.add if is_union_type(abstract_type): - for type_ in cast(GraphQLUnionType, abstract_type).types: + for type_ in abstract_type.types: add(type_.name) else: implementations = self.get_implementations( - cast(GraphQLInterfaceType, abstract_type) + cast("GraphQLInterfaceType", 
abstract_type) ) for type_ in implementations.objects: add(type_.name) @@ -390,14 +364,45 @@ def is_sub_type( self._sub_type_map[abstract_type.name] = types return maybe_sub_type.name in types - def get_directive(self, name: str) -> Optional[GraphQLDirective]: + def get_directive(self, name: str) -> GraphQLDirective | None: + """Get the directive with the given name.""" for directive in self.directives: if directive.name == name: return directive return None + def get_field( + self, parent_type: GraphQLCompositeType, field_name: str + ) -> GraphQLField | None: + """Get field of a given type with the given name. + + This method looks up the field on the given type definition. + It has special casing for the three introspection fields, `__schema`, + `__type` and `__typename`. + + `__typename` is special because it can always be queried as a field, even + in situations where no other fields are allowed, like on a Union. + + `__schema` and `__type` could get automatically added to the query type, + but that would require mutating type definitions, which would cause issues. + """ + if field_name == "__schema": + return SchemaMetaFieldDef if self.query_type is parent_type else None + if field_name == "__type": + return TypeMetaFieldDef if self.query_type is parent_type else None + if field_name == "__typename": + return TypeNameMetaFieldDef + + # this function is part of a "hot" path inside executor and to assume presence + # of 'fields' is faster than to use `not is_union_type` + try: + return parent_type.fields[field_name] # type: ignore + except (AttributeError, KeyError): + return None + @property - def validation_errors(self) -> Optional[List[GraphQLError]]: + def validation_errors(self) -> list[GraphQLError] | None: + """Get validation errors.""" return self._validation_errors @@ -405,8 +410,8 @@ class TypeSet(Dict[GraphQLNamedType, None]): """An ordered set of types that can be collected starting from initial types.""" @classmethod - def with_initial_types(cls, types: Collection[GraphQLType]) -> "TypeSet": - return cast(TypeSet, super().fromkeys(types)) + def with_initial_types(cls, types: Collection[GraphQLType]) -> TypeSet: + return cast("TypeSet", super().fromkeys(types)) def collect_referenced_types(self, type_: GraphQLType) -> None: """Recursive function supplementing the type starting from an initial type.""" @@ -419,13 +424,9 @@ def collect_referenced_types(self, type_: GraphQLType) -> None: collect_referenced_types = self.collect_referenced_types if is_union_type(named_type): - named_type = cast(GraphQLUnionType, named_type) for member_type in named_type.types: collect_referenced_types(member_type) elif is_object_type(named_type) or is_interface_type(named_type): - named_type = cast( - Union[GraphQLObjectType, GraphQLInterfaceType], named_type - ) for interface_type in named_type.interfaces: collect_referenced_types(interface_type) @@ -434,58 +435,64 @@ def collect_referenced_types(self, type_: GraphQLType) -> None: for arg in field.args.values(): collect_referenced_types(arg.type) elif is_input_object_type(named_type): - named_type = cast(GraphQLInputObjectType, named_type) for field in named_type.fields.values(): collect_referenced_types(field.type) -def is_schema(schema: Any) -> bool: - """Test if the given value is a GraphQL schema.""" +def is_schema(schema: Any) -> TypeGuard[GraphQLSchema]: + """Check whether this is a GraphQL schema.""" return isinstance(schema, GraphQLSchema) def assert_schema(schema: Any) -> GraphQLSchema: + """Assert that this is a GraphQL schema.""" if 
not is_schema(schema): - raise TypeError(f"Expected {inspect(schema)} to be a GraphQL schema.") - return cast(GraphQLSchema, schema) + msg = f"Expected {inspect(schema)} to be a GraphQL schema." + raise TypeError(msg) + return schema def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: """Get a copy of the given type that uses this type map.""" if is_wrapping_type(type_): - type_ = cast(GraphQLWrappingType, type_) return type_.__class__(remapped_type(type_.of_type, type_map)) - type_ = cast(GraphQLNamedType, type_) + type_ = cast("GraphQLNamedType", type_) return type_map.get(type_.name, type_) def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: """Change all references in the given named type to use this type map.""" - if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) - type_.types = [ - type_map.get(member_type.name, member_type) for member_type in type_.types - ] - elif is_object_type(type_) or is_interface_type(type_): - type_ = cast(Union[GraphQLObjectType, GraphQLInterfaceType], type_) + if is_object_type(type_) or is_interface_type(type_): type_.interfaces = [ type_map.get(interface_type.name, interface_type) for interface_type in type_.interfaces ] fields = type_.fields for field_name, field in fields.items(): - field = copy(field) + field = copy(field) # noqa: PLW2901 field.type = remapped_type(field.type, type_map) args = field.args for arg_name, arg in args.items(): - arg = copy(arg) + arg = copy(arg) # noqa: PLW2901 arg.type = remapped_type(arg.type, type_map) args[arg_name] = arg fields[field_name] = field + elif is_union_type(type_): + type_.types = [ + type_map.get(member_type.name, member_type) for member_type in type_.types + ] elif is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) fields = type_.fields for field_name, field in fields.items(): - field = copy(field) + field = copy(field) # noqa: PLW2901 field.type = remapped_type(field.type, type_map) fields[field_name] = field + + +def remap_directive(directive: GraphQLDirective, type_map: TypeMap) -> None: + """Change all references in the given directive to use this type map.""" + args = directive.args + for arg_name, arg in args.items(): + arg = copy(arg) # noqa: PLW2901 + arg.type = cast("GraphQLInputType", remapped_type(arg.type, type_map)) + args[arg_name] = arg diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 555bc30d..9b22f44e 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -1,18 +1,12 @@ +"""Schema validation""" + +from __future__ import annotations + +from collections import defaultdict from operator import attrgetter, itemgetter -from typing import ( - Any, - Collection, - Dict, - List, - Optional, - Set, - Tuple, - Union, - cast, -) +from typing import Any, Collection, Optional, cast from ..error import GraphQLError -from ..pyutils import inspect from ..language import ( DirectiveNode, InputValueDefinitionNode, @@ -22,6 +16,8 @@ SchemaDefinitionNode, SchemaExtensionNode, ) +from ..pyutils import Undefined, and_list, inspect +from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of from .definition import ( GraphQLEnumType, GraphQLInputField, @@ -37,19 +33,18 @@ is_non_null_type, is_object_type, is_output_type, - is_union_type, is_required_argument, is_required_input_field, + is_union_type, ) -from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of -from .directives import is_directive, GraphQLDeprecatedDirective +from .directives 
import GraphQLDeprecatedDirective, is_directive from .introspection import is_introspection_type from .schema import GraphQLSchema, assert_schema -__all__ = ["validate_schema", "assert_valid_schema"] +__all__ = ["assert_valid_schema", "validate_schema"] -def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: +def validate_schema(schema: GraphQLSchema) -> list[GraphQLError]: """Validate a GraphQL schema. Implements the "Type Validation" sub-sections of the specification's "Type System" @@ -63,9 +58,8 @@ def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: # If this Schema has already been validated, return the previous results. # noinspection PyProtectedMember - errors = schema._validation_errors + errors = schema._validation_errors # noqa: SLF001 if errors is None: - # Validate the schema, producing a list of errors. context = SchemaValidationContext(schema) context.validate_root_types() @@ -75,7 +69,7 @@ def validate_schema(schema: GraphQLSchema) -> List[GraphQLError]: # Persist the results of validation before returning to ensure validation does # not run multiple times for this schema. errors = context.errors - schema._validation_errors = errors + schema._validation_errors = errors # noqa: SLF001 return errors @@ -93,52 +87,59 @@ def assert_valid_schema(schema: GraphQLSchema) -> None: class SchemaValidationContext: """Utility class providing a context for schema validation.""" - errors: List[GraphQLError] + errors: list[GraphQLError] schema: GraphQLSchema - def __init__(self, schema: GraphQLSchema): + def __init__(self, schema: GraphQLSchema) -> None: self.errors = [] self.schema = schema def report_error( self, message: str, - nodes: Union[Optional[Node], Collection[Optional[Node]]] = None, + nodes: Node | None | Collection[Node | None] = None, ) -> None: if nodes and not isinstance(nodes, Node): nodes = [node for node in nodes if node] - nodes = cast(Optional[Collection[Node]], nodes) + nodes = cast("Optional[Collection[Node]]", nodes) self.errors.append(GraphQLError(message, nodes)) def validate_root_types(self) -> None: schema = self.schema - query_type = schema.query_type - if not query_type: + if not schema.query_type: self.report_error("Query root type must be provided.", schema.ast_node) - elif not is_object_type(query_type): - self.report_error( - f"Query root type must be Object type, it cannot be {query_type}.", - get_operation_type_node(schema, OperationType.QUERY) - or query_type.ast_node, - ) + root_types_map: dict[GraphQLObjectType, list[OperationType]] = defaultdict(list) - mutation_type = schema.mutation_type - if mutation_type and not is_object_type(mutation_type): - self.report_error( - "Mutation root type must be Object type if provided," - f" it cannot be {mutation_type}.", - get_operation_type_node(schema, OperationType.MUTATION) - or mutation_type.ast_node, - ) - - subscription_type = schema.subscription_type - if subscription_type and not is_object_type(subscription_type): - self.report_error( - "Subscription root type must be Object type if provided," - f" it cannot be {subscription_type}.", - get_operation_type_node(schema, OperationType.SUBSCRIPTION) - or subscription_type.ast_node, - ) + for operation_type in OperationType: + root_type = schema.get_root_type(operation_type) + if root_type: + if is_object_type(root_type): + root_types_map[root_type].append(operation_type) + else: + operation_type_str = operation_type.value.capitalize() + root_type_str = inspect(root_type) + if_provided_str = ( + "" if operation_type == 
OperationType.QUERY else " if provided"
+                    )
+                    self.report_error(
+                        f"{operation_type_str} root type must be Object type"
+                        f"{if_provided_str}, it cannot be {root_type_str}.",
+                        get_operation_type_node(schema, operation_type)
+                        or root_type.ast_node,
+                    )
+        for root_type, operation_types in root_types_map.items():
+            if len(operation_types) > 1:
+                operation_list = and_list(
+                    [operation_type.value for operation_type in operation_types]
+                )
+                self.report_error(
+                    "All root types must be different,"
+                    f" '{root_type.name}' type is used as {operation_list} root types.",
+                    [
+                        get_operation_type_node(schema, operation_type)
+                        for operation_type in operation_types
+                    ],
+                )
 
     def validate_directives(self) -> None:
         directives = self.schema.directives
@@ -177,12 +178,12 @@ def validate_directives(self) -> None:
             ],
         )
 
-    def validate_name(self, node: Any, name: Optional[str] = None) -> None:
+    def validate_name(self, node: Any, name: str | None = None) -> None:
         # Ensure names are valid, however introspection types opt out.
         try:
             if not name:
                 name = node.name
-            name = cast(str, name)
+            name = cast("str", name)
             ast_node = node.ast_node
         except AttributeError:  # pragma: no cover
             pass
@@ -197,7 +198,6 @@ def validate_name(self, node: Any, name: Optional[str] = None) -> None:
     def validate_types(self) -> None:
         validate_input_object_circular_refs = InputObjectCircularRefsValidator(self)
         for type_ in self.schema.type_map.values():
-
             # Ensure all provided types are in fact GraphQL type.
             if not is_named_type(type_):
                 self.report_error(
@@ -211,38 +211,31 @@
             self.validate_name(type_)
 
             if is_object_type(type_):
-                type_ = cast(GraphQLObjectType, type_)
                 # Ensure fields are valid
                 self.validate_fields(type_)
 
                 # Ensure objects implement the interfaces they claim to.
                 self.validate_interfaces(type_)
             elif is_interface_type(type_):
-                type_ = cast(GraphQLInterfaceType, type_)
                 # Ensure fields are valid.
                 self.validate_fields(type_)
 
                 # Ensure interfaces implement the interfaces they claim to.
                 self.validate_interfaces(type_)
             elif is_union_type(type_):
-                type_ = cast(GraphQLUnionType, type_)
                 # Ensure Unions include valid member types.
                 self.validate_union_members(type_)
             elif is_enum_type(type_):
-                type_ = cast(GraphQLEnumType, type_)
                 # Ensure Enums have valid values.
                 self.validate_enum_values(type_)
             elif is_input_object_type(type_):
-                type_ = cast(GraphQLInputObjectType, type_)
                 # Ensure Input Object fields are valid.
                 self.validate_input_fields(type_)
 
                 # Ensure Input Objects do not contain non-nullable circular references
                 validate_input_object_circular_refs(type_)
 
-    def validate_fields(
-        self, type_: Union[GraphQLObjectType, GraphQLInterfaceType]
-    ) -> None:
+    def validate_fields(self, type_: GraphQLObjectType | GraphQLInterfaceType) -> None:
         fields = type_.fields
 
         # Objects and Interfaces both must define one or more fields.
@@ -253,7 +246,6 @@ def validate_fields(
             )
 
         for field_name, field in fields.items():
-
             # Ensure they are named correctly.
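
Stepping back to the `validate_root_types` rewrite above: collecting operations per root type lets a single error report one Object type reused across several operations. A sketch of what `validate_schema` would report, with the message wording taken from the hunk:

```python
from graphql import build_schema, validate_schema

# One Object type serving as both the query and the mutation root:
schema = build_schema("""
    schema { query: Root mutation: Root }
    type Root { field: String }
""")
for error in validate_schema(schema):
    print(error.message)
# expected: All root types must be different,
# 'Root' type is used as query and mutation root types.
```
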
self.validate_name(field, field_name) @@ -289,9 +281,9 @@ def validate_fields( ) def validate_interfaces( - self, type_: Union[GraphQLObjectType, GraphQLInterfaceType] + self, type_: GraphQLObjectType | GraphQLInterfaceType ) -> None: - iface_type_names: Set[str] = set() + iface_type_names: set[str] = set() for iface in type_.interfaces: if not is_interface_type(iface): self.report_error( @@ -322,7 +314,7 @@ def validate_interfaces( def validate_type_implements_interface( self, - type_: Union[GraphQLObjectType, GraphQLInterfaceType], + type_: GraphQLObjectType | GraphQLInterfaceType, iface: GraphQLInterfaceType, ) -> None: type_fields, iface_fields = type_.fields, iface.fields @@ -401,7 +393,7 @@ def validate_type_implements_interface( def validate_type_implements_ancestors( self, - type_: Union[GraphQLObjectType, GraphQLInterfaceType], + type_: GraphQLObjectType | GraphQLInterfaceType, iface: GraphQLInterfaceType, ) -> None: type_interfaces, iface_interfaces = type_.interfaces, iface.interfaces @@ -426,7 +418,7 @@ def validate_union_members(self, union: GraphQLUnionType) -> None: [union.ast_node, *union.extension_ast_nodes], ) - included_type_names: Set[str] = set() + included_type_names: set[str] = set() for member_type in member_types: if is_object_type(member_type): if member_type.name in included_type_names: @@ -462,14 +454,12 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: if not fields: self.report_error( - f"Input Object type {input_obj.name}" - " must define one or more fields.", + f"Input Object type {input_obj.name} must define one or more fields.", [input_obj.ast_node, *input_obj.extension_ast_nodes], ) # Ensure the arguments are valid for field_name, field in fields.items(): - # Ensure they are named correctly. self.validate_name(field, field_name) @@ -491,11 +481,33 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: ], ) + if input_obj.is_one_of: + self.validate_one_of_input_object_field(input_obj, field_name, field) + + def validate_one_of_input_object_field( + self, + type_: GraphQLInputObjectType, + field_name: str, + field: GraphQLInputField, + ) -> None: + if is_non_null_type(field.type): + self.report_error( + f"OneOf input field {type_.name}.{field_name} must be nullable.", + field.ast_node and field.ast_node.type, + ) + + if field.default_value is not Undefined: + self.report_error( + f"OneOf input field {type_.name}.{field_name}" + " cannot have a default value.", + field.ast_node, + ) + def get_operation_type_node( schema: GraphQLSchema, operation: OperationType -) -> Optional[Node]: - ast_node: Optional[Union[SchemaDefinitionNode, SchemaExtensionNode]] +) -> Node | None: + ast_node: SchemaDefinitionNode | SchemaExtensionNode | None for ast_node in [schema.ast_node, *(schema.extension_ast_nodes or ())]: if ast_node: operation_types = ast_node.operation_types @@ -509,15 +521,15 @@ def get_operation_type_node( class InputObjectCircularRefsValidator: """Modified copy of algorithm from validation.rules.NoFragmentCycles""" - def __init__(self, context: SchemaValidationContext): + def __init__(self, context: SchemaValidationContext) -> None: self.context = context # Tracks already visited types to maintain O(N) and to ensure that cycles # are not redundantly reported. 
- self.visited_types: Set[str] = set() + self.visited_types: set[str] = set() # Array of input fields used to produce meaningful errors - self.field_path: List[Tuple[str, GraphQLInputField]] = [] + self.field_path: list[tuple[str, GraphQLInputField]] = [] # Position in the type path - self.field_path_index_by_type_name: Dict[str, int] = {} + self.field_path_index_by_type_name: dict[str, int] = {} def __call__(self, input_obj: GraphQLInputObjectType) -> None: """Detect cycles recursively.""" @@ -535,7 +547,7 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: if is_non_null_type(field.type) and is_input_object_type( field.type.of_type ): - field_type = cast(GraphQLInputObjectType, field.type.of_type) + field_type = field.type.of_type cycle_index = self.field_path_index_by_type_name.get(field_type.name) self.field_path.append((field_name, field)) @@ -549,7 +561,7 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: " within itself through a series of non-null fields:" f" '{'.'.join(field_names)}'.", cast( - Collection[Node], + "Collection[Node]", map(attrgetter("ast_node"), map(itemgetter(1), cycle_path)), ), ) @@ -559,13 +571,13 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: def get_all_implements_interface_nodes( - type_: Union[GraphQLObjectType, GraphQLInterfaceType], iface: GraphQLInterfaceType -) -> List[NamedTypeNode]: + type_: GraphQLObjectType | GraphQLInterfaceType, iface: GraphQLInterfaceType +) -> list[NamedTypeNode]: ast_node = type_.ast_node nodes = type_.extension_ast_nodes if ast_node is not None: nodes = [ast_node, *nodes] # type: ignore - implements_nodes: List[NamedTypeNode] = [] + implements_nodes: list[NamedTypeNode] = [] for node in nodes: iface_nodes = node.interfaces if iface_nodes: # pragma: no cover else @@ -579,12 +591,12 @@ def get_all_implements_interface_nodes( def get_union_member_type_nodes( union: GraphQLUnionType, type_name: str -) -> List[NamedTypeNode]: +) -> list[NamedTypeNode]: ast_node = union.ast_node nodes = union.extension_ast_nodes if ast_node is not None: nodes = [ast_node, *nodes] # type: ignore - member_type_nodes: List[NamedTypeNode] = [] + member_type_nodes: list[NamedTypeNode] = [] for node in nodes: type_nodes = node.types if type_nodes: # pragma: no cover else @@ -597,8 +609,8 @@ def get_union_member_type_nodes( def get_deprecated_directive_node( - definition_node: Optional[Union[InputValueDefinitionNode]], -) -> Optional[DirectiveNode]: + definition_node: InputValueDefinitionNode | None, +) -> DirectiveNode | None: directives = definition_node and definition_node.directives if directives: for directive in directives: diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index 1571485b..5aadcc31 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -10,9 +10,6 @@ # Get the target Operation from a Document. from .get_operation_ast import get_operation_ast -# Get the Type for the target Operation AST. -from .get_operation_root_type import get_operation_root_type - # Convert a GraphQLSchema to an IntrospectionQuery. from .introspection_from_schema import introspection_from_schema @@ -30,9 +27,10 @@ # Print a GraphQLSchema to GraphQL Schema language. 
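
As an illustration, the `InputObjectCircularRefsValidator` walk shown above can be exercised end to end; a sketch using a hypothetical `SomeInput` type:

```python
from graphql import build_schema, validate_schema

# An input object that reaches itself through a non-null field:
schema = build_schema("""
    type Query { field(arg: SomeInput): String }
    input SomeInput { self: SomeInput! }
""")
for error in validate_schema(schema):
    print(error.message)
# expected: Cannot reference Input Object 'SomeInput' within itself
# through a series of non-null fields: 'self'.
```
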
from .print_schema import ( - print_introspection_schema, print_schema, print_type, + print_directive, + print_introspection_schema, print_value, # deprecated ) @@ -71,9 +69,6 @@ # Comparators for types from .type_comparators import is_equal_type, is_type_sub_type_of, do_types_overlap -# Assert that a string is a valid GraphQL name. -from .assert_valid_name import assert_valid_name, is_valid_name_error - # Compare two GraphQLSchemas and detect breaking changes. from .find_breaking_changes import ( BreakingChange, @@ -92,7 +87,6 @@ "IntrospectionQuery", "TypeInfo", "TypeInfoVisitor", - "assert_valid_name", "ast_from_value", "ast_to_dict", "build_ast_schema", @@ -106,12 +100,11 @@ "find_dangerous_changes", "get_introspection_query", "get_operation_ast", - "get_operation_root_type", + "introspection_from_schema", "is_equal_type", "is_type_sub_type_of", - "is_valid_name_error", - "introspection_from_schema", "lexicographic_sort_schema", + "print_directive", "print_introspection_schema", "print_schema", "print_type", diff --git a/src/graphql/utilities/assert_valid_name.py b/src/graphql/utilities/assert_valid_name.py deleted file mode 100644 index e727a482..00000000 --- a/src/graphql/utilities/assert_valid_name.py +++ /dev/null @@ -1,38 +0,0 @@ -from typing import Optional - -from ..type.assert_name import assert_name -from ..error import GraphQLError - -__all__ = ["assert_valid_name", "is_valid_name_error"] - - -def assert_valid_name(name: str) -> str: - """Uphold the spec rules about naming. - - .. deprecated:: 3.2 - Please use ``assert_name`` instead. Will be removed in v3.3. - """ - error = is_valid_name_error(name) - if error: - raise error - return name - - -def is_valid_name_error(name: str) -> Optional[GraphQLError]: - """Return an Error if a name is invalid. - - .. deprecated:: 3.2 - Please use ``assert_name`` instead. Will be removed in v3.3. - """ - if not isinstance(name, str): - raise TypeError("Expected name to be a string.") - if name.startswith("__"): - return GraphQLError( - f"Name {name!r} must not begin with '__'," - " which is reserved by GraphQL introspection." - ) - try: - assert_name(name) - except GraphQLError as error: - return error - return None diff --git a/src/graphql/utilities/ast_from_value.py b/src/graphql/utilities/ast_from_value.py index 208d9d95..dea67665 100644 --- a/src/graphql/utilities/ast_from_value.py +++ b/src/graphql/utilities/ast_from_value.py @@ -1,27 +1,28 @@ +"""GraphQL AST creation from Python""" + +from __future__ import annotations + import re from math import isfinite -from typing import Any, Mapping, Optional, cast +from typing import Any, Mapping from ..language import ( BooleanValueNode, + ConstListValueNode, + ConstObjectFieldNode, + ConstObjectValueNode, + ConstValueNode, EnumValueNode, FloatValueNode, IntValueNode, - ListValueNode, NameNode, NullValueNode, - ObjectFieldNode, - ObjectValueNode, StringValueNode, - ValueNode, ) -from ..pyutils import inspect, is_iterable, Undefined +from ..pyutils import Undefined, inspect, is_iterable from ..type import ( GraphQLID, GraphQLInputType, - GraphQLInputObjectType, - GraphQLList, - GraphQLNonNull, is_enum_type, is_input_object_type, is_leaf_type, @@ -34,7 +35,7 @@ _re_integer_string = re.compile("^-?(?:0|[1-9][0-9]*)$") -def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: +def ast_from_value(value: Any, type_: GraphQLInputType) -> ConstValueNode | None: """Produce a GraphQL Value AST given a Python object. 
This function will match Python/JSON values to GraphQL AST schema format by using @@ -59,7 +60,6 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: """ if is_non_null_type(type_): - type_ = cast(GraphQLNonNull, type_) ast_value = ast_from_value(value, type_.of_type) if isinstance(ast_value, NullValueNode): return None @@ -76,12 +76,11 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: # Convert Python list to GraphQL list. If the GraphQLType is a list, but the value # is not a list, convert the value using the list's item type. if is_list_type(type_): - type_ = cast(GraphQLList, type_) item_type = type_.of_type if is_iterable(value): maybe_value_nodes = (ast_from_value(item, item_type) for item in value) value_nodes = tuple(node for node in maybe_value_nodes if node) - return ListValueNode(values=value_nodes) + return ConstListValueNode(values=value_nodes) return ast_from_value(value, item_type) # Populate the fields of the input object by creating ASTs from each value in the @@ -89,23 +88,22 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: if is_input_object_type(type_): if value is None or not isinstance(value, Mapping): return None - type_ = cast(GraphQLInputObjectType, type_) field_items = ( (field_name, ast_from_value(value[field_name], field.type)) for field_name, field in type_.fields.items() if field_name in value ) field_nodes = tuple( - ObjectFieldNode(name=NameNode(value=field_name), value=field_value) + ConstObjectFieldNode(name=NameNode(value=field_name), value=field_value) for field_name, field_value in field_items if field_value ) - return ObjectValueNode(fields=field_nodes) + return ConstObjectValueNode(fields=field_nodes) if is_leaf_type(type_): # Since value is an internally represented value, it must be serialized to an # externally represented value before converting into an AST. - serialized = type_.serialize(value) # type: ignore + serialized = type_.serialize(value) if serialized is None or serialized is Undefined: return None @@ -133,7 +131,9 @@ def ast_from_value(value: Any, type_: GraphQLInputType) -> Optional[ValueNode]: return StringValueNode(value=serialized) - raise TypeError(f"Cannot convert value to AST: {inspect(serialized)}.") + msg = f"Cannot convert value to AST: {inspect(serialized)}." + raise TypeError(msg) # Not reachable. All possible input types have been considered. - raise TypeError(f"Unexpected input type: {inspect(type_)}.") + msg = f"Unexpected input type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover diff --git a/src/graphql/utilities/ast_to_dict.py b/src/graphql/utilities/ast_to_dict.py index 9cacd8ab..3a2b3504 100644 --- a/src/graphql/utilities/ast_to_dict.py +++ b/src/graphql/utilities/ast_to_dict.py @@ -1,46 +1,44 @@ -from typing import Any, Collection, Dict, List, Optional, overload +"""Python dictionary creation from GraphQL AST""" + +from __future__ import annotations + +from typing import Any, Collection, overload from ..language import Node, OperationType from ..pyutils import is_iterable - __all__ = ["ast_to_dict"] @overload def ast_to_dict( - node: Node, locations: bool = False, cache: Optional[Dict[Node, Any]] = None -) -> Dict: - ... + node: Node, locations: bool = False, cache: dict[Node, Any] | None = None +) -> dict: ... @overload def ast_to_dict( node: Collection[Node], locations: bool = False, - cache: Optional[Dict[Node, Any]] = None, -) -> List[Node]: - ... 
+ cache: dict[Node, Any] | None = None, +) -> list[Node]: ... @overload def ast_to_dict( node: OperationType, locations: bool = False, - cache: Optional[Dict[Node, Any]] = None, -) -> str: - ... + cache: dict[Node, Any] | None = None, +) -> str: ... def ast_to_dict( - node: Any, locations: bool = False, cache: Optional[Dict[Node, Any]] = None + node: Any, locations: bool = False, cache: dict[Node, Any] | None = None ) -> Any: """Convert a language AST to a nested Python dictionary. - Set `location` to True in order to get the locations as well. + Set `locations` to True in order to get the locations as well. """ - - """Convert a node to a nested Python dictionary.""" if isinstance(node, Node): if cache is None: cache = {} @@ -56,7 +54,7 @@ def ast_to_dict( if locations: loc = node.loc if loc: - res["loc"] = dict(start=loc.start, end=loc.end) + res["loc"] = {"start": loc.start, "end": loc.end} return res if is_iterable(node): return [ast_to_dict(sub_node, locations, cache) for sub_node in node] diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index 30990f49..26ccfea2 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -1,4 +1,8 @@ -from typing import cast, Union +"""GraphQL Schema creation from GraphQL AST""" + +from __future__ import annotations + +from typing import cast from ..language import DocumentNode, Source, parse from ..type import ( @@ -7,7 +11,7 @@ GraphQLSchemaKwargs, specified_directives, ) -from .extend_schema import extend_schema_impl +from .extend_schema import ExtendSchemaImpl __all__ = [ "build_ast_schema", @@ -36,9 +40,6 @@ def build_ast_schema( the produced schema is valid. Set ``assume_valid_sdl`` to ``True`` to assume it is already a valid SDL document. """ - if not isinstance(document_ast, DocumentNode): - raise TypeError("Must provide valid Document AST.") - if not (assume_valid or assume_valid_sdl): from ..validation.validate import assert_valid_sdl @@ -56,7 +57,9 @@ def build_ast_schema( extension_ast_nodes=(), assume_valid=False, ) - schema_kwargs = extend_schema_impl(empty_schema_kwargs, document_ast, assume_valid) + schema_kwargs = ExtendSchemaImpl.extend_schema_args( + empty_schema_kwargs, document_ast, assume_valid + ) if not schema_kwargs["ast_node"]: for type_ in schema_kwargs["types"] or (): @@ -65,15 +68,15 @@ def build_ast_schema( # validation with validate_schema() will produce more actionable results. type_name = type_.name if type_name == "Query": - schema_kwargs["query"] = cast(GraphQLObjectType, type_) + schema_kwargs["query"] = cast("GraphQLObjectType", type_) elif type_name == "Mutation": - schema_kwargs["mutation"] = cast(GraphQLObjectType, type_) + schema_kwargs["mutation"] = cast("GraphQLObjectType", type_) elif type_name == "Subscription": - schema_kwargs["subscription"] = cast(GraphQLObjectType, type_) + schema_kwargs["subscription"] = cast("GraphQLObjectType", type_) # If specified directives were not explicitly declared, add them. 
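
A quick note on the fallback logic above: a bare SDL document without an explicit schema definition still yields a fully wired schema, since operation root types are picked up by name and the specified directives are added when none are declared. A sketch:

```python
from graphql import build_schema

schema = build_schema("type Query { hello: String }")

# The type named "Query" was picked up as the query root type ...
assert schema.query_type is schema.get_type("Query")
# ... and the specified directives were added since none were declared.
assert {d.name for d in schema.directives} >= {"include", "skip", "deprecated"}
```
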
directives = schema_kwargs["directives"] - directive_names = set(directive.name for directive in directives) + directive_names = {directive.name for directive in directives} missing_directives = [] for directive in specified_directives: if directive.name not in directive_names: @@ -85,7 +88,7 @@ def build_ast_schema( def build_schema( - source: Union[str, Source], + source: str | Source, assume_valid: bool = False, assume_valid_sdl: bool = False, no_location: bool = False, diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index b9ad32ed..0e2cbd0e 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -1,8 +1,12 @@ +"""GraphQL client schema creation""" + +from __future__ import annotations + from itertools import chain -from typing import cast, Callable, Collection, Dict, List, Union +from typing import TYPE_CHECKING, Callable, Collection, cast from ..language import DirectiveLocation, parse_value -from ..pyutils import inspect, Undefined +from ..pyutils import Undefined, inspect from ..type import ( GraphQLArgument, GraphQLDirective, @@ -11,13 +15,11 @@ GraphQLField, GraphQLInputField, GraphQLInputObjectType, - GraphQLInputType, GraphQLInterfaceType, GraphQLList, GraphQLNamedType, GraphQLNonNull, GraphQLObjectType, - GraphQLOutputType, GraphQLScalarType, GraphQLSchema, GraphQLType, @@ -31,22 +33,25 @@ is_output_type, specified_scalar_types, ) -from .get_introspection_query import ( - IntrospectionDirective, - IntrospectionEnumType, - IntrospectionField, - IntrospectionInterfaceType, - IntrospectionInputObjectType, - IntrospectionInputValue, - IntrospectionObjectType, - IntrospectionQuery, - IntrospectionScalarType, - IntrospectionType, - IntrospectionTypeRef, - IntrospectionUnionType, -) from .value_from_ast import value_from_ast +if TYPE_CHECKING: + from .get_introspection_query import ( + IntrospectionDirective, + IntrospectionEnumType, + IntrospectionField, + IntrospectionInputObjectType, + IntrospectionInputValue, + IntrospectionInterfaceType, + IntrospectionObjectType, + IntrospectionQuery, + IntrospectionScalarType, + IntrospectionType, + IntrospectionTypeRef, + IntrospectionUnionType, + ) + + __all__ = ["build_client_schema"] @@ -64,14 +69,17 @@ def build_client_schema( This function expects a complete introspection result. Don't forget to check the "errors" field of a server response before calling this function. """ + # Even though the `introspection` argument is typed, in most cases it's received + # as an untyped value from the server, so we will do an additional check here. if not isinstance(introspection, dict) or not isinstance( introspection.get("__schema"), dict ): - raise TypeError( + msg = ( "Invalid or incomplete introspection result. Ensure that you" " are passing the 'data' attribute of an introspection response" f" and no 'errors' were returned alongside: {inspect(introspection)}." ) + raise TypeError(msg) # Get the schema from the introspection result. schema_introspection = introspection["__schema"] @@ -83,31 +91,35 @@ def get_type(type_ref: IntrospectionTypeRef) -> GraphQLType: if kind == TypeKind.LIST.name: item_ref = type_ref.get("ofType") if not item_ref: - raise TypeError("Decorated type deeper than introspection query.") - item_ref = cast(IntrospectionTypeRef, item_ref) + msg = "Decorated type deeper than introspection query." 
+ raise TypeError(msg) + item_ref = cast("IntrospectionTypeRef", item_ref) return GraphQLList(get_type(item_ref)) if kind == TypeKind.NON_NULL.name: nullable_ref = type_ref.get("ofType") if not nullable_ref: - raise TypeError("Decorated type deeper than introspection query.") - nullable_ref = cast(IntrospectionTypeRef, nullable_ref) + msg = "Decorated type deeper than introspection query." + raise TypeError(msg) + nullable_ref = cast("IntrospectionTypeRef", nullable_ref) nullable_type = get_type(nullable_ref) return GraphQLNonNull(assert_nullable_type(nullable_type)) - type_ref = cast(IntrospectionType, type_ref) + type_ref = cast("IntrospectionType", type_ref) return get_named_type(type_ref) def get_named_type(type_ref: IntrospectionType) -> GraphQLNamedType: type_name = type_ref.get("name") if not type_name: - raise TypeError(f"Unknown type reference: {inspect(type_ref)}.") + msg = f"Unknown type reference: {inspect(type_ref)}." + raise TypeError(msg) type_ = type_map.get(type_name) if not type_: - raise TypeError( + msg = ( f"Invalid or incomplete schema, unknown type: {type_name}." " Ensure that a full introspection query is used in order" " to build a client schema." ) + raise TypeError(msg) return type_ def get_object_type(type_ref: IntrospectionObjectType) -> GraphQLObjectType: @@ -124,48 +136,57 @@ def build_type(type_: IntrospectionType) -> GraphQLNamedType: builder = type_builders.get(type_["kind"]) if builder: # pragma: no cover else return builder(type_) - raise TypeError( + msg = ( "Invalid or incomplete introspection result." " Ensure that a full introspection query is used in order" f" to build a client schema: {inspect(type_)}." ) + raise TypeError(msg) def build_scalar_def( scalar_introspection: IntrospectionScalarType, ) -> GraphQLScalarType: - return GraphQLScalarType( - name=scalar_introspection["name"], - description=scalar_introspection.get("description"), - specified_by_url=scalar_introspection.get("specifiedByURL"), - ) + name = scalar_introspection["name"] + try: + return cast("GraphQLScalarType", GraphQLScalarType.reserved_types[name]) + except KeyError: + return GraphQLScalarType( + name=name, + description=scalar_introspection.get("description"), + specified_by_url=scalar_introspection.get("specifiedByURL"), + ) def build_implementations_list( - implementing_introspection: Union[ - IntrospectionObjectType, IntrospectionInterfaceType - ], - ) -> List[GraphQLInterfaceType]: + implementing_introspection: IntrospectionObjectType + | IntrospectionInterfaceType, + ) -> list[GraphQLInterfaceType]: maybe_interfaces = implementing_introspection.get("interfaces") if maybe_interfaces is None: # Temporary workaround until GraphQL ecosystem will fully support # 'interfaces' on interface types if implementing_introspection["kind"] == TypeKind.INTERFACE.name: return [] - raise TypeError( + msg = ( "Introspection result missing interfaces:" f" {inspect(implementing_introspection)}." 
) - interfaces = cast(Collection[IntrospectionInterfaceType], maybe_interfaces) + raise TypeError(msg) + interfaces = cast("Collection[IntrospectionInterfaceType]", maybe_interfaces) return [get_interface_type(interface) for interface in interfaces] def build_object_def( object_introspection: IntrospectionObjectType, ) -> GraphQLObjectType: - return GraphQLObjectType( - name=object_introspection["name"], - description=object_introspection.get("description"), - interfaces=lambda: build_implementations_list(object_introspection), - fields=lambda: build_field_def_map(object_introspection), - ) + name = object_introspection["name"] + try: + return cast("GraphQLObjectType", GraphQLObjectType.reserved_types[name]) + except KeyError: + return GraphQLObjectType( + name=name, + description=object_introspection.get("description"), + interfaces=lambda: build_implementations_list(object_introspection), + fields=lambda: build_field_def_map(object_introspection), + ) def build_interface_def( interface_introspection: IntrospectionInterfaceType, @@ -182,11 +203,14 @@ def build_union_def( ) -> GraphQLUnionType: maybe_possible_types = union_introspection.get("possibleTypes") if maybe_possible_types is None: - raise TypeError( + msg = ( "Introspection result missing possibleTypes:" f" {inspect(union_introspection)}." ) - possible_types = cast(Collection[IntrospectionObjectType], maybe_possible_types) + raise TypeError(msg) + possible_types = cast( + "Collection[IntrospectionObjectType]", maybe_possible_types + ) return GraphQLUnionType( name=union_introspection["name"], description=union_introspection.get("description"), @@ -195,31 +219,37 @@ def build_union_def( def build_enum_def(enum_introspection: IntrospectionEnumType) -> GraphQLEnumType: if enum_introspection.get("enumValues") is None: - raise TypeError( + msg = ( "Introspection result missing enumValues:" f" {inspect(enum_introspection)}." ) - return GraphQLEnumType( - name=enum_introspection["name"], - description=enum_introspection.get("description"), - values={ - value_introspect["name"]: GraphQLEnumValue( - value=value_introspect["name"], - description=value_introspect.get("description"), - deprecation_reason=value_introspect.get("deprecationReason"), - ) - for value_introspect in enum_introspection["enumValues"] - }, - ) + raise TypeError(msg) + name = enum_introspection["name"] + try: + return cast("GraphQLEnumType", GraphQLEnumType.reserved_types[name]) + except KeyError: + return GraphQLEnumType( + name=name, + description=enum_introspection.get("description"), + values={ + value_introspect["name"]: GraphQLEnumValue( + value=value_introspect["name"], + description=value_introspect.get("description"), + deprecation_reason=value_introspect.get("deprecationReason"), + ) + for value_introspect in enum_introspection["enumValues"] + }, + ) def build_input_object_def( input_object_introspection: IntrospectionInputObjectType, ) -> GraphQLInputObjectType: if input_object_introspection.get("inputFields") is None: - raise TypeError( + msg = ( "Introspection result missing inputFields:" f" {inspect(input_object_introspection)}." 
) + raise TypeError(msg) return GraphQLInputObjectType( name=input_object_introspection["name"], description=input_object_introspection.get("description"), @@ -228,7 +258,7 @@ def build_input_object_def( ), ) - type_builders: Dict[str, Callable[[IntrospectionType], GraphQLNamedType]] = { + type_builders: dict[str, Callable[[IntrospectionType], GraphQLNamedType]] = { TypeKind.SCALAR.name: build_scalar_def, # type: ignore TypeKind.OBJECT.name: build_object_def, # type: ignore TypeKind.INTERFACE.name: build_interface_def, # type: ignore @@ -238,33 +268,34 @@ def build_input_object_def( } def build_field_def_map( - type_introspection: Union[IntrospectionObjectType, IntrospectionInterfaceType], - ) -> Dict[str, GraphQLField]: + type_introspection: IntrospectionObjectType | IntrospectionInterfaceType, + ) -> dict[str, GraphQLField]: if type_introspection.get("fields") is None: - raise TypeError( - f"Introspection result missing fields: {type_introspection}." - ) + msg = f"Introspection result missing fields: {type_introspection}." + + raise TypeError(msg) return { field_introspection["name"]: build_field(field_introspection) for field_introspection in type_introspection["fields"] } def build_field(field_introspection: IntrospectionField) -> GraphQLField: - type_introspection = cast(IntrospectionType, field_introspection["type"]) + type_introspection = cast("IntrospectionType", field_introspection["type"]) type_ = get_type(type_introspection) if not is_output_type(type_): - raise TypeError( + msg = ( "Introspection must provide output type for fields," f" but received: {inspect(type_)}." ) - type_ = cast(GraphQLOutputType, type_) + raise TypeError(msg) args_introspection = field_introspection.get("args") if args_introspection is None: - raise TypeError( + msg = ( "Introspection result missing field args:" f" {inspect(field_introspection)}." ) + raise TypeError(msg) return GraphQLField( type_, @@ -275,7 +306,7 @@ def build_field(field_introspection: IntrospectionField) -> GraphQLField: def build_argument_def_map( argument_value_introspections: Collection[IntrospectionInputValue], - ) -> Dict[str, GraphQLArgument]: + ) -> dict[str, GraphQLArgument]: return { argument_introspection["name"]: build_argument(argument_introspection) for argument_introspection in argument_value_introspections @@ -284,14 +315,14 @@ def build_argument_def_map( def build_argument( argument_introspection: IntrospectionInputValue, ) -> GraphQLArgument: - type_introspection = cast(IntrospectionType, argument_introspection["type"]) + type_introspection = cast("IntrospectionType", argument_introspection["type"]) type_ = get_type(type_introspection) if not is_input_type(type_): - raise TypeError( + msg = ( "Introspection must provide input type for arguments," f" but received: {inspect(type_)}." 
) - type_ = cast(GraphQLInputType, type_) + raise TypeError(msg) default_value_introspection = argument_introspection.get("defaultValue") default_value = ( @@ -308,7 +339,7 @@ def build_argument( def build_input_value_def_map( input_value_introspections: Collection[IntrospectionInputValue], - ) -> Dict[str, GraphQLInputField]: + ) -> dict[str, GraphQLInputField]: return { input_value_introspection["name"]: build_input_value( input_value_introspection @@ -319,14 +350,16 @@ def build_input_value_def_map( def build_input_value( input_value_introspection: IntrospectionInputValue, ) -> GraphQLInputField: - type_introspection = cast(IntrospectionType, input_value_introspection["type"]) + type_introspection = cast( + "IntrospectionType", input_value_introspection["type"] + ) type_ = get_type(type_introspection) if not is_input_type(type_): - raise TypeError( + msg = ( "Introspection must provide input type for input fields," f" but received: {inspect(type_)}." ) - type_ = cast(GraphQLInputType, type_) + raise TypeError(msg) default_value_introspection = input_value_introspection.get("defaultValue") default_value = ( @@ -345,22 +378,24 @@ def build_directive( directive_introspection: IntrospectionDirective, ) -> GraphQLDirective: if directive_introspection.get("args") is None: - raise TypeError( + msg = ( "Introspection result missing directive args:" f" {inspect(directive_introspection)}." ) + raise TypeError(msg) if directive_introspection.get("locations") is None: - raise TypeError( + msg = ( "Introspection result missing directive locations:" f" {inspect(directive_introspection)}." ) + raise TypeError(msg) return GraphQLDirective( name=directive_introspection["name"], description=directive_introspection.get("description"), is_repeatable=directive_introspection.get("isRepeatable", False), locations=list( cast( - Collection[DirectiveLocation], + "Collection[DirectiveLocation]", directive_introspection.get("locations"), ) ), @@ -368,7 +403,7 @@ def build_directive( ) # Iterate through all types, getting the type definition for each. 
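
# A round-trip sketch for build_client_schema: execute the introspection query
# against some schema, check for errors, then rebuild a client-side schema.
# The reserved_types lookups added above make built-in types (e.g. Int, __Type)
# come back as the canonical singleton instances rather than fresh copies.
from graphql import build_schema, graphql_sync
from graphql.utilities import build_client_schema, get_introspection_query

server_schema = build_schema("type Query { hello: String }")
result = graphql_sync(server_schema, get_introspection_query())
assert result.errors is None  # always check errors before using result.data
client_schema = build_client_schema(result.data)
assert client_schema.query_type is not None
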
- type_map: Dict[str, GraphQLNamedType] = { + type_map: dict[str, GraphQLNamedType] = { type_introspection["name"]: build_type(type_introspection) for type_introspection in schema_introspection["types"] } diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index 6901c892..b7452ec3 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -1,36 +1,42 @@ -from typing import Any, Callable, Dict, List, Optional, Union, cast +"""Input value coercion""" +from __future__ import annotations + +from typing import Any, Callable, List, Union, cast from ..error import GraphQLError from ..pyutils import ( Path, + Undefined, did_you_mean, inspect, is_iterable, print_path_list, suggestion_list, - Undefined, ) from ..type import ( - GraphQLInputObjectType, GraphQLInputType, - GraphQLList, GraphQLScalarType, - is_leaf_type, is_input_object_type, + is_leaf_type, is_list_type, is_non_null_type, - GraphQLNonNull, ) +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["coerce_input_value"] -OnErrorCB = Callable[[List[Union[str, int]], Any, GraphQLError], None] +OnErrorCB: TypeAlias = Callable[[List[Union[str, int]], Any, GraphQLError], None] def default_on_error( - path: List[Union[str, int]], invalid_value: Any, error: GraphQLError + path: list[str | int], invalid_value: Any, error: GraphQLError ) -> None: error_prefix = "Invalid value " + inspect(invalid_value) if path: @@ -43,12 +49,11 @@ def coerce_input_value( input_value: Any, type_: GraphQLInputType, on_error: OnErrorCB = default_on_error, - path: Optional[Path] = None, + path: Path | None = None, ) -> Any: """Coerce a Python value given a GraphQL Input Type.""" if is_non_null_type(type_): if input_value is not None and input_value is not Undefined: - type_ = cast(GraphQLNonNull, type_) return coerce_input_value(input_value, type_.of_type, on_error, path) on_error( path.as_list() if path else [], @@ -64,10 +69,9 @@ def coerce_input_value( return None if is_list_type(type_): - type_ = cast(GraphQLList, type_) item_type = type_.of_type if is_iterable(input_value): - coerced_list: List[Any] = [] + coerced_list: list[Any] = [] append_item = coerced_list.append for index, item_value in enumerate(input_value): append_item( @@ -80,7 +84,6 @@ def coerce_input_value( return [coerce_input_value(input_value, item_type, on_error, path)] if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) if not isinstance(input_value, dict): on_error( path.as_list() if path else [], @@ -89,7 +92,7 @@ def coerce_input_value( ) return Undefined - coerced_dict: Dict[str, Any] = {} + coerced_dict: dict[str, Any] = {} fields = type_.fields for field_name, field in fields.items(): @@ -127,18 +130,43 @@ def coerce_input_value( + did_you_mean(suggestions) ), ) + + if type_.is_one_of: + keys = list(coerced_dict) + if len(keys) != 1: + on_error( + path.as_list() if path else [], + input_value, + GraphQLError( + "Exactly one key must be specified" + f" for OneOf type '{type_.name}'.", + ), + ) + else: + key = keys[0] + value = coerced_dict[key] + if value is None: + on_error( + (path.as_list() if path else []) + [key], + value, + GraphQLError( + f"Field '{key}' must be non-null.", + ), + ) + return type_.out_type(coerced_dict) if is_leaf_type(type_): - # Scalars determine if a value is valid via `parse_value()`, which can throw to - # indicate failure. 
If it throws, maintain a reference to the original error. - type_ = cast(GraphQLScalarType, type_) + # Scalars and Enums determine if an input value is valid via `parse_value()`, + # which can throw to indicate failure. If it throws, maintain a reference + # to the original error. + type_ = cast("GraphQLScalarType", type_) try: parse_result = type_.parse_value(input_value) except GraphQLError as error: on_error(path.as_list() if path else [], input_value, error) return Undefined - except Exception as error: + except Exception as error: # noqa: BLE001 on_error( path.as_list() if path else [], input_value, @@ -156,4 +184,5 @@ def coerce_input_value( return parse_result # Not reachable. All possible input types have been considered. - raise TypeError(f"Unexpected input type: {inspect(type_)}.") + msg = f"Unexpected input type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover diff --git a/src/graphql/utilities/concat_ast.py b/src/graphql/utilities/concat_ast.py index cd12a74f..806292f9 100644 --- a/src/graphql/utilities/concat_ast.py +++ b/src/graphql/utilities/concat_ast.py @@ -1,3 +1,7 @@ +"""AST concatenation""" + +from __future__ import annotations + from itertools import chain from typing import Collection diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 93c8dce4..aebdd2b3 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -1,14 +1,14 @@ +"""GraphQL schema extension""" + +from __future__ import annotations + from collections import defaultdict +from functools import partial from typing import ( Any, - Callable, Collection, - DefaultDict, - Dict, - List, Mapping, - Optional, - Union, + TypeVar, cast, ) @@ -33,8 +33,8 @@ OperationType, ScalarTypeDefinitionNode, ScalarTypeExtensionNode, - SchemaExtensionNode, SchemaDefinitionNode, + SchemaExtensionNode, TypeDefinitionNode, TypeExtensionNode, TypeNode, @@ -53,15 +53,19 @@ GraphQLField, GraphQLFieldMap, GraphQLInputField, + GraphQLInputFieldMap, GraphQLInputObjectType, + GraphQLInputObjectTypeKwargs, GraphQLInputType, - GraphQLInputFieldMap, GraphQLInterfaceType, + GraphQLInterfaceTypeKwargs, GraphQLList, GraphQLNamedType, GraphQLNonNull, GraphQLNullableType, GraphQLObjectType, + GraphQLObjectTypeKwargs, + GraphQLOneOfDirective, GraphQLOutputType, GraphQLScalarType, GraphQLSchema, @@ -69,25 +73,27 @@ GraphQLSpecifiedByDirective, GraphQLType, GraphQLUnionType, + GraphQLUnionTypeKwargs, assert_schema, + introspection_types, is_enum_type, is_input_object_type, is_interface_type, + is_introspection_type, is_list_type, is_non_null_type, is_object_type, is_scalar_type, - is_union_type, - is_introspection_type, + is_specified_directive, is_specified_scalar_type, - introspection_types, + is_union_type, specified_scalar_types, ) from .value_from_ast import value_from_ast __all__ = [ + "ExtendSchemaImpl", "extend_schema", - "extend_schema_impl", ] @@ -116,162 +122,271 @@ def extend_schema( """ assert_schema(schema) - if not isinstance(document_ast, DocumentNode): - raise TypeError("Must provide valid Document AST.") - if not (assume_valid or assume_valid_sdl): from ..validation.validate import assert_valid_sdl_extension assert_valid_sdl_extension(document_ast, schema) schema_kwargs = schema.to_kwargs() - extended_kwargs = extend_schema_impl(schema_kwargs, document_ast, assume_valid) + extended_kwargs = ExtendSchemaImpl.extend_schema_args( + schema_kwargs, document_ast, assume_valid + ) return ( schema if schema_kwargs is 
extended_kwargs else GraphQLSchema(**extended_kwargs) ) -def extend_schema_impl( - schema_kwargs: GraphQLSchemaKwargs, - document_ast: DocumentNode, - assume_valid: bool = False, -) -> GraphQLSchemaKwargs: - """Extend the given schema arguments with extensions from a given document. +TEN = TypeVar("TEN", bound=TypeExtensionNode) + + +class TypeExtensionsMap: + """Mappings from types to their extensions.""" + + scalar: defaultdict[str, list[ScalarTypeExtensionNode]] + object: defaultdict[str, list[ObjectTypeExtensionNode]] + interface: defaultdict[str, list[InterfaceTypeExtensionNode]] + union: defaultdict[str, list[UnionTypeExtensionNode]] + enum: defaultdict[str, list[EnumTypeExtensionNode]] + input_object: defaultdict[str, list[InputObjectTypeExtensionNode]] + + def __init__(self) -> None: + self.scalar = defaultdict(list) + self.object = defaultdict(list) + self.interface = defaultdict(list) + self.union = defaultdict(list) + self.enum = defaultdict(list) + self.input_object = defaultdict(list) + + def for_node(self, node: TEN) -> defaultdict[str, list[TEN]]: + """Get type extensions map for the given node kind.""" + kind = node.kind + try: + kind = kind.removesuffix("_type_extension") + except AttributeError: # pragma: no cover (Python < 3.9) + if kind.endswith("_type_extension"): + kind = kind[:-15] + return getattr(self, kind) + + +class ExtendSchemaImpl: + """Helper class implementing the methods to extend a schema. + + Note: We use a class instead of an implementation with local functions + and lambda functions so that the extended schema can be pickled. For internal use only. """ - # Note: schema_kwargs should become a TypedDict once we require Python 3.8 - - # Collect the type definitions and extensions found in the document. - type_defs: List[TypeDefinitionNode] = [] - type_extensions_map: DefaultDict[str, Any] = defaultdict(list) - - # New directives and types are separate because a directives and types can have the - # same name. For example, a type named "skip". - directive_defs: List[DirectiveDefinitionNode] = [] - - schema_def: Optional[SchemaDefinitionNode] = None - # Schema extensions are collected which may add additional operation types. - schema_extensions: List[SchemaExtensionNode] = [] - - for def_ in document_ast.definitions: - if isinstance(def_, SchemaDefinitionNode): - schema_def = def_ - elif isinstance(def_, SchemaExtensionNode): - schema_extensions.append(def_) - elif isinstance(def_, TypeDefinitionNode): - type_defs.append(def_) - elif isinstance(def_, TypeExtensionNode): - extended_type_name = def_.name.value - type_extensions_map[extended_type_name].append(def_) - elif isinstance(def_, DirectiveDefinitionNode): - directive_defs.append(def_) - - # If this document contains no new types, extensions, or directives then return the - # same unmodified GraphQLSchema instance. - if ( - not type_extensions_map - and not type_defs - and not directive_defs - and not schema_extensions - and not schema_def - ): - return schema_kwargs - - # Below are functions used for producing this schema that have closed over this - # scope and have access to the schema, cache, and newly defined types. 
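
# Why the nested closures above were replaced by methods: the class docstring
# notes this is so that the extended schema can be pickled. Closures and
# lambdas are not picklable, while bound methods and functools.partial over
# module-level callables are. A standalone illustration of that constraint:
import pickle
from functools import partial

def make_fields(names):
    return dict.fromkeys(names)

thunk = partial(make_fields, ["hello"])  # picklable replacement for a lambda
assert pickle.loads(pickle.dumps(thunk))() == {"hello": None}

try:
    pickle.dumps(lambda: make_fields(["hello"]))  # the old closure style
except (pickle.PicklingError, AttributeError):
    pass  # lambdas cannot be pickled by reference
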
+ + type_map: dict[str, GraphQLNamedType] + type_extensions: TypeExtensionsMap + + def __init__(self, type_extensions: TypeExtensionsMap) -> None: + self.type_map = {} + self.type_extensions = type_extensions + + @classmethod + def extend_schema_args( + cls, + schema_kwargs: GraphQLSchemaKwargs, + document_ast: DocumentNode, + assume_valid: bool = False, + ) -> GraphQLSchemaKwargs: + """Extend the given schema arguments with extensions from a given document. + + For internal use only. + """ + # Collect the type definitions and extensions found in the document. + type_defs: list[TypeDefinitionNode] = [] + + type_extensions = TypeExtensionsMap() + + # New directives and types are separate because a directives and types can have + # the same name. For example, a type named "skip". + directive_defs: list[DirectiveDefinitionNode] = [] + + schema_def: SchemaDefinitionNode | None = None + # Schema extensions are collected which may add additional operation types. + schema_extensions: list[SchemaExtensionNode] = [] + + is_schema_changed = False + for def_ in document_ast.definitions: + if isinstance(def_, SchemaDefinitionNode): + schema_def = def_ + elif isinstance(def_, SchemaExtensionNode): + schema_extensions.append(def_) + elif isinstance(def_, DirectiveDefinitionNode): + directive_defs.append(def_) + elif isinstance(def_, TypeDefinitionNode): + type_defs.append(def_) + elif isinstance(def_, TypeExtensionNode): + type_extensions.for_node(def_)[def_.name.value].append(def_) + else: + continue + is_schema_changed = True + + # If this document contains no new types, extensions, or directives then return + # the same unmodified GraphQLSchema instance. + if not is_schema_changed: + return schema_kwargs + + self = cls(type_extensions) + + self.type_map = { + type_.name: self.extend_named_type(type_) + for type_ in schema_kwargs["types"] or () + } + + for type_node in type_defs: + name = type_node.name.value + self.type_map[name] = std_type_map.get(name) or self.build_type(type_node) + + # Get the extended root operation types. + operation_types: dict[OperationType, GraphQLNamedType] = {} + for operation_type in OperationType: + original_type = schema_kwargs[operation_type.value] + if original_type: + operation_types[operation_type] = self.replace_named_type(original_type) + # Then, incorporate schema definition and all schema extensions. + if schema_def: + operation_types.update(self.get_operation_types([schema_def])) + if schema_extensions: + operation_types.update(self.get_operation_types(schema_extensions)) + + # Then produce and return the kwargs for a Schema with these types. 
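
# A short usage sketch for the kwargs assembly above: extend_schema returns the
# original schema instance when the document changes nothing, and a new schema
# otherwise. Built-in (specified) directives are now left untouched, see
# replace_directive below.
from graphql import build_schema, extend_schema, parse

schema = build_schema("type Query { hello: String }")
extended = extend_schema(schema, parse("extend type Query { bye: String }"))
assert extended is not schema
assert "bye" in extended.query_type.fields
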
+ get_operation = operation_types.get + description = ( + schema_def.description.value + if schema_def and schema_def.description + else None + ) + if description is None: + description = schema_kwargs["description"] + return GraphQLSchemaKwargs( + query=get_operation(OperationType.QUERY), # type: ignore + mutation=get_operation(OperationType.MUTATION), # type: ignore + subscription=get_operation(OperationType.SUBSCRIPTION), # type: ignore + types=tuple(self.type_map.values()), + directives=tuple( + self.replace_directive(directive) + for directive in schema_kwargs["directives"] + ) + + tuple(self.build_directive(directive) for directive in directive_defs), + description=description, + extensions=schema_kwargs["extensions"], + ast_node=schema_def or schema_kwargs["ast_node"], + extension_ast_nodes=schema_kwargs["extension_ast_nodes"] + + tuple(schema_extensions), + assume_valid=assume_valid, + ) # noinspection PyTypeChecker,PyUnresolvedReferences - def replace_type(type_: GraphQLType) -> GraphQLType: + def replace_type(self, type_: GraphQLType) -> GraphQLType: + """Replace a GraphQL type.""" if is_list_type(type_): - return GraphQLList(replace_type(type_.of_type)) # type: ignore + return GraphQLList(self.replace_type(type_.of_type)) if is_non_null_type(type_): - return GraphQLNonNull(replace_type(type_.of_type)) # type: ignore - return replace_named_type(type_) # type: ignore + return GraphQLNonNull(self.replace_type(type_.of_type)) # type: ignore + return self.replace_named_type(type_) # type: ignore - def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: + def replace_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: + """Replace a named GraphQL type.""" # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system # validation with validate_schema() will produce more actionable results. - return type_map[type_.name] + return self.type_map[type_.name] # noinspection PyShadowingNames - def replace_directive(directive: GraphQLDirective) -> GraphQLDirective: + def replace_directive(self, directive: GraphQLDirective) -> GraphQLDirective: + """Replace a GraphQL directive.""" + if is_specified_directive(directive): + # Builtin directives are not extended. + return directive + kwargs = directive.to_kwargs() return GraphQLDirective( **merge_kwargs( kwargs, - args={name: extend_arg(arg) for name, arg in kwargs["args"].items()}, + args={ + name: self.extend_arg(arg) for name, arg in kwargs["args"].items() + }, ) ) - def extend_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: + def extend_named_type(self, type_: GraphQLNamedType) -> GraphQLNamedType: + """Extend a named GraphQL type.""" if is_introspection_type(type_) or is_specified_scalar_type(type_): # Builtin types are not extended. 
return type_ if is_scalar_type(type_): - type_ = cast(GraphQLScalarType, type_) - return extend_scalar_type(type_) + return self.extend_scalar_type(type_) if is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) - return extend_object_type(type_) + return self.extend_object_type(type_) if is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) - return extend_interface_type(type_) + return self.extend_interface_type(type_) if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) - return extend_union_type(type_) + return self.extend_union_type(type_) if is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) - return extend_enum_type(type_) + return self.extend_enum_type(type_) if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) - return extend_input_object_type(type_) + return self.extend_input_object_type(type_) # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") # pragma: no cover + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover + + def extend_input_object_type_fields( + self, kwargs: GraphQLInputObjectTypeKwargs, extensions: tuple[Any, ...] + ) -> GraphQLInputFieldMap: + """Extend GraphQL input object type fields.""" + return { + **{ + name: GraphQLInputField( + **merge_kwargs( + field.to_kwargs(), + type_=self.replace_type(field.type), + ) + ) + for name, field in kwargs["fields"].items() + }, + **self.build_input_field_map(extensions), + } # noinspection PyShadowingNames def extend_input_object_type( + self, type_: GraphQLInputObjectType, ) -> GraphQLInputObjectType: + """Extend a GraphQL input object type.""" kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.input_object[kwargs["name"]]) return GraphQLInputObjectType( **merge_kwargs( kwargs, - fields=lambda: { - **{ - name: GraphQLInputField( - **merge_kwargs( - field.to_kwargs(), - type_=replace_type(field.type), - ) - ) - for name, field in kwargs["fields"].items() - }, - **build_input_field_map(extensions), - }, + fields=partial( + self.extend_input_object_type_fields, kwargs, extensions + ), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) - def extend_enum_type(type_: GraphQLEnumType) -> GraphQLEnumType: + def extend_enum_type(self, type_: GraphQLEnumType) -> GraphQLEnumType: + """Extend a GraphQL enum type.""" kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.enum[kwargs["name"]]) return GraphQLEnumType( **merge_kwargs( kwargs, - values={**kwargs["values"], **build_enum_value_map(extensions)}, + values={**kwargs["values"], **self.build_enum_value_map(extensions)}, extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) - def extend_scalar_type(type_: GraphQLScalarType) -> GraphQLScalarType: + def extend_scalar_type(self, type_: GraphQLScalarType) -> GraphQLScalarType: + """Extend a GraphQL scalar type.""" kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.scalar[kwargs["name"]]) specified_by_url = kwargs["specified_by_url"] for extension_node in extensions: @@ -285,120 +400,163 @@ def extend_scalar_type(type_: GraphQLScalarType) -> GraphQLScalarType: ) ) + def extend_object_type_interfaces( + self, kwargs: GraphQLObjectTypeKwargs, extensions: tuple[Any, ...] 
+ ) -> list[GraphQLInterfaceType]: + """Extend a GraphQL object type interface.""" + return [ + cast("GraphQLInterfaceType", self.replace_named_type(interface)) + for interface in kwargs["interfaces"] + ] + self.build_interfaces(extensions) + + def extend_object_type_fields( + self, kwargs: GraphQLObjectTypeKwargs, extensions: tuple[Any, ...] + ) -> GraphQLFieldMap: + """Extend GraphQL object type fields.""" + return { + **{ + name: self.extend_field(field) + for name, field in kwargs["fields"].items() + }, + **self.build_field_map(extensions), + } + # noinspection PyShadowingNames - def extend_object_type(type_: GraphQLObjectType) -> GraphQLObjectType: + def extend_object_type(self, type_: GraphQLObjectType) -> GraphQLObjectType: + """Extend a GraphQL object type.""" kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.object[kwargs["name"]]) return GraphQLObjectType( **merge_kwargs( kwargs, - interfaces=lambda: [ - cast(GraphQLInterfaceType, replace_named_type(interface)) - for interface in kwargs["interfaces"] - ] - + build_interfaces(extensions), - fields=lambda: { - **{ - name: extend_field(field) - for name, field in kwargs["fields"].items() - }, - **build_field_map(extensions), - }, + interfaces=partial( + self.extend_object_type_interfaces, kwargs, extensions + ), + fields=partial(self.extend_object_type_fields, kwargs, extensions), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) + def extend_interface_type_interfaces( + self, kwargs: GraphQLInterfaceTypeKwargs, extensions: tuple[Any, ...] + ) -> list[GraphQLInterfaceType]: + """Extend GraphQL interface type interfaces.""" + return [ + cast("GraphQLInterfaceType", self.replace_named_type(interface)) + for interface in kwargs["interfaces"] + ] + self.build_interfaces(extensions) + + def extend_interface_type_fields( + self, kwargs: GraphQLInterfaceTypeKwargs, extensions: tuple[Any, ...] + ) -> GraphQLFieldMap: + """Extend GraphQL interface type fields.""" + return { + **{ + name: self.extend_field(field) + for name, field in kwargs["fields"].items() + }, + **self.build_field_map(extensions), + } + # noinspection PyShadowingNames - def extend_interface_type(type_: GraphQLInterfaceType) -> GraphQLInterfaceType: + def extend_interface_type( + self, type_: GraphQLInterfaceType + ) -> GraphQLInterfaceType: + """Extend a GraphQL interface type.""" kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.interface[kwargs["name"]]) return GraphQLInterfaceType( **merge_kwargs( kwargs, - interfaces=lambda: [ - cast(GraphQLInterfaceType, replace_named_type(interface)) - for interface in kwargs["interfaces"] - ] - + build_interfaces(extensions), - fields=lambda: { - **{ - name: extend_field(field) - for name, field in kwargs["fields"].items() - }, - **build_field_map(extensions), - }, + interfaces=partial( + self.extend_interface_type_interfaces, kwargs, extensions + ), + fields=partial(self.extend_interface_type_fields, kwargs, extensions), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, ) ) - def extend_union_type(type_: GraphQLUnionType) -> GraphQLUnionType: + def extend_union_type_types( + self, kwargs: GraphQLUnionTypeKwargs, extensions: tuple[Any, ...] 
+ ) -> list[GraphQLObjectType]: + """Extend types of a GraphQL union type.""" + return [ + cast("GraphQLObjectType", self.replace_named_type(member_type)) + for member_type in kwargs["types"] + ] + self.build_union_types(extensions) + + def extend_union_type(self, type_: GraphQLUnionType) -> GraphQLUnionType: + """Extend a GraphQL union type.""" kwargs = type_.to_kwargs() - extensions = tuple(type_extensions_map[kwargs["name"]]) + extensions = tuple(self.type_extensions.union[kwargs["name"]]) return GraphQLUnionType( **merge_kwargs( kwargs, - types=lambda: [ - cast(GraphQLObjectType, replace_named_type(member_type)) - for member_type in kwargs["types"] - ] - + build_union_types(extensions), + types=partial(self.extend_union_type_types, kwargs, extensions), extension_ast_nodes=kwargs["extension_ast_nodes"] + extensions, - ) + ), ) # noinspection PyShadowingNames - def extend_field(field: GraphQLField) -> GraphQLField: + def extend_field(self, field: GraphQLField) -> GraphQLField: + """Extend a GraphQL field.""" return GraphQLField( **merge_kwargs( field.to_kwargs(), - type_=replace_type(field.type), - args={name: extend_arg(arg) for name, arg in field.args.items()}, + type_=self.replace_type(field.type), + args={name: self.extend_arg(arg) for name, arg in field.args.items()}, ) ) - def extend_arg(arg: GraphQLArgument) -> GraphQLArgument: + def extend_arg(self, arg: GraphQLArgument) -> GraphQLArgument: + """Extend a GraphQL argument.""" return GraphQLArgument( **merge_kwargs( arg.to_kwargs(), - type_=replace_type(arg.type), + type_=self.replace_type(arg.type), ) ) # noinspection PyShadowingNames def get_operation_types( - nodes: Collection[Union[SchemaDefinitionNode, SchemaExtensionNode]] - ) -> Dict[OperationType, GraphQLNamedType]: + self, nodes: Collection[SchemaDefinitionNode | SchemaExtensionNode] + ) -> dict[OperationType, GraphQLNamedType]: + """Extend GraphQL operation types.""" # Note: While this could make early assertions to get the correctly # typed values below, that would throw immediately while type system # validation with validate_schema() will produce more actionable results. return { - operation_type.operation: get_named_type(operation_type.type) + operation_type.operation: self.get_named_type(operation_type.type) for node in nodes for operation_type in node.operation_types or [] } # noinspection PyShadowingNames - def get_named_type(node: NamedTypeNode) -> GraphQLNamedType: + def get_named_type(self, node: NamedTypeNode) -> GraphQLNamedType: + """Get name GraphQL type for a given named type node.""" name = node.name.value - type_ = std_type_map.get(name) or type_map.get(name) + type_ = std_type_map.get(name) or self.type_map.get(name) if not type_: - raise TypeError(f"Unknown type: '{name}'.") + msg = f"Unknown type: '{name}'." 
+ raise TypeError(msg) return type_ - def get_wrapped_type(node: TypeNode) -> GraphQLType: + def get_wrapped_type(self, node: TypeNode) -> GraphQLType: + """Get wrapped GraphQL type for a given type node.""" if isinstance(node, ListTypeNode): - return GraphQLList(get_wrapped_type(node.type)) + return GraphQLList(self.get_wrapped_type(node.type)) if isinstance(node, NonNullTypeNode): return GraphQLNonNull( - cast(GraphQLNullableType, get_wrapped_type(node.type)) + cast("GraphQLNullableType", self.get_wrapped_type(node.type)) ) - return get_named_type(cast(NamedTypeNode, node)) + return self.get_named_type(cast("NamedTypeNode", node)) - def build_directive(node: DirectiveDefinitionNode) -> GraphQLDirective: + def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective: + """Build a GraphQL directive for a given directive definition node.""" locations = [DirectiveLocation[node.value] for node in node.locations] return GraphQLDirective( @@ -406,20 +564,20 @@ def build_directive(node: DirectiveDefinitionNode) -> GraphQLDirective: description=node.description.value if node.description else None, locations=locations, is_repeatable=node.repeatable, - args=build_argument_map(node.arguments), + args=self.build_argument_map(node.arguments), ast_node=node, ) def build_field_map( + self, nodes: Collection[ - Union[ - InterfaceTypeDefinitionNode, - InterfaceTypeExtensionNode, - ObjectTypeDefinitionNode, - ObjectTypeExtensionNode, - ] + InterfaceTypeDefinitionNode + | InterfaceTypeExtensionNode + | ObjectTypeDefinitionNode + | ObjectTypeExtensionNode ], ) -> GraphQLFieldMap: + """Build a GraphQL field map.""" field_map: GraphQLFieldMap = {} for node in nodes: for field in node.fields or []: @@ -427,23 +585,25 @@ def build_field_map( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. field_map[field.name.value] = GraphQLField( - type_=cast(GraphQLOutputType, get_wrapped_type(field.type)), + type_=cast("GraphQLOutputType", self.get_wrapped_type(field.type)), description=field.description.value if field.description else None, - args=build_argument_map(field.arguments), + args=self.build_argument_map(field.arguments), deprecation_reason=get_deprecation_reason(field), ast_node=field, ) return field_map def build_argument_map( - args: Optional[Collection[InputValueDefinitionNode]], + self, + args: Collection[InputValueDefinitionNode] | None, ) -> GraphQLArgumentMap: + """Build a GraphQL argument map.""" arg_map: GraphQLArgumentMap = {} for arg in args or []: # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. 
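
# An equivalence sketch for get_wrapped_type above: a type node such as
# "[String!]" unwraps recursively into list/non-null wrappers around the named
# type, so the two values below render identically.
from graphql import GraphQLList, GraphQLNonNull, GraphQLString
from graphql.language import parse_type

type_node = parse_type("[String!]")  # ListTypeNode wrapping a NonNullTypeNode
wrapped = GraphQLList(GraphQLNonNull(GraphQLString))
assert str(wrapped) == "[String!]"
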
- type_ = cast(GraphQLInputType, get_wrapped_type(arg.type)) + type_ = cast("GraphQLInputType", self.get_wrapped_type(arg.type)) arg_map[arg.name.value] = GraphQLArgument( type_=type_, description=arg.description.value if arg.description else None, @@ -454,17 +614,17 @@ def build_argument_map( return arg_map def build_input_field_map( - nodes: Collection[ - Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] - ], + self, + nodes: Collection[InputObjectTypeDefinitionNode | InputObjectTypeExtensionNode], ) -> GraphQLInputFieldMap: + """Build a GraphQL input field map.""" input_field_map: GraphQLInputFieldMap = {} for node in nodes: for field in node.fields or []: # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. - type_ = cast(GraphQLInputType, get_wrapped_type(field.type)) + type_ = cast("GraphQLInputType", self.get_wrapped_type(field.type)) input_field_map[field.name.value] = GraphQLInputField( type_=type_, description=field.description.value if field.description else None, @@ -474,9 +634,11 @@ def build_input_field_map( ) return input_field_map + @staticmethod def build_enum_value_map( - nodes: Collection[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] + nodes: Collection[EnumTypeDefinitionNode | EnumTypeExtensionNode], ) -> GraphQLEnumValueMap: + """Build a GraphQL enum value map.""" enum_value_map: GraphQLEnumValueMap = {} for node in nodes: for value in node.values or []: @@ -493,97 +655,110 @@ def build_enum_value_map( return enum_value_map def build_interfaces( + self, nodes: Collection[ - Union[ - InterfaceTypeDefinitionNode, - InterfaceTypeExtensionNode, - ObjectTypeDefinitionNode, - ObjectTypeExtensionNode, - ] + InterfaceTypeDefinitionNode + | InterfaceTypeExtensionNode + | ObjectTypeDefinitionNode + | ObjectTypeExtensionNode ], - ) -> List[GraphQLInterfaceType]: - interfaces: List[GraphQLInterfaceType] = [] - for node in nodes: - for type_ in node.interfaces or []: - # Note: While this could make assertions to get the correctly typed - # value, that would throw immediately while type system validation - # with validate_schema() will produce more actionable results. - interfaces.append(cast(GraphQLInterfaceType, get_named_type(type_))) - return interfaces + ) -> list[GraphQLInterfaceType]: + """Build GraphQL interface types for the given nodes.""" + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema() will produce more actionable results. + return [ + cast("GraphQLInterfaceType", self.get_named_type(type_)) + for node in nodes + for type_ in node.interfaces or [] + ] def build_union_types( - nodes: Collection[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]], - ) -> List[GraphQLObjectType]: - types: List[GraphQLObjectType] = [] - for node in nodes: - for type_ in node.types or []: - # Note: While this could make assertions to get the correctly typed - # value, that would throw immediately while type system validation - # with validate_schema() will produce more actionable results. 
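
# Sketch of the new @oneOf behavior wired in below via is_one_of() and checked
# in coerce_input_value earlier in this diff; this assumes @oneOf ships as a
# built-in directive in this version. Exactly one non-null field may be set.
from graphql import build_schema
from graphql.utilities import coerce_input_value

one_of_schema = build_schema("""
    input Pet @oneOf {
        cat: String
        dog: String
    }
    type Query {
        pet(input: Pet): String
    }
""")
pet = one_of_schema.type_map["Pet"]
assert coerce_input_value({"cat": "Felix"}, pet) == {"cat": "Felix"}
# {"cat": "Felix", "dog": "Rex"} or {"cat": None} would be routed to on_error
# with "Exactly one key must be specified..." / "... must be non-null."
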
- types.append(cast(GraphQLObjectType, get_named_type(type_))) - return types + self, + nodes: Collection[UnionTypeDefinitionNode | UnionTypeExtensionNode], + ) -> list[GraphQLObjectType]: + """Build GraphQL object types for the given union type nodes.""" + # Note: While this could make assertions to get the correctly typed + # value, that would throw immediately while type system validation + # with validate_schema() will produce more actionable results. + return [ + cast("GraphQLObjectType", self.get_named_type(type_)) + for node in nodes + for type_ in node.types or [] + ] - def build_object_type(ast_node: ObjectTypeDefinitionNode) -> GraphQLObjectType: - extension_nodes = type_extensions_map[ast_node.name.value] - all_nodes: List[Union[ObjectTypeDefinitionNode, ObjectTypeExtensionNode]] = [ + def build_object_type( + self, ast_node: ObjectTypeDefinitionNode + ) -> GraphQLObjectType: + """Build a GraphQL object type for the given object type definition node.""" + extension_nodes = self.type_extensions.object[ast_node.name.value] + all_nodes: list[ObjectTypeDefinitionNode | ObjectTypeExtensionNode] = [ ast_node, *extension_nodes, ] return GraphQLObjectType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - interfaces=lambda: build_interfaces(all_nodes), - fields=lambda: build_field_map(all_nodes), + interfaces=partial(self.build_interfaces, all_nodes), + fields=partial(self.build_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) def build_interface_type( + self, ast_node: InterfaceTypeDefinitionNode, ) -> GraphQLInterfaceType: - extension_nodes = type_extensions_map[ast_node.name.value] - all_nodes: List[ - Union[InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode] - ] = [ast_node, *extension_nodes] + """Build a GraphQL interface type for the given type definition nodes.""" + extension_nodes = self.type_extensions.interface[ast_node.name.value] + all_nodes: list[InterfaceTypeDefinitionNode | InterfaceTypeExtensionNode] = [ + ast_node, + *extension_nodes, + ] return GraphQLInterfaceType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - interfaces=lambda: build_interfaces(all_nodes), - fields=lambda: build_field_map(all_nodes), + interfaces=partial(self.build_interfaces, all_nodes), + fields=partial(self.build_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) - def build_enum_type(ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: - extension_nodes = type_extensions_map[ast_node.name.value] - all_nodes: List[Union[EnumTypeDefinitionNode, EnumTypeExtensionNode]] = [ + def build_enum_type(self, ast_node: EnumTypeDefinitionNode) -> GraphQLEnumType: + """Build a GraphQL enum type for the given enum type definition nodes.""" + extension_nodes = self.type_extensions.enum[ast_node.name.value] + all_nodes: list[EnumTypeDefinitionNode | EnumTypeExtensionNode] = [ ast_node, *extension_nodes, ] return GraphQLEnumType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - values=build_enum_value_map(all_nodes), + values=self.build_enum_value_map(all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) - def build_union_type(ast_node: UnionTypeDefinitionNode) -> GraphQLUnionType: - extension_nodes = type_extensions_map[ast_node.name.value] - all_nodes: List[Union[UnionTypeDefinitionNode, UnionTypeExtensionNode]] = [ + def build_union_type(self, ast_node: 
UnionTypeDefinitionNode) -> GraphQLUnionType: + """Build a GraphQL union type for the given union type definition nodes.""" + extension_nodes = self.type_extensions.union[ast_node.name.value] + all_nodes: list[UnionTypeDefinitionNode | UnionTypeExtensionNode] = [ ast_node, *extension_nodes, ] return GraphQLUnionType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - types=lambda: build_union_types(all_nodes), + types=partial(self.build_union_types, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, ) - def build_scalar_type(ast_node: ScalarTypeDefinitionNode) -> GraphQLScalarType: - extension_nodes = type_extensions_map[ast_node.name.value] + def build_scalar_type( + self, ast_node: ScalarTypeDefinitionNode + ) -> GraphQLScalarType: + """Build a GraphQL scalar type for the given scalar type definition node.""" + extension_nodes = self.type_extensions.scalar[ast_node.name.value] return GraphQLScalarType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, @@ -593,94 +768,51 @@ def build_scalar_type(ast_node: ScalarTypeDefinitionNode) -> GraphQLScalarType: ) def build_input_object_type( + self, ast_node: InputObjectTypeDefinitionNode, ) -> GraphQLInputObjectType: - extension_nodes = type_extensions_map[ast_node.name.value] - all_nodes: List[ - Union[InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode] + """Build a GraphQL input object type for the given node.""" + extension_nodes = self.type_extensions.input_object[ast_node.name.value] + all_nodes: list[ + InputObjectTypeDefinitionNode | InputObjectTypeExtensionNode ] = [ast_node, *extension_nodes] return GraphQLInputObjectType( name=ast_node.name.value, description=ast_node.description.value if ast_node.description else None, - fields=lambda: build_input_field_map(all_nodes), + fields=partial(self.build_input_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, + is_one_of=is_one_of(ast_node), ) - build_type_for_kind = cast( - Dict[str, Callable[[TypeDefinitionNode], GraphQLNamedType]], - { - "object_type_definition": build_object_type, - "interface_type_definition": build_interface_type, - "enum_type_definition": build_enum_type, - "union_type_definition": build_union_type, - "scalar_type_definition": build_scalar_type, - "input_object_type_definition": build_input_object_type, - }, - ) - - def build_type(ast_node: TypeDefinitionNode) -> GraphQLNamedType: + def build_type(self, ast_node: TypeDefinitionNode) -> GraphQLNamedType: + """Build a named GraphQL type for the given type definition node.""" + kind = ast_node.kind + try: + kind = kind.removesuffix("_definition") + except AttributeError: # pragma: no cover (Python < 3.9) + if kind.endswith("_definition"): + kind = kind[:-11] try: - # object_type_definition_node is built with _build_object_type etc. - build_function = build_type_for_kind[ast_node.kind] - except KeyError: # pragma: no cover + build = getattr(self, f"build_{kind}") + except AttributeError as error: # pragma: no cover # Not reachable. All possible type definition nodes have been considered. - raise TypeError( # pragma: no cover + msg = ( # pragma: no cover f"Unexpected type definition node: {inspect(ast_node)}." 
) - else: - return build_function(ast_node) - - type_map: Dict[str, GraphQLNamedType] = {} - for existing_type in schema_kwargs["types"] or (): - type_map[existing_type.name] = extend_named_type(existing_type) - for type_node in type_defs: - name = type_node.name.value - type_map[name] = std_type_map.get(name) or build_type(type_node) - - # Get the extended root operation types. - operation_types: Dict[OperationType, GraphQLNamedType] = {} - for operation_type in OperationType: - original_type = schema_kwargs[operation_type.value] - if original_type: - operation_types[operation_type] = replace_named_type(original_type) - # Then, incorporate schema definition and all schema extensions. - if schema_def: - operation_types.update(get_operation_types([schema_def])) - if schema_extensions: - operation_types.update(get_operation_types(schema_extensions)) - - # Then produce and return the kwargs for a Schema with these types. - get_operation = operation_types.get - return GraphQLSchemaKwargs( - query=get_operation(OperationType.QUERY), # type: ignore - mutation=get_operation(OperationType.MUTATION), # type: ignore - subscription=get_operation(OperationType.SUBSCRIPTION), # type: ignore - types=tuple(type_map.values()), - directives=tuple( - replace_directive(directive) for directive in schema_kwargs["directives"] - ) - + tuple(build_directive(directive) for directive in directive_defs), - description=schema_def.description.value - if schema_def and schema_def.description - else None, - extensions={}, - ast_node=schema_def or schema_kwargs["ast_node"], - extension_ast_nodes=schema_kwargs["extension_ast_nodes"] - + tuple(schema_extensions), - assume_valid=assume_valid, - ) + raise TypeError(msg) from error # pragma: no cover + return build(ast_node) -std_type_map: Mapping[str, Union[GraphQLNamedType, GraphQLObjectType]] = { +std_type_map: Mapping[str, GraphQLNamedType | GraphQLObjectType] = { **specified_scalar_types, **introspection_types, } def get_deprecation_reason( - node: Union[EnumValueDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode] -) -> Optional[str]: + node: EnumValueDefinitionNode | FieldDefinitionNode | InputValueDefinitionNode, +) -> str | None: """Given a field or enum value node, get deprecation reason as string.""" from ..execution import get_directive_values @@ -689,10 +821,17 @@ def get_deprecation_reason( def get_specified_by_url( - node: Union[ScalarTypeDefinitionNode, ScalarTypeExtensionNode] -) -> Optional[str]: + node: ScalarTypeDefinitionNode | ScalarTypeExtensionNode, +) -> str | None: """Given a scalar node, return the string value for the specifiedByURL.""" from ..execution import get_directive_values specified_by_url = get_directive_values(GraphQLSpecifiedByDirective, node) return specified_by_url["url"] if specified_by_url else None + + +def is_one_of(node: InputObjectTypeDefinitionNode) -> bool: + """Given an input object node, returns if the node should be OneOf.""" + from ..execution import get_directive_values + + return get_directive_values(GraphQLOneOfDirective, node) is not None diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index 281a2def..d2a03ad2 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -1,16 +1,19 @@ +"""Find breaking changes between GraphQL schemas""" + +from __future__ import annotations + from enum import Enum -from typing import Any, Collection, Dict, List, NamedTuple, Union, cast +from typing import Any, 
Collection, NamedTuple, Union from ..language import print_ast -from ..pyutils import inspect, Undefined +from ..pyutils import Undefined, inspect from ..type import ( GraphQLEnumType, GraphQLField, - GraphQLList, - GraphQLNamedType, - GraphQLNonNull, + GraphQLInputObjectType, GraphQLInputType, GraphQLInterfaceType, + GraphQLNamedType, GraphQLObjectType, GraphQLSchema, GraphQLType, @@ -31,6 +34,12 @@ from ..utilities.sort_value_node import sort_value_node from .ast_from_value import ast_from_value +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = [ "BreakingChange", "BreakingChangeType", @@ -42,6 +51,8 @@ class BreakingChangeType(Enum): + """Types of breaking changes""" + TYPE_REMOVED = 10 TYPE_CHANGED_KIND = 11 TYPE_REMOVED_FROM_UNION = 20 @@ -61,6 +72,8 @@ class BreakingChangeType(Enum): class DangerousChangeType(Enum): + """Types of dangerous changes""" + VALUE_ADDED_TO_ENUM = 60 TYPE_ADDED_TO_UNION = 61 OPTIONAL_INPUT_FIELD_ADDED = 62 @@ -70,21 +83,25 @@ class DangerousChangeType(Enum): class BreakingChange(NamedTuple): + """Type and description of a breaking change""" + type: BreakingChangeType description: str class DangerousChange(NamedTuple): + """Type and description of a dangerous change""" + type: DangerousChangeType description: str -Change = Union[BreakingChange, DangerousChange] +Change: TypeAlias = Union[BreakingChange, DangerousChange] def find_breaking_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[BreakingChange]: +) -> list[BreakingChange]: """Find breaking changes. Given two schemas, returns a list containing descriptions of all the types of @@ -99,7 +116,7 @@ def find_breaking_changes( def find_dangerous_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[DangerousChange]: +) -> list[DangerousChange]: """Find dangerous changes. 
Given two schemas, returns a list containing descriptions of all the types of @@ -114,7 +131,7 @@ def find_dangerous_changes( def find_schema_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[Change]: +) -> list[Change]: return find_type_changes(old_schema, new_schema) + find_directive_changes( old_schema, new_schema ) @@ -122,8 +139,8 @@ def find_schema_changes( def find_directive_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] directives_diff = list_diff(old_schema.directives, new_schema.directives) @@ -134,7 +151,7 @@ def find_directive_changes( ) ) - for (old_directive, new_directive) in directives_diff.persisted: + for old_directive, new_directive in directives_diff.persisted: args_diff = dict_diff(old_directive.args, new_directive.args) for arg_name, new_arg in args_diff.added.items(): @@ -177,8 +194,8 @@ def find_directive_changes( def find_type_changes( old_schema: GraphQLSchema, new_schema: GraphQLSchema -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] types_diff = dict_diff(old_schema.type_map, new_schema.type_map) for type_name, old_type in types_diff.removed.items(): @@ -199,12 +216,9 @@ def find_type_changes( schema_changes.extend(find_union_type_changes(old_type, new_type)) elif is_input_object_type(old_type) and is_input_object_type(new_type): schema_changes.extend(find_input_object_type_changes(old_type, new_type)) - elif is_object_type(old_type) and is_object_type(new_type): - schema_changes.extend(find_field_changes(old_type, new_type)) - schema_changes.extend( - find_implemented_interfaces_changes(old_type, new_type) - ) - elif is_interface_type(old_type) and is_interface_type(new_type): + elif (is_object_type(old_type) and is_object_type(new_type)) or ( + is_interface_type(old_type) and is_interface_type(new_type) + ): schema_changes.extend(find_field_changes(old_type, new_type)) schema_changes.extend( find_implemented_interfaces_changes(old_type, new_type) @@ -222,10 +236,10 @@ def find_type_changes( def find_input_object_type_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], - new_type: Union[GraphQLObjectType, GraphQLInterfaceType], -) -> List[Change]: - schema_changes: List[Change] = [] + old_type: GraphQLInputObjectType, + new_type: GraphQLInputObjectType, +) -> list[Change]: + schema_changes: list[Change] = [] fields_diff = dict_diff(old_type.fields, new_type.fields) for field_name, new_field in fields_diff.added.items(): @@ -272,15 +286,15 @@ def find_input_object_type_changes( def find_union_type_changes( old_type: GraphQLUnionType, new_type: GraphQLUnionType -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] possible_types_diff = list_diff(old_type.types, new_type.types) for possible_type in possible_types_diff.added: schema_changes.append( DangerousChange( DangerousChangeType.TYPE_ADDED_TO_UNION, - f"{possible_type.name} was added" f" to union type {old_type.name}.", + f"{possible_type.name} was added to union type {old_type.name}.", ) ) @@ -297,8 +311,8 @@ def find_union_type_changes( def find_enum_type_changes( old_type: GraphQLEnumType, new_type: GraphQLEnumType -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] values_diff = dict_diff(old_type.values, new_type.values) for value_name in values_diff.added: @@ 
-321,10 +335,10 @@ def find_enum_type_changes( def find_implemented_interfaces_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], - new_type: Union[GraphQLObjectType, GraphQLInterfaceType], -) -> List[Change]: - schema_changes: List[Change] = [] + old_type: GraphQLObjectType | GraphQLInterfaceType, + new_type: GraphQLObjectType | GraphQLInterfaceType, +) -> list[Change]: + schema_changes: list[Change] = [] interfaces_diff = list_diff(old_type.interfaces, new_type.interfaces) for interface in interfaces_diff.added: @@ -347,10 +361,10 @@ def find_implemented_interfaces_changes( def find_field_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], - new_type: Union[GraphQLObjectType, GraphQLInterfaceType], -) -> List[Change]: - schema_changes: List[Change] = [] + old_type: GraphQLObjectType | GraphQLInterfaceType, + new_type: GraphQLObjectType | GraphQLInterfaceType, +) -> list[Change]: + schema_changes: list[Change] = [] fields_diff = dict_diff(old_type.fields, new_type.fields) for field_name in fields_diff.removed: @@ -381,19 +395,19 @@ def find_field_changes( def find_arg_changes( - old_type: Union[GraphQLObjectType, GraphQLInterfaceType], + old_type: GraphQLObjectType | GraphQLInterfaceType, field_name: str, old_field: GraphQLField, new_field: GraphQLField, -) -> List[Change]: - schema_changes: List[Change] = [] +) -> list[Change]: + schema_changes: list[Change] = [] args_diff = dict_diff(old_field.args, new_field.args) for arg_name in args_diff.removed: schema_changes.append( BreakingChange( BreakingChangeType.ARG_REMOVED, - f"{old_type.name}.{field_name} arg" f" {arg_name} was removed.", + f"{old_type.name}.{field_name} arg {arg_name} was removed.", ) ) @@ -465,14 +479,12 @@ def is_change_safe_for_object_or_interface_field( # if they're both lists, make sure underlying types are compatible is_list_type(new_type) and is_change_safe_for_object_or_interface_field( - cast(GraphQLList, old_type).of_type, cast(GraphQLList, new_type).of_type + old_type.of_type, new_type.of_type ) ) or ( # moving from nullable to non-null of same underlying type is safe is_non_null_type(new_type) - and is_change_safe_for_object_or_interface_field( - old_type, cast(GraphQLNonNull, new_type).of_type - ) + and is_change_safe_for_object_or_interface_field(old_type, new_type.of_type) ) if is_non_null_type(old_type): @@ -480,34 +492,33 @@ def is_change_safe_for_object_or_interface_field( return is_non_null_type( new_type ) and is_change_safe_for_object_or_interface_field( - cast(GraphQLNonNull, old_type).of_type, - cast(GraphQLNonNull, new_type).of_type, + old_type.of_type, new_type.of_type ) - return ( - # if they're both named types, see if their names are equivalent - is_named_type(new_type) - and cast(GraphQLNamedType, old_type).name - == cast(GraphQLNamedType, new_type).name - ) or ( - # moving from nullable to non-null of same underlying type is safe - is_non_null_type(new_type) - and is_change_safe_for_object_or_interface_field( - old_type, cast(GraphQLNonNull, new_type).of_type + if is_named_type(old_type): + return ( + # if they're both named types, see if their names are equivalent + is_named_type(new_type) and old_type.name == new_type.name + ) or ( + # moving from nullable to non-null of same underlying type is safe + is_non_null_type(new_type) + and is_change_safe_for_object_or_interface_field(old_type, new_type.of_type) ) - ) + + # Not reachable. All possible output types have been considered. 
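For reference, the rule encoded by `is_change_safe_for_object_or_interface_field` can be observed end to end through the public API. A minimal sketch (schema and field names are illustrative; exact change descriptions may vary between releases):

```python
from graphql import build_schema, find_breaking_changes

old_schema = build_schema("type Query { name: String }")
new_schema = build_schema("type Query { name: String! }")

# For output positions, tightening nullable -> non-null is safe ...
assert not find_breaking_changes(old_schema, new_schema)

# ... while the reverse direction changes the kind of the field.
for change in find_breaking_changes(new_schema, old_schema):
    print(change.type.name, "-", change.description)
# e.g. FIELD_CHANGED_KIND - Query.name changed type from String! to String.
```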
+ msg = f"Unexpected type {inspect(old_type)}" # pragma: no cover + raise TypeError(msg) # pragma: no cover def is_change_safe_for_input_object_field_or_field_arg( old_type: GraphQLType, new_type: GraphQLType ) -> bool: if is_list_type(old_type): - return is_list_type( # if they're both lists, make sure underlying types are compatible new_type ) and is_change_safe_for_input_object_field_or_field_arg( - cast(GraphQLList, old_type).of_type, cast(GraphQLList, new_type).of_type + old_type.of_type, new_type.of_type ) if is_non_null_type(old_type): @@ -515,23 +526,25 @@ def is_change_safe_for_input_object_field_or_field_arg( # if they're both non-null, make sure the underlying types are compatible is_non_null_type(new_type) and is_change_safe_for_input_object_field_or_field_arg( - cast(GraphQLNonNull, old_type).of_type, - cast(GraphQLNonNull, new_type).of_type, + old_type.of_type, new_type.of_type ) ) or ( # moving from non-null to nullable of same underlying type is safe not is_non_null_type(new_type) and is_change_safe_for_input_object_field_or_field_arg( - cast(GraphQLNonNull, old_type).of_type, new_type + old_type.of_type, new_type ) ) - return ( - # if they're both named types, see if their names are equivalent - is_named_type(new_type) - and cast(GraphQLNamedType, old_type).name - == cast(GraphQLNamedType, new_type).name - ) + if is_named_type(old_type): + return ( + # if they're both named types, see if their names are equivalent + is_named_type(new_type) and old_type.name == new_type.name + ) + + # Not reachable. All possible output types have been considered. + msg = f"Unexpected type {inspect(old_type)}" # pragma: no cover + raise TypeError(msg) # pragma: no cover def type_kind_name(type_: GraphQLNamedType) -> str: @@ -549,22 +562,24 @@ def type_kind_name(type_: GraphQLNamedType) -> str: return "an Input type" # Not reachable. All possible output types have been considered. 
- raise TypeError(f"Unexpected type {inspect(type)}") + msg = f"Unexpected type {inspect(type_)}" # pragma: no cover + raise TypeError(msg) # pragma: no cover def stringify_value(value: Any, type_: GraphQLInputType) -> str: ast = ast_from_value(value, type_) if ast is None: # pragma: no cover - raise TypeError(f"Invalid value: {inspect(value)}") + msg = f"Invalid value: {inspect(value)}" + raise TypeError(msg) return print_ast(sort_value_node(ast)) class ListDiff(NamedTuple): """Tuple with added, removed and persisted list items.""" - added: List - removed: List - persisted: List + added: list + removed: list + persisted: list def list_diff(old_list: Collection, new_list: Collection) -> ListDiff: @@ -593,12 +608,12 @@ def list_diff(old_list: Collection, new_list: Collection) -> ListDiff: class DictDiff(NamedTuple): """Tuple with added, removed and persisted dict entries.""" - added: Dict - removed: Dict - persisted: Dict + added: dict + removed: dict + persisted: dict -def dict_diff(old_dict: Dict, new_dict: Dict) -> DictDiff: +def dict_diff(old_dict: dict, new_dict: dict) -> DictDiff: """Get differences between two dicts.""" added = {} removed = {} diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index aed2348a..adf038ac 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -1,15 +1,24 @@ +"""Get introspection query""" + +from __future__ import annotations + from textwrap import dedent -from typing import Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Union -from ..language import DirectiveLocation +if TYPE_CHECKING: + from ..language import DirectiveLocation try: - from typing import TypedDict, Literal + from typing import Literal, TypedDict except ImportError: # Python < 3.8 - from typing_extensions import TypedDict, Literal # type: ignore + from typing_extensions import Literal, TypedDict # type: ignore +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + __all__ = [ - "get_introspection_query", "IntrospectionDirective", "IntrospectionEnumType", "IntrospectionField", @@ -25,6 +34,7 @@ "IntrospectionType", "IntrospectionTypeRef", "IntrospectionUnionType", + "get_introspection_query", ] @@ -46,7 +56,7 @@ def get_introspection_query( maybe_directive_is_repeatable = "isRepeatable" if directive_is_repeatable else "" maybe_schema_description = maybe_description if schema_description else "" - def input_deprecation(string: str) -> Optional[str]: + def input_deprecation(string: str) -> str | None: return string if input_value_deprecation else "" return dedent( @@ -139,6 +149,14 @@ def input_deprecation(string: str) -> Optional[str]: ofType {{ kind name + ofType {{ + kind + name + ofType {{ + kind + name + }} + }} }} }} }} @@ -157,11 +175,11 @@ def input_deprecation(string: str) -> Optional[str]: # - no generic typed dicts, see https://github.com/python/mypy/issues/3863 # simplified IntrospectionNamedType to avoids cycles -SimpleIntrospectionType = Dict[str, Any] +SimpleIntrospectionType: TypeAlias = Dict[str, Any] class MaybeWithDescription(TypedDict, total=False): - description: Optional[str] + description: str | None class WithName(MaybeWithDescription): @@ -169,26 +187,26 @@ class WithName(MaybeWithDescription): class MaybeWithSpecifiedByUrl(TypedDict, total=False): - specifiedByURL: Optional[str] + specifiedByURL: str | None class WithDeprecated(TypedDict): 
isDeprecated: bool - deprecationReason: Optional[str] + deprecationReason: str | None class MaybeWithDeprecated(TypedDict, total=False): isDeprecated: bool - deprecationReason: Optional[str] + deprecationReason: str | None class IntrospectionInputValue(WithName, MaybeWithDeprecated): type: SimpleIntrospectionType # should be IntrospectionInputType - defaultValue: Optional[str] + defaultValue: str | None class IntrospectionField(WithName, WithDeprecated): - args: List[IntrospectionInputValue] + args: list[IntrospectionInputValue] type: SimpleIntrospectionType # should be IntrospectionOutputType @@ -201,8 +219,8 @@ class MaybeWithIsRepeatable(TypedDict, total=False): class IntrospectionDirective(WithName, MaybeWithIsRepeatable): - locations: List[DirectiveLocation] - args: List[IntrospectionInputValue] + locations: list[DirectiveLocation] + args: list[IntrospectionInputValue] class IntrospectionScalarType(WithName, MaybeWithSpecifiedByUrl): @@ -211,33 +229,33 @@ class IntrospectionScalarType(WithName, MaybeWithSpecifiedByUrl): class IntrospectionInterfaceType(WithName): kind: Literal["interface"] - fields: List[IntrospectionField] - interfaces: List[SimpleIntrospectionType] # should be InterfaceType - possibleTypes: List[SimpleIntrospectionType] # should be NamedType + fields: list[IntrospectionField] + interfaces: list[SimpleIntrospectionType] # should be InterfaceType + possibleTypes: list[SimpleIntrospectionType] # should be NamedType class IntrospectionObjectType(WithName): kind: Literal["object"] - fields: List[IntrospectionField] - interfaces: List[SimpleIntrospectionType] # should be InterfaceType + fields: list[IntrospectionField] + interfaces: list[SimpleIntrospectionType] # should be InterfaceType class IntrospectionUnionType(WithName): kind: Literal["union"] - possibleTypes: List[SimpleIntrospectionType] # should be NamedType + possibleTypes: list[SimpleIntrospectionType] # should be NamedType class IntrospectionEnumType(WithName): kind: Literal["enum"] - enumValues: List[IntrospectionEnumValue] + enumValues: list[IntrospectionEnumValue] class IntrospectionInputObjectType(WithName): kind: Literal["input_object"] - inputFields: List[IntrospectionInputValue] + inputFields: list[IntrospectionInputValue] -IntrospectionType = Union[ +IntrospectionType: TypeAlias = Union[ IntrospectionScalarType, IntrospectionObjectType, IntrospectionInterfaceType, @@ -247,7 +265,7 @@ class IntrospectionInputObjectType(WithName): ] -IntrospectionOutputType = Union[ +IntrospectionOutputType: TypeAlias = Union[ IntrospectionScalarType, IntrospectionObjectType, IntrospectionInterfaceType, @@ -256,7 +274,7 @@ class IntrospectionInputObjectType(WithName): ] -IntrospectionInputType = Union[ +IntrospectionInputType: TypeAlias = Union[ IntrospectionScalarType, IntrospectionEnumType, IntrospectionInputObjectType ] @@ -271,20 +289,22 @@ class IntrospectionNonNullType(TypedDict): ofType: SimpleIntrospectionType # should be IntrospectionType -IntrospectionTypeRef = Union[ +IntrospectionTypeRef: TypeAlias = Union[ IntrospectionType, IntrospectionListType, IntrospectionNonNullType ] class IntrospectionSchema(MaybeWithDescription): queryType: IntrospectionObjectType - mutationType: Optional[IntrospectionObjectType] - subscriptionType: Optional[IntrospectionObjectType] - types: List[IntrospectionType] - directives: List[IntrospectionDirective] + mutationType: IntrospectionObjectType | None + subscriptionType: IntrospectionObjectType | None + types: list[IntrospectionType] + directives: list[IntrospectionDirective] 
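The typed dictionaries above describe the shape of an introspection result such as the one returned by `introspection_from_schema`. A minimal sketch (the `hello` field is just a placeholder):

```python
from graphql import build_schema, introspection_from_schema

schema = build_schema("type Query { hello: String }")
introspection = introspection_from_schema(schema)  # an IntrospectionQuery

schema_part = introspection["__schema"]  # an IntrospectionSchema
print(schema_part["queryType"]["name"])  # Query
print(len(schema_part["types"]), "types,", len(schema_part["directives"]), "directives")
```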
-class IntrospectionQuery(TypedDict): - """The root typed dictionary for schema introspections.""" - - __schema: IntrospectionSchema +# The root typed dictionary for schema introspections. +# Note: We don't use class syntax here since the key looks like a private attribute. +IntrospectionQuery = TypedDict( + "IntrospectionQuery", + {"__schema": IntrospectionSchema}, +) diff --git a/src/graphql/utilities/get_operation_ast.py b/src/graphql/utilities/get_operation_ast.py index b7d79317..2323e57f 100644 --- a/src/graphql/utilities/get_operation_ast.py +++ b/src/graphql/utilities/get_operation_ast.py @@ -1,4 +1,6 @@ -from typing import Optional +"""Get operation AST node""" + +from __future__ import annotations from ..language import DocumentNode, OperationDefinitionNode @@ -6,8 +8,8 @@ def get_operation_ast( - document_ast: DocumentNode, operation_name: Optional[str] = None -) -> Optional[OperationDefinitionNode]: + document_ast: DocumentNode, operation_name: str | None = None +) -> OperationDefinitionNode | None: """Get operation AST node. Returns an operation AST given a document AST and optionally an operation diff --git a/src/graphql/utilities/get_operation_root_type.py b/src/graphql/utilities/get_operation_root_type.py deleted file mode 100644 index be0e5e6e..00000000 --- a/src/graphql/utilities/get_operation_root_type.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Union - -from ..error import GraphQLError -from ..language import ( - OperationType, - OperationDefinitionNode, - OperationTypeDefinitionNode, -) -from ..type import GraphQLObjectType, GraphQLSchema - -__all__ = ["get_operation_root_type"] - - -def get_operation_root_type( - schema: GraphQLSchema, - operation: Union[OperationDefinitionNode, OperationTypeDefinitionNode], -) -> GraphQLObjectType: - """Extract the root type of the operation from the schema. - - .. deprecated:: 3.2 - Please use `GraphQLSchema.getRootType` instead. Will be removed in v3.3. 
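With `get_operation_root_type` removed, the same lookup can be done directly via the schema's root-type properties; a sketch of one replacement pattern (the operation name is illustrative):

```python
from graphql import OperationType, build_schema, get_operation_ast, parse

schema = build_schema("type Query { hello: String }")
document = parse("query HelloQuery { hello }")

operation = get_operation_ast(document, "HelloQuery")
assert operation is not None and operation.operation is OperationType.QUERY

root_type = schema.query_type  # previously: get_operation_root_type(schema, operation)
print(root_type)  # Query
```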
- """ - operation_type = operation.operation - if operation_type == OperationType.QUERY: - query_type = schema.query_type - if not query_type: - raise GraphQLError( - "Schema does not define the required query root type.", operation - ) - return query_type - - if operation_type == OperationType.MUTATION: - mutation_type = schema.mutation_type - if not mutation_type: - raise GraphQLError("Schema is not configured for mutations.", operation) - return mutation_type - - if operation_type == OperationType.SUBSCRIPTION: - subscription_type = schema.subscription_type - if not subscription_type: - raise GraphQLError("Schema is not configured for subscriptions.", operation) - return subscription_type - - raise GraphQLError( - "Can only have query, mutation and subscription operations.", operation - ) diff --git a/src/graphql/utilities/introspection_from_schema.py b/src/graphql/utilities/introspection_from_schema.py index e0634860..a0440a32 100644 --- a/src/graphql/utilities/introspection_from_schema.py +++ b/src/graphql/utilities/introspection_from_schema.py @@ -1,9 +1,15 @@ -from typing import cast +"""Building introspection queries from GraphQL schemas""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, cast from ..error import GraphQLError from ..language import parse -from ..type import GraphQLSchema -from .get_introspection_query import get_introspection_query, IntrospectionQuery +from .get_introspection_query import IntrospectionQuery, get_introspection_query + +if TYPE_CHECKING: + from ..type import GraphQLSchema __all__ = ["introspection_from_schema"] @@ -34,13 +40,15 @@ def introspection_from_schema( ) ) - from ..execution.execute import execute_sync, ExecutionResult + from ..execution.execute import ExecutionResult, execute_sync result = execute_sync(schema, document) if not isinstance(result, ExecutionResult): # pragma: no cover - raise RuntimeError("Introspection cannot be executed") + msg = "Introspection cannot be executed" + raise RuntimeError(msg) # noqa: TRY004 if result.errors: # pragma: no cover raise result.errors[0] if not result.data: # pragma: no cover - raise GraphQLError("Introspection did not return a result") - return cast(IntrospectionQuery, result.data) + msg = "Introspection did not return a result" + raise GraphQLError(msg) + return cast("IntrospectionQuery", result.data) diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index de6326cb..de675a94 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -1,6 +1,9 @@ -from typing import Collection, Dict, Optional, Tuple, Union, cast +"""Sorting GraphQL schemas""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection, Optional, cast -from ..language import DirectiveLocation from ..pyutils import inspect, merge_kwargs, natural_comparison_key from ..type import ( GraphQLArgument, @@ -29,6 +32,9 @@ is_union_type, ) +if TYPE_CHECKING: + from ..language import DirectiveLocation + __all__ = ["lexicographic_sort_schema"] @@ -39,20 +45,20 @@ def lexicographic_sort_schema(schema: GraphQLSchema) -> GraphQLSchema: """ def replace_type( - type_: Union[GraphQLList, GraphQLNonNull, GraphQLNamedType] - ) -> Union[GraphQLList, GraphQLNonNull, GraphQLNamedType]: + type_: GraphQLList | GraphQLNonNull | GraphQLNamedType, + ) -> GraphQLList | GraphQLNonNull | GraphQLNamedType: if is_list_type(type_): - return GraphQLList(replace_type(cast(GraphQLList, 
type_).of_type)) + return GraphQLList(replace_type(type_.of_type)) if is_non_null_type(type_): - return GraphQLNonNull(replace_type(cast(GraphQLNonNull, type_).of_type)) - return replace_named_type(cast(GraphQLNamedType, type_)) + return GraphQLNonNull(replace_type(type_.of_type)) + return replace_named_type(cast("GraphQLNamedType", type_)) def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: return type_map[type_.name] def replace_maybe_type( - maybe_type: Optional[GraphQLNamedType], - ) -> Optional[GraphQLNamedType]: + maybe_type: GraphQLNamedType | None, + ) -> GraphQLNamedType | None: return maybe_type and replace_named_type(maybe_type) def sort_directive(directive: GraphQLDirective) -> GraphQLDirective: @@ -64,36 +70,37 @@ def sort_directive(directive: GraphQLDirective) -> GraphQLDirective: ) ) - def sort_args(args_map: Dict[str, GraphQLArgument]) -> Dict[str, GraphQLArgument]: + def sort_args(args_map: dict[str, GraphQLArgument]) -> dict[str, GraphQLArgument]: args = {} for name, arg in sorted(args_map.items()): args[name] = GraphQLArgument( **merge_kwargs( arg.to_kwargs(), - type_=replace_type(cast(GraphQLNamedType, arg.type)), + type_=replace_type(cast("GraphQLNamedType", arg.type)), ) ) return args - def sort_fields(fields_map: Dict[str, GraphQLField]) -> Dict[str, GraphQLField]: + def sort_fields(fields_map: dict[str, GraphQLField]) -> dict[str, GraphQLField]: fields = {} for name, field in sorted(fields_map.items()): fields[name] = GraphQLField( **merge_kwargs( field.to_kwargs(), - type_=replace_type(cast(GraphQLNamedType, field.type)), + type_=replace_type(cast("GraphQLNamedType", field.type)), args=sort_args(field.args), ) ) return fields def sort_input_fields( - fields_map: Dict[str, GraphQLInputField] - ) -> Dict[str, GraphQLInputField]: + fields_map: dict[str, GraphQLInputField], + ) -> dict[str, GraphQLInputField]: return { name: GraphQLInputField( cast( - GraphQLInputType, replace_type(cast(GraphQLNamedType, field.type)) + "GraphQLInputType", + replace_type(cast("GraphQLNamedType", field.type)), ), description=field.description, default_value=field.default_value, @@ -102,7 +109,7 @@ def sort_input_fields( for name, field in sorted(fields_map.items()) } - def sort_types(array: Collection[GraphQLNamedType]) -> Tuple[GraphQLNamedType, ...]: + def sort_types(array: Collection[GraphQLNamedType]) -> tuple[GraphQLNamedType, ...]: return tuple( replace_named_type(type_) for type_ in sorted(array, key=sort_by_name_key) ) @@ -111,7 +118,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: if is_scalar_type(type_) or is_introspection_type(type_): return type_ if is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) return GraphQLObjectType( **merge_kwargs( type_.to_kwargs(), @@ -120,7 +126,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) ) if is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) return GraphQLInterfaceType( **merge_kwargs( type_.to_kwargs(), @@ -129,12 +134,10 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) ) if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) return GraphQLUnionType( **merge_kwargs(type_.to_kwargs(), types=lambda: sort_types(type_.types)) ) if is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) return GraphQLEnumType( **merge_kwargs( type_.to_kwargs(), @@ -150,7 +153,6 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) ) if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) 
return GraphQLInputObjectType( **merge_kwargs( type_.to_kwargs(), @@ -159,9 +161,10 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: ) # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover - type_map: Dict[str, GraphQLNamedType] = { + type_map: dict[str, GraphQLNamedType] = { type_.name: sort_named_type(type_) for type_ in sorted(schema.type_map.values(), key=sort_by_name_key) } @@ -172,18 +175,20 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: sort_directive(directive) for directive in sorted(schema.directives, key=sort_by_name_key) ], - query=cast(Optional[GraphQLObjectType], replace_maybe_type(schema.query_type)), + query=cast( + "Optional[GraphQLObjectType]", replace_maybe_type(schema.query_type) + ), mutation=cast( - Optional[GraphQLObjectType], replace_maybe_type(schema.mutation_type) + "Optional[GraphQLObjectType]", replace_maybe_type(schema.mutation_type) ), subscription=cast( - Optional[GraphQLObjectType], replace_maybe_type(schema.subscription_type) + "Optional[GraphQLObjectType]", replace_maybe_type(schema.subscription_type) ), ast_node=schema.ast_node, ) def sort_by_name_key( - type_: Union[GraphQLNamedType, GraphQLDirective, DirectiveLocation] -) -> Tuple: + type_: GraphQLNamedType | GraphQLDirective | DirectiveLocation, +) -> tuple: return natural_comparison_key(type_.name) diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 55a6a58a..dd68e54e 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -1,6 +1,10 @@ -from typing import Any, Callable, Dict, List, Optional, Union, cast +"""Printing GraphQL Schemas in SDL format""" -from ..language import print_ast, StringValueNode +from __future__ import annotations + +from typing import Any, Callable + +from ..language import StringValueNode, print_ast from ..language.block_string import is_printable_as_block_string from ..pyutils import inspect from ..type import ( @@ -24,26 +28,34 @@ is_object_type, is_scalar_type, is_specified_directive, - is_specified_scalar_type, is_union_type, ) from .ast_from_value import ast_from_value -__all__ = ["print_schema", "print_introspection_schema", "print_type", "print_value"] +__all__ = [ + "print_directive", + "print_introspection_schema", + "print_schema", + "print_type", + "print_value", +] def print_schema(schema: GraphQLSchema) -> str: + """Print the given GraphQL schema in SDL format.""" return print_filtered_schema( schema, lambda n: not is_specified_directive(n), is_defined_type ) def print_introspection_schema(schema: GraphQLSchema) -> str: + """Print the built-in introspection schema in SDL format.""" return print_filtered_schema(schema, is_specified_directive, is_introspection_type) def is_defined_type(type_: GraphQLNamedType) -> bool: - return not is_specified_scalar_type(type_) and not is_introspection_type(type_) + """Check if the given named GraphQL type is a defined type.""" + return type_.name not in GraphQLNamedType.reserved_types def print_filtered_schema( @@ -51,6 +63,7 @@ def print_filtered_schema( directive_filter: Callable[[GraphQLDirective], bool], type_filter: Callable[[GraphQLNamedType], bool], ) -> str: + """Print a GraphQL schema filtered by the specified directives and types.""" directives = filter(directive_filter, schema.directives) types = filter(type_filter, 
schema.type_map.values()) @@ -63,79 +76,85 @@ def print_filtered_schema( ) -def print_schema_definition(schema: GraphQLSchema) -> Optional[str]: - if schema.description is None and is_schema_of_common_names(schema): - return None - - operation_types = [] - +def print_schema_definition(schema: GraphQLSchema) -> str | None: + """Print GraphQL schema definitions.""" query_type = schema.query_type - if query_type: - operation_types.append(f" query: {query_type.name}") - mutation_type = schema.mutation_type - if mutation_type: - operation_types.append(f" mutation: {mutation_type.name}") - subscription_type = schema.subscription_type - if subscription_type: - operation_types.append(f" subscription: {subscription_type.name}") - return print_description(schema) + "schema {\n" + "\n".join(operation_types) + "\n}" + # Special case: When a schema has no root operation types, no valid schema + # definition can be printed. + if not query_type and not mutation_type and not subscription_type: + return None + + # Only print a schema definition if there is a description or if it should + # not be omitted because of having default type names. + if not (schema.description is None and has_default_root_operation_types(schema)): + return ( + print_description(schema) + + "schema {\n" + + (f" query: {query_type.name}\n" if query_type else "") + + (f" mutation: {mutation_type.name}\n" if mutation_type else "") + + ( + f" subscription: {subscription_type.name}\n" + if subscription_type + else "" + ) + + "}" + ) + + return None -def is_schema_of_common_names(schema: GraphQLSchema) -> bool: - """Check whether this schema uses the common naming convention. +def has_default_root_operation_types(schema: GraphQLSchema) -> bool: + """Check whether a schema uses the default root operation type names. GraphQL schema define root types for each type of operation. These types are the same as any other type and can be named in any manner, however there is a common - naming convention: - - schema { - query: Query - mutation: Mutation - subscription: Subscription - } + naming convention:: - When using this naming convention, the schema description can be omitted. - """ - query_type = schema.query_type - if query_type and query_type.name != "Query": - return False + schema { + query: Query + mutation: Mutation + subscription: Subscription + } - mutation_type = schema.mutation_type - if mutation_type and mutation_type.name != "Mutation": - return False + When using this naming convention, the schema description can be omitted so + long as these names are only used for operation types. - subscription_type = schema.subscription_type - return not subscription_type or subscription_type.name == "Subscription" + Note however that if any of these default names are used elsewhere in the + schema but not as a root operation type, the schema definition must still + be printed to avoid ambiguity. 
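The effect of the rewritten `print_schema_definition` is easiest to see on two small schemas; only the presence of the `schema { ... }` block matters in this sketch:

```python
from graphql import build_schema, print_schema

# Default root type names: the schema definition block is omitted.
assert "schema {" not in print_schema(build_schema("type Query { hello: String }"))

# A non-default query type name forces an explicit schema definition.
custom = build_schema("""
    schema { query: QueryRoot }
    type QueryRoot { hello: String }
""")
assert "schema {" in print_schema(custom)
```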
+ """ + return ( + schema.query_type is schema.get_type("Query") + and schema.mutation_type is schema.get_type("Mutation") + and schema.subscription_type is schema.get_type("Subscription") + ) def print_type(type_: GraphQLNamedType) -> str: + """Print a named GraphQL type.""" if is_scalar_type(type_): - type_ = cast(GraphQLScalarType, type_) return print_scalar(type_) if is_object_type(type_): - type_ = cast(GraphQLObjectType, type_) return print_object(type_) if is_interface_type(type_): - type_ = cast(GraphQLInterfaceType, type_) return print_interface(type_) if is_union_type(type_): - type_ = cast(GraphQLUnionType, type_) return print_union(type_) if is_enum_type(type_): - type_ = cast(GraphQLEnumType, type_) return print_enum(type_) if is_input_object_type(type_): - type_ = cast(GraphQLInputObjectType, type_) return print_input_object(type_) # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover def print_scalar(type_: GraphQLScalarType) -> str: + """Print a GraphQL scalar type.""" return ( print_description(type_) + f"scalar {type_.name}" @@ -144,13 +163,15 @@ def print_scalar(type_: GraphQLScalarType) -> str: def print_implemented_interfaces( - type_: Union[GraphQLObjectType, GraphQLInterfaceType] + type_: GraphQLObjectType | GraphQLInterfaceType, ) -> str: + """Print the interfaces implemented by a GraphQL object or interface type.""" interfaces = type_.interfaces return " implements " + " & ".join(i.name for i in interfaces) if interfaces else "" def print_object(type_: GraphQLObjectType) -> str: + """Print a GraphQL object type.""" return ( print_description(type_) + f"type {type_.name}" @@ -160,6 +181,7 @@ def print_object(type_: GraphQLObjectType) -> str: def print_interface(type_: GraphQLInterfaceType) -> str: + """Print a GraphQL interface type.""" return ( print_description(type_) + f"interface {type_.name}" @@ -169,12 +191,14 @@ def print_interface(type_: GraphQLInterfaceType) -> str: def print_union(type_: GraphQLUnionType) -> str: + """Print a GraphQL union type.""" types = type_.types possible_types = " = " + " | ".join(t.name for t in types) if types else "" return print_description(type_) + f"union {type_.name}" + possible_types def print_enum(type_: GraphQLEnumType) -> str: + """Print a GraphQL enum type.""" values = [ print_description(value, " ", not i) + f" {name}" @@ -185,6 +209,7 @@ def print_enum(type_: GraphQLEnumType) -> str: def print_input_object(type_: GraphQLInputObjectType) -> str: + """Print a GraphQL input object type.""" fields = [ print_description(field, " ", not i) + " " + print_input_value(name, field) for i, (name, field) in enumerate(type_.fields.items()) @@ -192,7 +217,8 @@ def print_input_object(type_: GraphQLInputObjectType) -> str: return print_description(type_) + f"input {type_.name}" + print_block(fields) -def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: +def print_fields(type_: GraphQLObjectType | GraphQLInterfaceType) -> str: + """Print the fields of a GraphQL object or interface type.""" fields = [ print_description(field, " ", not i) + f" {name}" @@ -204,16 +230,18 @@ def print_fields(type_: Union[GraphQLObjectType, GraphQLInterfaceType]) -> str: return print_block(fields) -def print_block(items: List[str]) -> str: +def print_block(items: list[str]) -> str: + """Print a block with the given items.""" return " {\n" + "\n".join(items) + "\n}" 
if items else "" -def print_args(args: Dict[str, GraphQLArgument], indentation: str = "") -> str: +def print_args(args: dict[str, GraphQLArgument], indentation: str = "") -> str: + """Print the given GraphQL arguments.""" if not args: return "" # If every arg does not have a description, print them on one line. - if not any(arg.description for arg in args.values()): + if all(arg.description is None for arg in args.values()): return ( "(" + ", ".join(print_input_value(name, arg) for name, arg in args.items()) @@ -233,6 +261,7 @@ def print_args(args: Dict[str, GraphQLArgument], indentation: str = "") -> str: def print_input_value(name: str, arg: GraphQLArgument) -> str: + """Print an input value.""" default_ast = ast_from_value(arg.default_value, arg.type) arg_decl = f"{name}: {arg.type}" if default_ast: @@ -241,6 +270,7 @@ def print_input_value(name: str, arg: GraphQLArgument) -> str: def print_directive(directive: GraphQLDirective) -> str: + """Print a GraphQL directive.""" return ( print_description(directive) + f"directive @{directive.name}" @@ -251,7 +281,8 @@ def print_directive(directive: GraphQLDirective) -> str: ) -def print_deprecated(reason: Optional[str]) -> str: +def print_deprecated(reason: str | None) -> str: + """Print a deprecation reason.""" if reason is None: return "" if reason != DEFAULT_DEPRECATION_REASON: @@ -261,6 +292,7 @@ def print_deprecated(reason: Optional[str]) -> str: def print_specified_by_url(scalar: GraphQLScalarType) -> str: + """Print a specification URL.""" if scalar.specified_by_url is None: return "" ast_value = print_ast(StringValueNode(value=scalar.specified_by_url)) @@ -268,16 +300,15 @@ def print_specified_by_url(scalar: GraphQLScalarType) -> str: def print_description( - def_: Union[ - GraphQLArgument, - GraphQLDirective, - GraphQLEnumValue, - GraphQLNamedType, - GraphQLSchema, - ], + def_: GraphQLArgument + | GraphQLDirective + | GraphQLEnumValue + | GraphQLNamedType + | GraphQLSchema, indentation: str = "", first_in_block: bool = True, ) -> str: + """Print a description.""" description = def_.description if description is None: return "" diff --git a/src/graphql/utilities/separate_operations.py b/src/graphql/utilities/separate_operations.py index b40bc686..b6866748 100644 --- a/src/graphql/utilities/separate_operations.py +++ b/src/graphql/utilities/separate_operations.py @@ -1,4 +1,8 @@ -from typing import Any, Dict, List, Set +"""Separation of GraphQL operations""" + +from __future__ import annotations + +from typing import Any, Dict, List from ..language import ( DocumentNode, @@ -10,20 +14,26 @@ visit, ) +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["separate_operations"] -DepGraph = Dict[str, List[str]] +DepGraph: TypeAlias = Dict[str, List[str]] -def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: +def separate_operations(document_ast: DocumentNode) -> dict[str, DocumentNode]: """Separate operations in a given AST document. This function accepts a single AST document which may contain many operations and fragments and returns a collection of AST documents each of which contains a single operation as well the fragment definitions it refers to. """ - operations: List[OperationDefinitionNode] = [] + operations: list[OperationDefinitionNode] = [] dep_graph: DepGraph = {} # Populate metadata and build a dependency graph. 
@@ -39,9 +49,9 @@ def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: # For each operation, produce a new synthesized AST which includes only what is # necessary for completing that operation. - separated_document_asts: Dict[str, DocumentNode] = {} + separated_document_asts: dict[str, DocumentNode] = {} for operation in operations: - dependencies: Set[str] = set() + dependencies: set[str] = set() for fragment_name in collect_dependencies(operation.selection_set): collect_transitive_dependencies(dependencies, dep_graph, fragment_name) @@ -67,7 +77,7 @@ def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]: def collect_transitive_dependencies( - collected: Set[str], dep_graph: DepGraph, from_name: str + collected: set[str], dep_graph: DepGraph, from_name: str ) -> None: """Collect transitive dependencies. @@ -84,7 +94,7 @@ def collect_transitive_dependencies( class DependencyCollector(Visitor): - dependencies: List[str] + dependencies: list[str] def __init__(self) -> None: super().__init__() @@ -95,7 +105,7 @@ def enter_fragment_spread(self, node: FragmentSpreadNode, *_args: Any) -> None: self.add_dependency(node.name.value) -def collect_dependencies(selection_set: SelectionSetNode) -> List[str]: +def collect_dependencies(selection_set: SelectionSetNode) -> list[str]: collector = DependencyCollector() visit(selection_set, collector) return collector.dependencies diff --git a/src/graphql/utilities/sort_value_node.py b/src/graphql/utilities/sort_value_node.py index 77a8dcb7..bf20cf37 100644 --- a/src/graphql/utilities/sort_value_node.py +++ b/src/graphql/utilities/sort_value_node.py @@ -1,5 +1,8 @@ +"""Sorting value nodes""" + +from __future__ import annotations + from copy import copy -from typing import Tuple from ..language import ListValueNode, ObjectFieldNode, ObjectValueNode, ValueNode from ..pyutils import natural_comparison_key @@ -29,7 +32,7 @@ def sort_field(field: ObjectFieldNode) -> ObjectFieldNode: return field -def sort_fields(fields: Tuple[ObjectFieldNode, ...]) -> Tuple[ObjectFieldNode, ...]: +def sort_fields(fields: tuple[ObjectFieldNode, ...]) -> tuple[ObjectFieldNode, ...]: return tuple( sorted( (sort_field(field) for field in fields), diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 1ed5bbef..9ffe1e26 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -1,15 +1,19 @@ -from typing import Union, cast +"""Removal of insignificant characters""" + +from __future__ import annotations + +from typing import cast from ..language import Lexer, TokenKind -from ..language.source import Source, is_source from ..language.block_string import print_block_string from ..language.lexer import is_punctuator_token_kind +from ..language.source import Source, is_source __all__ = ["strip_ignored_characters"] -def strip_ignored_characters(source: Union[str, Source]) -> str: - """Strip characters that are ignored anyway. +def strip_ignored_characters(source: str | Source) -> str: + '''Strip characters that are ignored anyway. Strips characters that are not significant to the validity or execution of a GraphQL document: @@ -30,7 +34,6 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: Warning: It is guaranteed that this function will always produce stable results. 
However, it's not guaranteed that it will stay the same between different releases due to bugfixes or changes in the GraphQL specification. - """ ''' Query example:: @@ -64,7 +67,8 @@ def strip_ignored_characters(source: Union[str, Source]) -> str: """Type description""" type Foo{"""Field description""" bar:String} ''' - source = cast(Source, source) if is_source(source) else Source(cast(str, source)) + if not is_source(source): + source = Source(cast("str", source)) body = source.body lexer = Lexer(source) diff --git a/src/graphql/utilities/type_comparators.py b/src/graphql/utilities/type_comparators.py index 62883785..609c19b6 100644 --- a/src/graphql/utilities/type_comparators.py +++ b/src/graphql/utilities/type_comparators.py @@ -1,11 +1,7 @@ -from typing import cast +"""GraphQL type comparators""" from ..type import ( - GraphQLAbstractType, GraphQLCompositeType, - GraphQLList, - GraphQLNonNull, - GraphQLObjectType, GraphQLSchema, GraphQLType, is_abstract_type, @@ -15,13 +11,14 @@ is_object_type, ) -__all__ = ["is_equal_type", "is_type_sub_type_of", "do_types_overlap"] +__all__ = ["do_types_overlap", "is_equal_type", "is_type_sub_type_of"] def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool: """Check whether two types are equal. - Provided two types, return true if the types are equal (invariant).""" + Provided two types, return true if the types are equal (invariant). + """ # Equivalent types are equal. if type_a is type_b: return True @@ -29,12 +26,12 @@ def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool: # If either type is non-null, the other must also be non-null. if is_non_null_type(type_a) and is_non_null_type(type_b): # noinspection PyUnresolvedReferences - return is_equal_type(type_a.of_type, type_b.of_type) # type:ignore + return is_equal_type(type_a.of_type, type_b.of_type) # If either type is a list, the other must also be a list. if is_list_type(type_a) and is_list_type(type_b): # noinspection PyUnresolvedReferences - return is_equal_type(type_a.of_type, type_b.of_type) # type:ignore + return is_equal_type(type_a.of_type, type_b.of_type) # Otherwise the types are not equal. return False @@ -56,27 +53,21 @@ def is_type_sub_type_of( if is_non_null_type(super_type): if is_non_null_type(maybe_subtype): return is_type_sub_type_of( - schema, - cast(GraphQLNonNull, maybe_subtype).of_type, - cast(GraphQLNonNull, super_type).of_type, + schema, maybe_subtype.of_type, super_type.of_type ) return False - elif is_non_null_type(maybe_subtype): + if is_non_null_type(maybe_subtype): # If super_type is nullable, maybe_subtype may be non-null or nullable. - return is_type_sub_type_of( - schema, cast(GraphQLNonNull, maybe_subtype).of_type, super_type - ) + return is_type_sub_type_of(schema, maybe_subtype.of_type, super_type) # If super_type type is a list, maybeSubType type must also be a list. if is_list_type(super_type): if is_list_type(maybe_subtype): return is_type_sub_type_of( - schema, - cast(GraphQLList, maybe_subtype).of_type, - cast(GraphQLList, super_type).of_type, + schema, maybe_subtype.of_type, super_type.of_type ) return False - elif is_list_type(maybe_subtype): + if is_list_type(maybe_subtype): # If super_type is not a list, maybe_subtype must also be not a list. 
return False @@ -85,10 +76,7 @@ def is_type_sub_type_of( return ( is_abstract_type(super_type) and (is_interface_type(maybe_subtype) or is_object_type(maybe_subtype)) - and schema.is_sub_type( - cast(GraphQLAbstractType, super_type), - cast(GraphQLObjectType, maybe_subtype), - ) + and schema.is_sub_type(super_type, maybe_subtype) ) @@ -110,11 +98,9 @@ def do_types_overlap( return True if is_abstract_type(type_a): - type_a = cast(GraphQLAbstractType, type_a) if is_abstract_type(type_b): # If both types are abstract, then determine if there is any intersection # between possible concrete types of each. - type_b = cast(GraphQLAbstractType, type_b) return any( schema.is_sub_type(type_b, type_) for type_ in schema.get_possible_types(type_a) @@ -124,7 +110,6 @@ def do_types_overlap( if is_abstract_type(type_b): # Determine if former type is a possible concrete type of the latter. - type_b = cast(GraphQLAbstractType, type_b) return schema.is_sub_type(type_b, type_a) # Otherwise the types do not overlap. diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index d8f2a5be..10acd68f 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -1,13 +1,17 @@ -from typing import cast, overload, Optional +"""Generating GraphQL types from AST nodes""" + +from __future__ import annotations + +from typing import cast, overload from ..language import ListTypeNode, NamedTypeNode, NonNullTypeNode, TypeNode from ..pyutils import inspect from ..type import ( - GraphQLSchema, - GraphQLNamedType, GraphQLList, + GraphQLNamedType, GraphQLNonNull, GraphQLNullableType, + GraphQLSchema, GraphQLType, ) @@ -17,33 +21,29 @@ @overload def type_from_ast( schema: GraphQLSchema, type_node: NamedTypeNode -) -> Optional[GraphQLNamedType]: - ... +) -> GraphQLNamedType | None: ... @overload def type_from_ast( schema: GraphQLSchema, type_node: ListTypeNode -) -> Optional[GraphQLList]: - ... +) -> GraphQLList | None: ... @overload def type_from_ast( schema: GraphQLSchema, type_node: NonNullTypeNode -) -> Optional[GraphQLNonNull]: - ... +) -> GraphQLNonNull | None: ... @overload -def type_from_ast(schema: GraphQLSchema, type_node: TypeNode) -> Optional[GraphQLType]: - ... +def type_from_ast(schema: GraphQLSchema, type_node: TypeNode) -> GraphQLType | None: ... def type_from_ast( schema: GraphQLSchema, type_node: TypeNode, -) -> Optional[GraphQLType]: +) -> GraphQLType | None: """Get the GraphQL type definition from an AST node. Given a Schema and an AST node describing a type, return a GraphQLType definition @@ -52,16 +52,17 @@ def type_from_ast( "User" found in the schema. If a type called "User" is not found in the schema, then None will be returned. """ - inner_type: Optional[GraphQLType] + inner_type: GraphQLType | None if isinstance(type_node, ListTypeNode): inner_type = type_from_ast(schema, type_node.type) return GraphQLList(inner_type) if inner_type else None if isinstance(type_node, NonNullTypeNode): inner_type = type_from_ast(schema, type_node.type) - inner_type = cast(GraphQLNullableType, inner_type) + inner_type = cast("GraphQLNullableType", inner_type) return GraphQLNonNull(inner_type) if inner_type else None if isinstance(type_node, NamedTypeNode): return schema.get_type(type_node.name.value) # Not reachable. All possible type nodes have been considered. - raise TypeError(f"Unexpected type node: {inspect(type_node)}.") + msg = f"Unexpected type node: {inspect(type_node)}." 
# pragma: no cover + raise TypeError(msg) # pragma: no cover diff --git a/src/graphql/utilities/type_info.py b/src/graphql/utilities/type_info.py index 80a4ef3d..5763f16e 100644 --- a/src/graphql/utilities/type_info.py +++ b/src/graphql/utilities/type_info.py @@ -1,4 +1,8 @@ -from typing import Any, Callable, List, Optional, Union, cast +"""Managing type information""" + +from __future__ import annotations + +from typing import Any, Callable, Optional from ..language import ( ArgumentNode, @@ -19,38 +23,35 @@ GraphQLArgument, GraphQLCompositeType, GraphQLDirective, - GraphQLEnumType, GraphQLEnumValue, GraphQLField, - GraphQLInputObjectType, GraphQLInputType, - GraphQLInterfaceType, - GraphQLList, - GraphQLObjectType, GraphQLOutputType, GraphQLSchema, GraphQLType, - is_composite_type, - is_input_type, - is_output_type, get_named_type, - SchemaMetaFieldDef, - TypeMetaFieldDef, - TypeNameMetaFieldDef, - is_object_type, - is_interface_type, get_nullable_type, - is_list_type, - is_input_object_type, + is_composite_type, is_enum_type, + is_input_object_type, + is_input_type, + is_list_type, + is_object_type, + is_output_type, ) from .type_from_ast import type_from_ast +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = ["TypeInfo", "TypeInfoVisitor"] -GetFieldDefFn = Callable[ - [GraphQLSchema, GraphQLType, FieldNode], Optional[GraphQLField] +GetFieldDefFn: TypeAlias = Callable[ + [GraphQLSchema, GraphQLCompositeType, FieldNode], Optional[GraphQLField] ] @@ -66,8 +67,8 @@ class TypeInfo: def __init__( self, schema: GraphQLSchema, - initial_type: Optional[GraphQLType] = None, - get_field_def_fn: Optional[GetFieldDefFn] = None, + initial_type: GraphQLType | None = None, + get_field_def_fn: GetFieldDefFn | None = None, ) -> None: """Initialize the TypeInfo for the given GraphQL schema. @@ -77,44 +78,44 @@ def __init__( The optional last parameter is deprecated and will be removed in v3.3. 
""" self._schema = schema - self._type_stack: List[Optional[GraphQLOutputType]] = [] - self._parent_type_stack: List[Optional[GraphQLCompositeType]] = [] - self._input_type_stack: List[Optional[GraphQLInputType]] = [] - self._field_def_stack: List[Optional[GraphQLField]] = [] - self._default_value_stack: List[Any] = [] - self._directive: Optional[GraphQLDirective] = None - self._argument: Optional[GraphQLArgument] = None - self._enum_value: Optional[GraphQLEnumValue] = None + self._type_stack: list[GraphQLOutputType | None] = [] + self._parent_type_stack: list[GraphQLCompositeType | None] = [] + self._input_type_stack: list[GraphQLInputType | None] = [] + self._field_def_stack: list[GraphQLField | None] = [] + self._default_value_stack: list[Any] = [] + self._directive: GraphQLDirective | None = None + self._argument: GraphQLArgument | None = None + self._enum_value: GraphQLEnumValue | None = None self._get_field_def: GetFieldDefFn = get_field_def_fn or get_field_def if initial_type: if is_input_type(initial_type): - self._input_type_stack.append(cast(GraphQLInputType, initial_type)) + self._input_type_stack.append(initial_type) if is_composite_type(initial_type): - self._parent_type_stack.append(cast(GraphQLCompositeType, initial_type)) + self._parent_type_stack.append(initial_type) if is_output_type(initial_type): - self._type_stack.append(cast(GraphQLOutputType, initial_type)) + self._type_stack.append(initial_type) - def get_type(self) -> Optional[GraphQLOutputType]: + def get_type(self) -> GraphQLOutputType | None: if self._type_stack: return self._type_stack[-1] return None - def get_parent_type(self) -> Optional[GraphQLCompositeType]: + def get_parent_type(self) -> GraphQLCompositeType | None: if self._parent_type_stack: return self._parent_type_stack[-1] return None - def get_input_type(self) -> Optional[GraphQLInputType]: + def get_input_type(self) -> GraphQLInputType | None: if self._input_type_stack: return self._input_type_stack[-1] return None - def get_parent_input_type(self) -> Optional[GraphQLInputType]: + def get_parent_input_type(self) -> GraphQLInputType | None: if len(self._input_type_stack) > 1: return self._input_type_stack[-2] return None - def get_field_def(self) -> Optional[GraphQLField]: + def get_field_def(self) -> GraphQLField | None: if self._field_def_stack: return self._field_def_stack[-1] return None @@ -124,13 +125,13 @@ def get_default_value(self) -> Any: return self._default_value_stack[-1] return None - def get_directive(self) -> Optional[GraphQLDirective]: + def get_directive(self) -> GraphQLDirective | None: return self._directive - def get_argument(self) -> Optional[GraphQLArgument]: + def get_argument(self) -> GraphQLArgument | None: return self._argument - def get_enum_value(self) -> Optional[GraphQLEnumValue]: + def get_enum_value(self) -> GraphQLEnumValue | None: return self._enum_value def enter(self, node: Node) -> None: @@ -144,12 +145,10 @@ def leave(self, node: Node) -> None: method() # noinspection PyUnusedLocal - def enter_selection_set(self, node: SelectionSetNode) -> None: + def enter_selection_set(self, _node: SelectionSetNode) -> None: named_type = get_named_type(self.get_type()) self._parent_type_stack.append( - cast(GraphQLCompositeType, named_type) - if is_composite_type(named_type) - else None + named_type if is_composite_type(named_type) else None ) def enter_field(self, node: FieldNode) -> None: @@ -176,19 +175,13 @@ def enter_inline_fragment(self, node: InlineFragmentNode) -> None: if type_condition_ast else 
get_named_type(self.get_type()) ) - self._type_stack.append( - cast(GraphQLOutputType, output_type) - if is_output_type(output_type) - else None - ) + self._type_stack.append(output_type if is_output_type(output_type) else None) enter_fragment_definition = enter_inline_fragment def enter_variable_definition(self, node: VariableDefinitionNode) -> None: input_type = type_from_ast(self._schema, node.type) - self._input_type_stack.append( - cast(GraphQLInputType, input_type) if is_input_type(input_type) else None - ) + self._input_type_stack.append(input_type if is_input_type(input_type) else None) def enter_argument(self, node: ArgumentNode) -> None: field_or_directive = self.get_directive() or self.get_field_def() @@ -204,13 +197,9 @@ def enter_argument(self, node: ArgumentNode) -> None: self._input_type_stack.append(arg_type if is_input_type(arg_type) else None) # noinspection PyUnusedLocal - def enter_list_value(self, node: ListValueNode) -> None: - list_type = get_nullable_type(self.get_input_type()) # type: ignore - item_type = ( - cast(GraphQLList, list_type).of_type - if is_list_type(list_type) - else list_type - ) + def enter_list_value(self, _node: ListValueNode) -> None: + list_type = get_nullable_type(self.get_input_type()) + item_type = list_type.of_type if is_list_type(list_type) else list_type # List positions never have a default value. self._default_value_stack.append(Undefined) self._input_type_stack.append(item_type if is_input_type(item_type) else None) @@ -218,9 +207,7 @@ def enter_list_value(self, node: ListValueNode) -> None: def enter_object_field(self, node: ObjectFieldNode) -> None: object_type = get_named_type(self.get_input_type()) if is_input_object_type(object_type): - input_field = cast(GraphQLInputObjectType, object_type).fields.get( - node.name.value - ) + input_field = object_type.fields.get(node.name.value) input_field_type = input_field.type if input_field else None else: input_field = input_field_type = None @@ -234,7 +221,7 @@ def enter_object_field(self, node: ObjectFieldNode) -> None: def enter_enum_value(self, node: EnumValueNode) -> None: enum_type = get_named_type(self.get_input_type()) if is_enum_type(enum_type): - enum_value = cast(GraphQLEnumType, enum_type).values.get(node.value) + enum_value = enum_type.values.get(node.value) else: enum_value = None self._enum_value = enum_value @@ -274,31 +261,15 @@ def leave_enum_value(self) -> None: def get_field_def( - schema: GraphQLSchema, parent_type: GraphQLType, field_node: FieldNode -) -> Optional[GraphQLField]: - """Get field definition. - - Not exactly the same as the executor's definition of - :func:`graphql.execution.get_field_def`, in this statically evaluated environment - we do not always have an Object type, and need to handle Interface and Union types. 
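Since `get_field_def` now simply delegates to `schema.get_field`, the visitor workflow for callers stays the same. A minimal sketch of driving a `TypeInfo` through a document (field and type names are illustrative):

```python
from graphql import TypeInfo, TypeInfoVisitor, Visitor, build_schema, parse, visit

schema = build_schema("type Query { hello: String }")
type_info = TypeInfo(schema)

class PrintFieldTypes(Visitor):
    def enter_field(self, node, *_args):
        # The wrapping TypeInfoVisitor keeps type_info in sync during the walk.
        print(f"{node.name.value}: {type_info.get_type()}")

visit(parse("{ hello }"), TypeInfoVisitor(type_info, PrintFieldTypes()))
# hello: String
```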
- """ - name = field_node.name.value - if name == "__schema" and schema.query_type is parent_type: - return SchemaMetaFieldDef - if name == "__type" and schema.query_type is parent_type: - return TypeMetaFieldDef - if name == "__typename" and is_composite_type(parent_type): - return TypeNameMetaFieldDef - if is_object_type(parent_type) or is_interface_type(parent_type): - parent_type = cast(Union[GraphQLObjectType, GraphQLInterfaceType], parent_type) - return parent_type.fields.get(name) - return None + schema: GraphQLSchema, parent_type: GraphQLCompositeType, field_node: FieldNode +) -> GraphQLField | None: + return schema.get_field(parent_type, field_node.name.value) class TypeInfoVisitor(Visitor): """A visitor which maintains a provided TypeInfo.""" - def __init__(self, type_info: "TypeInfo", visitor: Visitor): + def __init__(self, type_info: TypeInfo, visitor: Visitor) -> None: super().__init__() self.type_info = type_info self.visitor = visitor @@ -306,13 +277,14 @@ def __init__(self, type_info: "TypeInfo", visitor: Visitor): def enter(self, node: Node, *args: Any) -> Any: self.type_info.enter(node) fn = self.visitor.get_enter_leave_for_kind(node.kind).enter - if fn: - result = fn(node, *args) - if result is not None: - self.type_info.leave(node) - if isinstance(result, Node): - self.type_info.enter(result) - return result + if not fn: + return None + result = fn(node, *args) + if result is not None: + self.type_info.leave(node) + if isinstance(result, Node): + self.type_info.enter(result) + return result def leave(self, node: Node, *args: Any) -> Any: fn = self.visitor.get_enter_leave_for_kind(node.kind).leave diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index 62a506c8..399cdcb4 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -1,4 +1,8 @@ -from typing import Any, Dict, List, Optional, cast +"""Conversion from GraphQL value AST to Python values.""" + +from __future__ import annotations + +from typing import Any, cast from ..language import ( ListValueNode, @@ -7,12 +11,9 @@ ValueNode, VariableNode, ) -from ..pyutils import inspect, Undefined +from ..pyutils import Undefined, inspect from ..type import ( - GraphQLInputObjectType, GraphQLInputType, - GraphQLList, - GraphQLNonNull, GraphQLScalarType, is_input_object_type, is_leaf_type, @@ -24,9 +25,9 @@ def value_from_ast( - value_node: Optional[ValueNode], + value_node: ValueNode | None, type_: GraphQLInputType, - variables: Optional[Dict[str, Any]] = None, + variables: dict[str, Any] | None = None, ) -> Any: """Produce a Python value given a GraphQL Value AST. @@ -69,17 +70,15 @@ def value_from_ast( if is_non_null_type(type_): if isinstance(value_node, NullValueNode): return Undefined - type_ = cast(GraphQLNonNull, type_) return value_from_ast(value_node, type_.of_type, variables) if isinstance(value_node, NullValueNode): return None # This is explicitly returning the value None. 
if is_list_type(type_): - type_ = cast(GraphQLList, type_) item_type = type_.of_type if isinstance(value_node, ListValueNode): - coerced_values: List[Any] = [] + coerced_values: list[Any] = [] append_value = coerced_values.append for item_node in value_node.values: if is_missing_variable(item_node, variables): @@ -102,8 +101,7 @@ def value_from_ast( if is_input_object_type(type_): if not isinstance(value_node, ObjectValueNode): return Undefined - type_ = cast(GraphQLInputObjectType, type_) - coerced_obj: Dict[str, Any] = {} + coerced_obj: dict[str, Any] = {} fields = type_.fields field_nodes = {field.name.value: field for field in value_node.fields} for field_name, field in fields.items(): @@ -120,28 +118,37 @@ def value_from_ast( return Undefined coerced_obj[field.out_name or field_name] = field_value + if type_.is_one_of: + keys = list(coerced_obj) + if len(keys) != 1: + return Undefined + + if coerced_obj[keys[0]] is None: + return Undefined + return type_.out_type(coerced_obj) if is_leaf_type(type_): # Scalars fulfill parsing a literal value via `parse_literal()`. Invalid values # represent a failure to parse correctly, in which case Undefined is returned. - type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) # noinspection PyBroadException try: if variables: result = type_.parse_literal(value_node, variables) else: result = type_.parse_literal(value_node) - except Exception: + except Exception: # noqa: BLE001 return Undefined return result # Not reachable. All possible input types have been considered. - raise TypeError(f"Unexpected input type: {inspect(type_)}.") + msg = f"Unexpected input type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover def is_missing_variable( - value_node: ValueNode, variables: Optional[Dict[str, Any]] = None + value_node: ValueNode, variables: dict[str, Any] | None = None ) -> bool: """Check if ``value_node`` is a variable not defined in the ``variables`` dict.""" return isinstance(value_node, VariableNode) and ( diff --git a/src/graphql/utilities/value_from_ast_untyped.py b/src/graphql/utilities/value_from_ast_untyped.py index 795160b5..a9ad0632 100644 --- a/src/graphql/utilities/value_from_ast_untyped.py +++ b/src/graphql/utilities/value_from_ast_untyped.py @@ -1,26 +1,31 @@ +"""Conversion from GraphQL value AST to Python values without type.""" + +from __future__ import annotations + from math import nan -from typing import Any, Callable, Dict, Optional, Union - -from ..language import ( - ValueNode, - BooleanValueNode, - EnumValueNode, - FloatValueNode, - IntValueNode, - ListValueNode, - NullValueNode, - ObjectValueNode, - StringValueNode, - VariableNode, -) - -from ..pyutils import inspect, Undefined +from typing import TYPE_CHECKING, Any, Callable + +from ..pyutils import Undefined, inspect + +if TYPE_CHECKING: + from ..language import ( + BooleanValueNode, + EnumValueNode, + FloatValueNode, + IntValueNode, + ListValueNode, + NullValueNode, + ObjectValueNode, + StringValueNode, + ValueNode, + VariableNode, + ) __all__ = ["value_from_ast_untyped"] def value_from_ast_untyped( - value_node: ValueNode, variables: Optional[Dict[str, Any]] = None + value_node: ValueNode, variables: dict[str, Any] | None = None ) -> Any: """Produce a Python value given a GraphQL Value AST. @@ -44,9 +49,8 @@ def value_from_ast_untyped( return func(value_node, variables) # Not reachable. All possible value nodes have been considered. 
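For contrast with the typed conversion above, `value_from_ast_untyped` (next in this diff) dispatches purely on the AST node kind. A hedged sketch; the literal is arbitrary:

```python
from graphql import parse_value
from graphql.utilities import value_from_ast_untyped

ast = parse_value('{ answer: 42, items: [1.5, "two", true, null] }')
# Each node kind maps to a plain Python value: int, float, str, bool, None.
assert value_from_ast_untyped(ast) == {"answer": 42, "items": [1.5, "two", True, None]}
```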
- raise TypeError( # pragma: no cover - f"Unexpected value node: {inspect(value_node)}." - ) + msg = f"Unexpected value node: {inspect(value_node)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover def value_from_null(_value_node: NullValueNode, _variables: Any) -> Any: @@ -68,19 +72,17 @@ def value_from_float(value_node: FloatValueNode, _variables: Any) -> Any: def value_from_string( - value_node: Union[BooleanValueNode, EnumValueNode, StringValueNode], _variables: Any + value_node: BooleanValueNode | EnumValueNode | StringValueNode, _variables: Any ) -> Any: return value_node.value -def value_from_list( - value_node: ListValueNode, variables: Optional[Dict[str, Any]] -) -> Any: +def value_from_list(value_node: ListValueNode, variables: dict[str, Any] | None) -> Any: return [value_from_ast_untyped(node, variables) for node in value_node.values] def value_from_object( - value_node: ObjectValueNode, variables: Optional[Dict[str, Any]] + value_node: ObjectValueNode, variables: dict[str, Any] | None ) -> Any: return { field.name.value: value_from_ast_untyped(field.value, variables) @@ -89,7 +91,7 @@ def value_from_object( def value_from_variable( - value_node: VariableNode, variables: Optional[Dict[str, Any]] + value_node: VariableNode, variables: dict[str, Any] | None ) -> Any: variable_name = value_node.name.value if not variables: @@ -97,7 +99,7 @@ def value_from_variable( return variables.get(variable_name, Undefined) -_value_from_kind_functions: Dict[str, Callable] = { +_value_from_kind_functions: dict[str, Callable] = { "null_value": value_from_null, "int_value": value_from_int, "float_value": value_from_float, diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 313073a5..ed6ca6c8 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -17,6 +17,17 @@ # All validation rules in the GraphQL Specification. 
from .specified_rules import specified_rules +# Spec Section: "Defer And Stream Directive Labels Are Unique" +from .rules.defer_stream_directive_label import DeferStreamDirectiveLabel + +# Spec Section: "Defer And Stream Directives Are Used On Valid Root Field" +from .rules.defer_stream_directive_on_root_field import DeferStreamDirectiveOnRootField + +# Spec Section: "Defer And Stream Directives Are Used On Valid Operations" +from .rules.defer_stream_directive_on_valid_operations_rule import ( + DeferStreamDirectiveOnValidOperationsRule, +) + # Spec Section: "Executable Definitions" from .rules.executable_definitions import ExecutableDefinitionsRule @@ -68,6 +79,9 @@ # Spec Section: "Subscriptions with Single Root Field" from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule +# Spec Section: "Stream Directives Are Used On List Fields" +from .rules.stream_directive_on_list_field import StreamDirectiveOnListField + # Spec Section: "Argument Uniqueness" from .rules.unique_argument_names import UniqueArgumentNamesRule @@ -110,14 +124,11 @@ from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule __all__ = [ - "validate", "ASTValidationContext", "ASTValidationRule", - "SDLValidationContext", - "SDLValidationRule", - "ValidationContext", - "ValidationRule", - "specified_rules", + "DeferStreamDirectiveLabel", + "DeferStreamDirectiveOnRootField", + "DeferStreamDirectiveOnValidOperationsRule", "ExecutableDefinitionsRule", "FieldsOnCorrectTypeRule", "FragmentsOnCompositeTypesRule", @@ -126,32 +137,39 @@ "KnownFragmentNamesRule", "KnownTypeNamesRule", "LoneAnonymousOperationRule", + "LoneSchemaDefinitionRule", + "NoDeprecatedCustomRule", "NoFragmentCyclesRule", + "NoSchemaIntrospectionCustomRule", "NoUndefinedVariablesRule", "NoUnusedFragmentsRule", "NoUnusedVariablesRule", "OverlappingFieldsCanBeMergedRule", "PossibleFragmentSpreadsRule", + "PossibleTypeExtensionsRule", "ProvidedRequiredArgumentsRule", + "SDLValidationContext", + "SDLValidationRule", "ScalarLeafsRule", "SingleFieldSubscriptionsRule", + "StreamDirectiveOnListField", + "UniqueArgumentDefinitionNamesRule", "UniqueArgumentNamesRule", + "UniqueDirectiveNamesRule", "UniqueDirectivesPerLocationRule", + "UniqueEnumValueNamesRule", + "UniqueFieldDefinitionNamesRule", "UniqueFragmentNamesRule", "UniqueInputFieldNamesRule", "UniqueOperationNamesRule", + "UniqueOperationTypesRule", + "UniqueTypeNamesRule", "UniqueVariableNamesRule", + "ValidationContext", + "ValidationRule", "ValuesOfCorrectTypeRule", "VariablesAreInputTypesRule", "VariablesInAllowedPositionRule", - "LoneSchemaDefinitionRule", - "UniqueOperationTypesRule", - "UniqueTypeNamesRule", - "UniqueEnumValueNamesRule", - "UniqueFieldDefinitionNamesRule", - "UniqueArgumentDefinitionNamesRule", - "UniqueDirectiveNamesRule", - "PossibleTypeExtensionsRule", - "NoDeprecatedCustomRule", - "NoSchemaIntrospectionCustomRule", + "specified_rules", + "validate", ] diff --git a/src/graphql/validation/rules/__init__.py b/src/graphql/validation/rules/__init__.py index 1b0c5d57..2ea665e4 100644 --- a/src/graphql/validation/rules/__init__.py +++ b/src/graphql/validation/rules/__init__.py @@ -16,11 +16,12 @@ class ASTValidationRule(Visitor): context: ASTValidationContext - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__() self.context = context def report_error(self, error: GraphQLError) -> None: + """Report a GraphQL error.""" self.context.report_error(error) diff --git 
a/src/graphql/validation/rules/custom/no_deprecated.py b/src/graphql/validation/rules/custom/no_deprecated.py index e4b897ff..c9742911 100644 --- a/src/graphql/validation/rules/custom/no_deprecated.py +++ b/src/graphql/validation/rules/custom/no_deprecated.py @@ -1,10 +1,16 @@ -from typing import Any, cast +"""No deprecated rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ....error import GraphQLError -from ....language import ArgumentNode, EnumValueNode, FieldNode, ObjectFieldNode -from ....type import GraphQLInputObjectType, get_named_type, is_input_object_type +from ....type import get_named_type, is_input_object_type from .. import ValidationRule +if TYPE_CHECKING: + from ....language import ArgumentNode, EnumValueNode, FieldNode, ObjectFieldNode + __all__ = ["NoDeprecatedCustomRule"] @@ -68,14 +74,12 @@ def enter_object_field(self, node: ObjectFieldNode, *_args: Any) -> None: context = self.context input_object_def = get_named_type(context.get_parent_input_type()) if is_input_object_type(input_object_def): - input_field_def = cast(GraphQLInputObjectType, input_object_def).fields.get( - node.name.value - ) + input_field_def = input_object_def.fields.get(node.name.value) if input_field_def: deprecation_reason = input_field_def.deprecation_reason if deprecation_reason is not None: field_name = node.name.value - input_object_name = input_object_def.name # type: ignore + input_object_name = input_object_def.name self.report_error( GraphQLError( f"The input field {input_object_name}.{field_name}" diff --git a/src/graphql/validation/rules/custom/no_schema_introspection.py b/src/graphql/validation/rules/custom/no_schema_introspection.py index 76f15ea7..99c12a9e 100644 --- a/src/graphql/validation/rules/custom/no_schema_introspection.py +++ b/src/graphql/validation/rules/custom/no_schema_introspection.py @@ -1,10 +1,16 @@ -from typing import Any +"""No schema introspection rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ....error import GraphQLError -from ....language import FieldNode from ....type import get_named_type, is_introspection_type from .. import ValidationRule +if TYPE_CHECKING: + from ....language import FieldNode + __all__ = ["NoSchemaIntrospectionCustomRule"] diff --git a/src/graphql/validation/rules/defer_stream_directive_label.py b/src/graphql/validation/rules/defer_stream_directive_label.py new file mode 100644 index 00000000..6b688133 --- /dev/null +++ b/src/graphql/validation/rules/defer_stream_directive_label.py @@ -0,0 +1,63 @@ +"""Defer stream directive label rule""" + +from typing import Any, Dict, List + +from ...error import GraphQLError +from ...language import DirectiveNode, Node, StringValueNode +from ...type import GraphQLDeferDirective, GraphQLStreamDirective +from . import ASTValidationRule, ValidationContext + +__all__ = ["DeferStreamDirectiveLabel"] + + +class DeferStreamDirectiveLabel(ASTValidationRule): + """Defer and stream directive labels are unique + + A GraphQL document is only valid if defer and stream directives' label argument + is static and unique. 
+ """ + + def __init__(self, context: ValidationContext) -> None: + super().__init__(context) + self.known_labels: Dict[str, Node] = {} + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + _ancestors: List[Node], + ) -> None: + if node.name.value not in ( + GraphQLDeferDirective.name, + GraphQLStreamDirective.name, + ): + return + try: + label_argument = next( + arg for arg in node.arguments if arg.name.value == "label" + ) + except StopIteration: + return + label_value = label_argument.value + if not isinstance(label_value, StringValueNode): + self.report_error( + GraphQLError( + f"{node.name.value.capitalize()} directive label argument" + " must be a static string.", + node, + ), + ) + return + label_name = label_value.value + known_labels = self.known_labels + if label_name in known_labels: + self.report_error( + GraphQLError( + "Defer/Stream directive label argument must be unique.", + [known_labels[label_name], node], + ), + ) + return + known_labels[label_name] = node diff --git a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py new file mode 100644 index 00000000..023fc2b2 --- /dev/null +++ b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py @@ -0,0 +1,73 @@ +"""Defer stream directive on root field rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast + +from ...error import GraphQLError +from ...type import GraphQLDeferDirective, GraphQLStreamDirective +from . import ASTValidationRule, ValidationContext + +if TYPE_CHECKING: + from ...language import DirectiveNode, Node + +__all__ = ["DeferStreamDirectiveOnRootField"] + + +class DeferStreamDirectiveOnRootField(ASTValidationRule): + """Defer and stream directives are used on valid root field + + A GraphQL document is only valid if defer directives are not used on root + mutation or subscription types. 
+ """ + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + _ancestors: list[Node], + ) -> None: + context = cast("ValidationContext", self.context) + parent_type = context.get_parent_type() + if not parent_type: + return + schema = context.schema + mutation_type = schema.mutation_type + subscription_type = schema.subscription_type + + if node.name.value == GraphQLDeferDirective.name: + if mutation_type and parent_type is mutation_type: + self.report_error( + GraphQLError( + "Defer directive cannot be used on root" + f" mutation type '{parent_type.name}'.", + node, + ) + ) + if subscription_type and parent_type is subscription_type: + self.report_error( + GraphQLError( + "Defer directive cannot be used on root" + f" subscription type '{parent_type.name}'.", + node, + ) + ) + if node.name.value == GraphQLStreamDirective.name: + if mutation_type and parent_type is mutation_type: + self.report_error( + GraphQLError( + "Stream directive cannot be used on root" + f" mutation type '{parent_type.name}'.", + node, + ) + ) + if subscription_type and parent_type is subscription_type: + self.report_error( + GraphQLError( + "Stream directive cannot be used on root" + f" subscription type '{parent_type.name}'.", + node, + ) + ) diff --git a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py new file mode 100644 index 00000000..0159715d --- /dev/null +++ b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py @@ -0,0 +1,86 @@ +"""Defer stream directive on valid operations rule""" + +from __future__ import annotations + +from typing import Any + +from ...error import GraphQLError +from ...language import ( + BooleanValueNode, + DirectiveNode, + FragmentDefinitionNode, + Node, + OperationDefinitionNode, + OperationType, + VariableNode, +) +from ...type import GraphQLDeferDirective, GraphQLStreamDirective +from . import ASTValidationRule, ValidationContext + +__all__ = ["DeferStreamDirectiveOnValidOperationsRule"] + + +def if_argument_can_be_false(node: DirectiveNode) -> bool: + for argument in node.arguments: + if argument.name.value == "if": + if isinstance(argument.value, BooleanValueNode): + if argument.value.value: + return False + elif not isinstance(argument.value, VariableNode): + return False + return True + return False + + +class DeferStreamDirectiveOnValidOperationsRule(ASTValidationRule): + """Defer and stream directives are used on valid root field + + A GraphQL document is only valid if defer directives are not used on root + mutation or subscription types. 
+ """ + + def __init__(self, context: ValidationContext) -> None: + super().__init__(context) + self.fragments_used_on_subscriptions: set[str] = set() + + def enter_operation_definition( + self, operation: OperationDefinitionNode, *_args: Any + ) -> None: + if operation.operation == OperationType.SUBSCRIPTION: + fragments = self.context.get_recursively_referenced_fragments(operation) + for fragment in fragments: + self.fragments_used_on_subscriptions.add(fragment.name.value) + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + ancestors: list[Node], + ) -> None: + try: + definition_node = ancestors[2] + except IndexError: # pragma: no cover + return + if ( + isinstance(definition_node, FragmentDefinitionNode) + and definition_node.name.value in self.fragments_used_on_subscriptions + ) or ( + isinstance(definition_node, OperationDefinitionNode) + and definition_node.operation == OperationType.SUBSCRIPTION + ): + if node.name.value == GraphQLDeferDirective.name: + if not if_argument_can_be_false(node): + msg = ( + "Defer directive not supported on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`." + ) + self.report_error(GraphQLError(msg, node)) + elif node.name.value == GraphQLStreamDirective.name: # noqa: SIM102 + if not if_argument_can_be_false(node): + msg = ( + "Stream directive not supported on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`." + ) + self.report_error(GraphQLError(msg, node)) diff --git a/src/graphql/validation/rules/executable_definitions.py b/src/graphql/validation/rules/executable_definitions.py index eead0fae..6ca01a9d 100644 --- a/src/graphql/validation/rules/executable_definitions.py +++ b/src/graphql/validation/rules/executable_definitions.py @@ -1,7 +1,12 @@ +"""Executable definitions rule""" + +from __future__ import annotations + from typing import Any, Union, cast from ...error import GraphQLError from ...language import ( + SKIP, DirectiveDefinitionNode, DocumentNode, ExecutableDefinitionNode, @@ -9,7 +14,6 @@ SchemaExtensionNode, TypeDefinitionNode, VisitorAction, - SKIP, ) from . import ASTValidationRule @@ -35,7 +39,7 @@ def enter_document(self, node: DocumentNode, *_args: Any) -> VisitorAction: ) else "'{}'".format( cast( - Union[DirectiveDefinitionNode, TypeDefinitionNode], + "Union[DirectiveDefinitionNode, TypeDefinitionNode]", definition, ).name.value ) diff --git a/src/graphql/validation/rules/fields_on_correct_type.py b/src/graphql/validation/rules/fields_on_correct_type.py index d5622f76..83142fae 100644 --- a/src/graphql/validation/rules/fields_on_correct_type.py +++ b/src/graphql/validation/rules/fields_on_correct_type.py @@ -1,9 +1,14 @@ +"""Fields on correct type rule""" + +from __future__ import annotations + from collections import defaultdict from functools import cmp_to_key -from typing import Any, Dict, List, Union, cast +from typing import TYPE_CHECKING, Any +from ...error import GraphQLError +from ...pyutils import did_you_mean, natural_comparison_key, suggestion_list from ...type import ( - GraphQLAbstractType, GraphQLInterfaceType, GraphQLObjectType, GraphQLOutputType, @@ -12,11 +17,11 @@ is_interface_type, is_object_type, ) -from ...error import GraphQLError -from ...language import FieldNode -from ...pyutils import did_you_mean, natural_comparison_key, suggestion_list from . 
import ValidationRule +if TYPE_CHECKING: + from ...language import FieldNode + __all__ = ["FieldsOnCorrectTypeRule"] @@ -61,9 +66,8 @@ def enter_field(self, node: FieldNode, *_args: Any) -> None: def get_suggested_type_names( schema: GraphQLSchema, type_: GraphQLOutputType, field_name: str -) -> List[str]: - """ - Get a list of suggested type names. +) -> list[str]: + """Get a list of suggested type names. Go through all of the implementations of type, as well as the interfaces that they implement. If any of those types include the provided field, @@ -73,10 +77,9 @@ def get_suggested_type_names( # Must be an Object type, which does not have possible fields. return [] - type_ = cast(GraphQLAbstractType, type_) # Use a dict instead of a set for stable sorting when usage counts are the same - suggested_types: Dict[Union[GraphQLObjectType, GraphQLInterfaceType], None] = {} - usage_count: Dict[str, int] = defaultdict(int) + suggested_types: dict[GraphQLObjectType | GraphQLInterfaceType, None] = {} + usage_count: dict[str, int] = defaultdict(int) for possible_type in schema.get_possible_types(type_): if field_name not in possible_type.fields: continue @@ -94,8 +97,8 @@ def get_suggested_type_names( usage_count[possible_interface.name] += 1 def cmp( - type_a: Union[GraphQLObjectType, GraphQLInterfaceType], - type_b: Union[GraphQLObjectType, GraphQLInterfaceType], + type_a: GraphQLObjectType | GraphQLInterfaceType, + type_b: GraphQLObjectType | GraphQLInterfaceType, ) -> int: # pragma: no cover # Suggest both interface and object types based on how common they are. usage_count_diff = usage_count[type_b.name] - usage_count[type_a.name] @@ -103,13 +106,9 @@ def cmp( return usage_count_diff # Suggest super types first followed by subtypes - if is_interface_type(type_a) and schema.is_sub_type( - cast(GraphQLInterfaceType, type_a), type_b - ): + if is_interface_type(type_a) and schema.is_sub_type(type_a, type_b): return -1 - if is_interface_type(type_b) and schema.is_sub_type( - cast(GraphQLInterfaceType, type_b), type_a - ): + if is_interface_type(type_b) and schema.is_sub_type(type_b, type_a): return 1 name_a = natural_comparison_key(type_a.name) @@ -123,14 +122,14 @@ def cmp( return [type_.name for type_ in sorted(suggested_types, key=cmp_to_key(cmp))] -def get_suggested_field_names(type_: GraphQLOutputType, field_name: str) -> List[str]: +def get_suggested_field_names(type_: GraphQLOutputType, field_name: str) -> list[str]: """Get a list of suggested field names. For the field name provided, determine if there are any similar field names that may be the result of a typo. """ if is_object_type(type_) or is_interface_type(type_): - possible_field_names = list(type_.fields) # type: ignore + possible_field_names = list(type_.fields) return suggestion_list(field_name, possible_field_names) # Otherwise, must be a Union type, which does not define fields. 
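Taken together, the two suggestion helpers above produce the familiar "Did you mean" hint. A sketch with an invented schema and a deliberate typo:

```python
from graphql import build_schema, parse, validate
from graphql.validation import FieldsOnCorrectTypeRule

schema = build_schema("type Query { greeting: String }")
print(validate(schema, parse("{ greetng }"), [FieldsOnCorrectTypeRule])[0].message)
# e.g. Cannot query field 'greetng' on type 'Query'. Did you mean 'greeting'?
```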
return [] diff --git a/src/graphql/validation/rules/fragments_on_composite_types.py b/src/graphql/validation/rules/fragments_on_composite_types.py index c0c9cb5f..782f6c70 100644 --- a/src/graphql/validation/rules/fragments_on_composite_types.py +++ b/src/graphql/validation/rules/fragments_on_composite_types.py @@ -1,11 +1,11 @@ +"""Fragments on composite type rule""" + +from __future__ import annotations + from typing import Any from ...error import GraphQLError -from ...language import ( - FragmentDefinitionNode, - InlineFragmentNode, - print_ast, -) +from ...language import FragmentDefinitionNode, InlineFragmentNode, print_ast from ...type import is_composite_type from ...utilities import type_from_ast from . import ValidationRule diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index d0eb6b9f..643300d0 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -1,18 +1,22 @@ -from typing import cast, Any, Dict, List, Union +"""Known argument names on directives rule""" + +from __future__ import annotations + +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( + SKIP, ArgumentNode, DirectiveDefinitionNode, DirectiveNode, - SKIP, VisitorAction, ) from ...pyutils import did_you_mean, suggestion_list from ...type import specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext -__all__ = ["KnownArgumentNamesRule", "KnownArgumentNamesOnDirectivesRule"] +__all__ = ["KnownArgumentNamesOnDirectivesRule", "KnownArgumentNamesRule"] class KnownArgumentNamesOnDirectivesRule(ASTValidationRule): @@ -23,15 +27,15 @@ class KnownArgumentNamesOnDirectivesRule(ASTValidationRule): For internal use only. 
""" - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - directive_args: Dict[str, List[str]] = {} + directive_args: dict[str, list[str]] = {} schema = context.schema defined_directives = schema.directives if schema else specified_directives - for directive in cast(List, defined_directives): + for directive in cast("List", defined_directives): directive_args[directive.name] = list(directive.args) ast_definitions = context.document.definitions @@ -75,7 +79,7 @@ class KnownArgumentNamesRule(KnownArgumentNamesOnDirectivesRule): context: ValidationContext - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) def enter_argument(self, arg_node: ArgumentNode, *args: Any) -> None: diff --git a/src/graphql/validation/rules/known_directives.py b/src/graphql/validation/rules/known_directives.py index 26c5c75b..da31730b 100644 --- a/src/graphql/validation/rules/known_directives.py +++ b/src/graphql/validation/rules/known_directives.py @@ -1,9 +1,13 @@ -from typing import cast, Any, Dict, List, Optional, Tuple, Union +"""Known directives rule""" + +from __future__ import annotations + +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( - DirectiveLocation, DirectiveDefinitionNode, + DirectiveLocation, DirectiveNode, Node, OperationDefinitionNode, @@ -23,15 +27,15 @@ class KnownDirectivesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Directives-Are-Defined """ - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - locations_map: Dict[str, Tuple[DirectiveLocation, ...]] = {} + locations_map: dict[str, tuple[DirectiveLocation, ...]] = {} schema = context.schema defined_directives = ( - schema.directives if schema else cast(List, specified_directives) + schema.directives if schema else cast("List", specified_directives) ) for directive in defined_directives: locations_map[directive.name] = directive.locations @@ -49,7 +53,7 @@ def enter_directive( _key: Any, _parent: Any, _path: Any, - ancestors: List[Node], + ancestors: list[Node], ) -> None: name = node.name.value locations = self.locations_map.get(name) @@ -99,21 +103,21 @@ def enter_directive( def get_directive_location_for_ast_path( - ancestors: List[Node], -) -> Optional[DirectiveLocation]: + ancestors: list[Node], +) -> DirectiveLocation | None: applied_to = ancestors[-1] if not isinstance(applied_to, Node): # pragma: no cover - raise TypeError("Unexpected error in directive.") + msg = "Unexpected error in directive." 
+ raise TypeError(msg) kind = applied_to.kind if kind == "operation_definition": - applied_to = cast(OperationDefinitionNode, applied_to) + applied_to = cast("OperationDefinitionNode", applied_to) return _operation_location[applied_to.operation.value] - elif kind == "input_value_definition": + if kind == "input_value_definition": parent_node = ancestors[-3] return ( DirectiveLocation.INPUT_FIELD_DEFINITION if parent_node.kind == "input_object_type_definition" else DirectiveLocation.ARGUMENT_DEFINITION ) - else: - return _directive_location.get(kind) + return _directive_location.get(kind) diff --git a/src/graphql/validation/rules/known_fragment_names.py b/src/graphql/validation/rules/known_fragment_names.py index bb21ea77..52e9b679 100644 --- a/src/graphql/validation/rules/known_fragment_names.py +++ b/src/graphql/validation/rules/known_fragment_names.py @@ -1,9 +1,15 @@ -from typing import Any +"""Known fragment names rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FragmentSpreadNode from . import ValidationRule +if TYPE_CHECKING: + from ...language import FragmentSpreadNode + __all__ = ["KnownFragmentNamesRule"] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index 68e10454..5dbac00b 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -1,17 +1,28 @@ -from typing import Any, Collection, List, Union, cast +"""Known type names rule""" + +from __future__ import annotations + +from typing import Any, Collection, cast from ...error import GraphQLError from ...language import ( + NamedTypeNode, + Node, + TypeSystemDefinitionNode, + TypeSystemExtensionNode, is_type_definition_node, is_type_system_definition_node, is_type_system_extension_node, - Node, - NamedTypeNode, - TypeDefinitionNode, ) -from ...type import introspection_types, specified_scalar_types from ...pyutils import did_you_mean, suggestion_list -from . import ASTValidationRule, ValidationContext, SDLValidationContext +from ...type import introspection_types, specified_scalar_types +from . 
import ASTValidationRule, SDLValidationContext, ValidationContext + +try: + from typing import TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeGuard + __all__ = ["KnownTypeNamesRule"] @@ -25,7 +36,7 @@ class KnownTypeNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-Spread-Type-Existence """ - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_types_map = schema.type_map if schema else {} @@ -33,7 +44,6 @@ def __init__(self, context: Union[ValidationContext, SDLValidationContext]): defined_types = [] for def_ in context.document.definitions: if is_type_definition_node(def_): - def_ = cast(TypeDefinitionNode, def_) defined_types.append(def_.name.value) self.defined_types = set(defined_types) @@ -45,7 +55,7 @@ def enter_named_type( _key: Any, parent: Node, _path: Any, - ancestors: List[Node], + ancestors: list[Node], ) -> None: type_name = node.name.value if ( @@ -77,12 +87,14 @@ def enter_named_type( standard_type_names = set(specified_scalar_types).union(introspection_types) -def is_sdl_node(value: Union[Node, Collection[Node], None]) -> bool: +def is_sdl_node( + value: Node | Collection[Node] | None, +) -> TypeGuard[TypeSystemDefinitionNode | TypeSystemExtensionNode]: return ( value is not None and not isinstance(value, list) and ( - is_type_system_definition_node(cast(Node, value)) - or is_type_system_extension_node(cast(Node, value)) + is_type_system_definition_node(cast("Node", value)) + or is_type_system_extension_node(cast("Node", value)) ) ) diff --git a/src/graphql/validation/rules/lone_anonymous_operation.py b/src/graphql/validation/rules/lone_anonymous_operation.py index f88b5c85..f7587bda 100644 --- a/src/graphql/validation/rules/lone_anonymous_operation.py +++ b/src/graphql/validation/rules/lone_anonymous_operation.py @@ -1,3 +1,7 @@ +"""Lone anonymous operation rule""" + +from __future__ import annotations + from typing import Any from ...error import GraphQLError @@ -16,7 +20,7 @@ class LoneAnonymousOperationRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Lone-Anonymous-Operation """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) self.operation_count = 0 diff --git a/src/graphql/validation/rules/lone_schema_definition.py b/src/graphql/validation/rules/lone_schema_definition.py index 2d33cb86..ceac80d1 100644 --- a/src/graphql/validation/rules/lone_schema_definition.py +++ b/src/graphql/validation/rules/lone_schema_definition.py @@ -1,8 +1,14 @@ -from typing import Any +"""Lone Schema definition rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import SchemaDefinitionNode -from . import SDLValidationRule, SDLValidationContext +from . import SDLValidationContext, SDLValidationRule + +if TYPE_CHECKING: + from ...language import SchemaDefinitionNode __all__ = ["LoneSchemaDefinitionRule"] @@ -13,7 +19,7 @@ class LoneSchemaDefinitionRule(SDLValidationRule): A GraphQL document is only valid if it contains only one schema definition. 
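The `TypeGuard`-based rewrite of `KnownTypeNamesRule` above leaves its behavior unchanged; a sketch of the rule in isolation (schema and typo invented):

```python
from graphql import build_schema, parse, validate
from graphql.validation import KnownTypeNamesRule

schema = build_schema("type Query { pet: Pet } type Pet { name: String }")
print(validate(schema, parse("fragment f on Peet { name }"), [KnownTypeNamesRule])[0].message)
# e.g. Unknown type 'Peet'. Did you mean 'Pet'?
```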
""" - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) old_schema = context.schema self.already_defined = old_schema and ( diff --git a/src/graphql/validation/rules/no_fragment_cycles.py b/src/graphql/validation/rules/no_fragment_cycles.py index cf2b7e37..c7584655 100644 --- a/src/graphql/validation/rules/no_fragment_cycles.py +++ b/src/graphql/validation/rules/no_fragment_cycles.py @@ -1,7 +1,11 @@ -from typing import Any, Dict, List, Set +"""No fragment cycles rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError -from ...language import FragmentDefinitionNode, FragmentSpreadNode, VisitorAction, SKIP +from ...language import SKIP, FragmentDefinitionNode, FragmentSpreadNode, VisitorAction from . import ASTValidationContext, ASTValidationRule __all__ = ["NoFragmentCyclesRule"] @@ -17,15 +21,15 @@ class NoFragmentCyclesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-spreads-must-not-form-cycles """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) # Tracks already visited fragments to maintain O(N) and to ensure that # cycles are not redundantly reported. - self.visited_frags: Set[str] = set() + self.visited_frags: set[str] = set() # List of AST nodes used to produce meaningful errors - self.spread_path: List[FragmentSpreadNode] = [] + self.spread_path: list[FragmentSpreadNode] = [] # Position in the spread path - self.spread_path_index_by_name: Dict[str, int] = {} + self.spread_path_index_by_name: dict[str, int] = {} @staticmethod def enter_operation_definition(*_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/no_undefined_variables.py b/src/graphql/validation/rules/no_undefined_variables.py index a890473f..5c20d647 100644 --- a/src/graphql/validation/rules/no_undefined_variables.py +++ b/src/graphql/validation/rules/no_undefined_variables.py @@ -1,9 +1,15 @@ -from typing import Any, Set +"""No undefined variables rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import OperationDefinitionNode, VariableDefinitionNode from . import ValidationContext, ValidationRule +if TYPE_CHECKING: + from ...language import OperationDefinitionNode, VariableDefinitionNode + __all__ = ["NoUndefinedVariablesRule"] @@ -16,9 +22,9 @@ class NoUndefinedVariablesRule(ValidationRule): See https://spec.graphql.org/draft/#sec-All-Variable-Uses-Defined """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.defined_variable_names: Set[str] = set() + self.defined_variable_names: set[str] = set() def enter_operation_definition(self, *_args: Any) -> None: self.defined_variable_names.clear() diff --git a/src/graphql/validation/rules/no_unused_fragments.py b/src/graphql/validation/rules/no_unused_fragments.py index c2b46cf0..b79b5b07 100644 --- a/src/graphql/validation/rules/no_unused_fragments.py +++ b/src/graphql/validation/rules/no_unused_fragments.py @@ -1,11 +1,15 @@ -from typing import Any, List +"""No unused fragments rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import ( + SKIP, FragmentDefinitionNode, OperationDefinitionNode, VisitorAction, - SKIP, ) from . 
import ASTValidationContext, ASTValidationRule @@ -21,10 +25,10 @@ class NoUnusedFragmentsRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragments-Must-Be-Used """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.operation_defs: List[OperationDefinitionNode] = [] - self.fragment_defs: List[FragmentDefinitionNode] = [] + self.operation_defs: list[OperationDefinitionNode] = [] + self.fragment_defs: list[FragmentDefinitionNode] = [] def enter_operation_definition( self, node: OperationDefinitionNode, *_args: Any diff --git a/src/graphql/validation/rules/no_unused_variables.py b/src/graphql/validation/rules/no_unused_variables.py index b8770944..ec5d0b70 100644 --- a/src/graphql/validation/rules/no_unused_variables.py +++ b/src/graphql/validation/rules/no_unused_variables.py @@ -1,9 +1,15 @@ -from typing import Any, List, Set +"""No unused variables rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import OperationDefinitionNode, VariableDefinitionNode from . import ValidationContext, ValidationRule +if TYPE_CHECKING: + from ...language import OperationDefinitionNode, VariableDefinitionNode + __all__ = ["NoUnusedVariablesRule"] @@ -16,9 +22,9 @@ class NoUnusedVariablesRule(ValidationRule): See https://spec.graphql.org/draft/#sec-All-Variables-Used """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.variable_defs: List[VariableDefinitionNode] = [] + self.variable_defs: list[VariableDefinitionNode] = [] def enter_operation_definition(self, *_args: Any) -> None: self.variable_defs.clear() @@ -26,7 +32,7 @@ def enter_operation_definition(self, *_args: Any) -> None: def leave_operation_definition( self, operation: OperationDefinitionNode, *_args: Any ) -> None: - variable_name_used: Set[str] = set() + variable_name_used: set[str] = set() usages = self.context.get_recursive_variable_usages(operation) for usage in usages: diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 33c1b09e..97939e56 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -1,23 +1,25 @@ +"""Overlapping fields can be merged rule""" + +from __future__ import annotations + from itertools import chain -from typing import Any, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast from ...error import GraphQLError from ...language import ( + DirectiveNode, FieldNode, FragmentDefinitionNode, FragmentSpreadNode, InlineFragmentNode, - ObjectFieldNode, - ObjectValueNode, SelectionSetNode, + ValueNode, print_ast, ) from ...type import ( GraphQLCompositeType, GraphQLField, - GraphQLList, GraphQLNamedType, - GraphQLNonNull, GraphQLOutputType, get_named_type, is_interface_type, @@ -30,16 +32,19 @@ from ...utilities.sort_value_node import sort_value_node from . 
import ValidationContext, ValidationRule -MYPY = False +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + __all__ = ["OverlappingFieldsCanBeMergedRule"] -def reason_message(reason: "ConflictReasonMessage") -> str: +def reason_message(reason: ConflictReasonMessage) -> str: if isinstance(reason, list): return " and ".join( - f"subfields '{response_name}' conflict" - f" because {reason_message(sub_reason)}" + f"subfields '{response_name}' conflict because {reason_message(sub_reason)}" for response_name, sub_reason in reason ) return reason @@ -54,7 +59,7 @@ class OverlappingFieldsCanBeMergedRule(ValidationRule): See https://spec.graphql.org/draft/#sec-Field-Selection-Merging """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) # A memoization for when two fragments are compared "between" each other for # conflicts. Two fragments may be compared many times, so memoizing this can @@ -64,7 +69,7 @@ def __init__(self, context: ValidationContext): # A cache for the "field map" and list of fragment names found in any given # selection set. Selection sets may be asked for this information multiple # times, so this improves the performance of this validator. - self.cached_fields_and_fragment_names: Dict = {} + self.cached_fields_and_fragment_names: dict = {} def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> None: conflicts = find_conflicts_within_selection_set( @@ -86,18 +91,15 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N ) -Conflict = Tuple["ConflictReason", List[FieldNode], List[FieldNode]] +Conflict: TypeAlias = Tuple["ConflictReason", List[FieldNode], List[FieldNode]] # Field name and reason. -ConflictReason = Tuple[str, "ConflictReasonMessage"] +ConflictReason: TypeAlias = Tuple[str, "ConflictReasonMessage"] # Reason is a string, or a nested list of conflicts. -if MYPY: # recursive types not fully supported yet (/python/mypy/issues/731) - ConflictReasonMessage = Union[str, List] -else: - ConflictReasonMessage = Union[str, List[ConflictReason]] +ConflictReasonMessage: TypeAlias = Union[str, List[ConflictReason]] # Tuple defining a field node in a context. -NodeAndDef = Tuple[GraphQLCompositeType, FieldNode, Optional[GraphQLField]] +NodeAndDef: TypeAlias = Tuple[GraphQLCompositeType, FieldNode, Optional[GraphQLField]] # Dictionary of lists of those. -NodeAndDefCollection = Dict[str, List[NodeAndDef]] +NodeAndDefCollection: TypeAlias = Dict[str, List[NodeAndDef]] # Algorithm: @@ -115,7 +117,7 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N # A) Each selection set represented in the document first compares "within" its # collected set of fields, finding any conflicts between every pair of # overlapping fields. -# Note: This is the#only time* that a the fields "within" a set are compared +# Note: This is the *only time* that the fields "within" a set are compared # to each other. After this only fields "between" sets are compared. # # B) Also, if any fragment is referenced in a selection set, then a @@ -127,7 +129,7 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N # # D) When comparing "between" a set of fields and a referenced fragment, first # a comparison is made between each field in the original set of fields and -# each field in the the referenced set of fields. +# each field in the referenced set of fields. 
# # E) Also, if any fragment is referenced in the referenced selection set, # then a comparison is made "between" the original set of fields and the @@ -155,11 +157,11 @@ def enter_selection_set(self, selection_set: SelectionSetNode, *_args: Any) -> N def find_conflicts_within_selection_set( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", - parent_type: Optional[GraphQLNamedType], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, + parent_type: GraphQLNamedType | None, selection_set: SelectionSetNode, -) -> List[Conflict]: +) -> list[Conflict]: """Find conflicts within selection set. Find all conflicts found "within" a selection set, including those found via @@ -167,7 +169,7 @@ def find_conflicts_within_selection_set( Called when visiting each SelectionSet in the GraphQL Document. """ - conflicts: List[Conflict] = [] + conflicts: list[Conflict] = [] field_map, fragment_names = get_fields_and_fragment_names( context, cached_fields_and_fragment_names, parent_type, selection_set @@ -216,9 +218,9 @@ def find_conflicts_within_selection_set( def collect_conflicts_between_fields_and_fragment( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, are_mutually_exclusive: bool, field_map: NodeAndDefCollection, fragment_name: str, @@ -230,7 +232,7 @@ def collect_conflicts_between_fields_and_fragment( """ fragment = context.get_fragment(fragment_name) if not fragment: - return None + return field_map2, referenced_fragment_names = get_referenced_fields_and_fragment_names( context, cached_fields_and_fragment_names, fragment @@ -277,9 +279,9 @@ def collect_conflicts_between_fields_and_fragment( def collect_conflicts_between_fragments( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, are_mutually_exclusive: bool, fragment_name1: str, fragment_name2: str, @@ -303,7 +305,7 @@ def collect_conflicts_between_fragments( fragment1 = context.get_fragment(fragment_name1) fragment2 = context.get_fragment(fragment_name2) if not fragment1 or not fragment2: - return None + return field_map1, referenced_fragment_names1 = get_referenced_fields_and_fragment_names( context, cached_fields_and_fragment_names, fragment1 @@ -354,21 +356,21 @@ def collect_conflicts_between_fragments( def find_conflicts_between_sub_selection_sets( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, are_mutually_exclusive: bool, - parent_type1: Optional[GraphQLNamedType], + parent_type1: GraphQLNamedType | None, selection_set1: SelectionSetNode, - parent_type2: Optional[GraphQLNamedType], + parent_type2: GraphQLNamedType | None, selection_set2: SelectionSetNode, -) -> List[Conflict]: +) -> list[Conflict]: """Find conflicts between sub selection sets. Find all conflicts found between two selection sets, including those found via spreading in fragments. Called when determining if conflicts exist between the sub-fields of two overlapping fields. 
""" - conflicts: List[Conflict] = [] + conflicts: list[Conflict] = [] field_map1, fragment_names1 = get_fields_and_fragment_names( context, cached_fields_and_fragment_names, parent_type1, selection_set1 @@ -436,9 +438,9 @@ def find_conflicts_between_sub_selection_sets( def collect_conflicts_within( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, field_map: NodeAndDefCollection, ) -> None: """Collect all Conflicts "within" one collection of fields.""" @@ -469,9 +471,9 @@ def collect_conflicts_within( def collect_conflicts_between( context: ValidationContext, - conflicts: List[Conflict], - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + conflicts: list[Conflict], + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, parent_fields_are_mutually_exclusive: bool, field_map1: NodeAndDefCollection, field_map2: NodeAndDefCollection, @@ -508,13 +510,13 @@ def collect_conflicts_between( def find_conflict( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - compared_fragment_pairs: "PairSet", + cached_fields_and_fragment_names: dict, + compared_fragment_pairs: PairSet, parent_fields_are_mutually_exclusive: bool, response_name: str, field1: NodeAndDef, field2: NodeAndDef, -) -> Optional[Conflict]: +) -> Conflict | None: """Find conflict. Determines if there is a conflict between two particular fields, including comparing @@ -536,8 +538,8 @@ def find_conflict( ) # The return type for each field. - type1 = cast(Optional[GraphQLOutputType], def1 and def1.type) - type2 = cast(Optional[GraphQLOutputType], def2 and def2.type) + type1 = cast("Optional[GraphQLOutputType]", def1 and def1.type) + type2 = cast("Optional[GraphQLOutputType]", def2 and def2.type) if not are_mutually_exclusive: # Two aliases must refer to the same field. @@ -551,9 +553,18 @@ def find_conflict( ) # Two field calls must have the same arguments. 
- if stringify_arguments(node1) != stringify_arguments(node2): + if not same_arguments(node1, node2): return (response_name, "they have differing arguments"), [node1], [node2] + directives1 = node1.directives + directives2 = node2.directives + if not same_streams(directives1, directives2): + return ( + (response_name, "they have differing stream directives"), + [node1], + [node2], + ) + if type1 and type2 and do_types_conflict(type1, type2): return ( (response_name, f"they return conflicting types '{type1}' and '{type2}'"), @@ -582,14 +593,58 @@ def find_conflict( return None # no conflict -def stringify_arguments(field_node: FieldNode) -> str: - input_object_with_args = ObjectValueNode( - fields=tuple( - ObjectFieldNode(name=arg_node.name, value=arg_node.value) - for arg_node in field_node.arguments - ) - ) - return print_ast(sort_value_node(input_object_with_args)) +def same_arguments( + node1: FieldNode | DirectiveNode, node2: FieldNode | DirectiveNode +) -> bool: + args1 = node1.arguments + args2 = node2.arguments + + if not args1: + return not args2 + + if not args2: + return False + + if len(args1) != len(args2): + return False + + values2 = {arg.name.value: arg.value for arg in args2} + + for arg1 in args1: + value1 = arg1.value + value2 = values2.get(arg1.name.value) + if value2 is None or stringify_value(value1) != stringify_value(value2): + return False + + return True + + +def stringify_value(value: ValueNode) -> str: + return print_ast(sort_value_node(value)) + + +def get_stream_directive( + directives: Sequence[DirectiveNode], +) -> DirectiveNode | None: + for directive in directives: + if directive.name.value == "stream": + return directive + return None + + +def same_streams( + directives1: Sequence[DirectiveNode], directives2: Sequence[DirectiveNode] +) -> bool: + stream1 = get_stream_directive(directives1) + stream2 = get_stream_directive(directives2) + if not stream1 and not stream2: + # both fields do not have streams + return True + if stream1 and stream2: + # check if both fields have equivalent streams + return same_arguments(stream1, stream2) + # fields have a mix of stream and no stream + return False def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> bool: @@ -601,9 +656,7 @@ def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> boo """ if is_list_type(type1): return ( - do_types_conflict( - cast(GraphQLList, type1).of_type, cast(GraphQLList, type2).of_type - ) + do_types_conflict(type1.of_type, type2.of_type) if is_list_type(type2) else True ) @@ -611,9 +664,7 @@ def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> boo return True if is_non_null_type(type1): return ( - do_types_conflict( - cast(GraphQLNonNull, type1).of_type, cast(GraphQLNonNull, type2).of_type - ) + do_types_conflict(type1.of_type, type2.of_type) if is_non_null_type(type2) else True ) @@ -626,10 +677,10 @@ def do_types_conflict(type1: GraphQLOutputType, type2: GraphQLOutputType) -> boo def get_fields_and_fragment_names( context: ValidationContext, - cached_fields_and_fragment_names: Dict, - parent_type: Optional[GraphQLNamedType], + cached_fields_and_fragment_names: dict, + parent_type: GraphQLNamedType | None, selection_set: SelectionSetNode, -) -> Tuple[NodeAndDefCollection, List[str]]: +) -> tuple[NodeAndDefCollection, list[str]]: """Get fields and referenced fragment names Given a selection set, return the collection of fields (a mapping of response name @@ -639,7 +690,7 @@ def get_fields_and_fragment_names( cached = 
cached_fields_and_fragment_names.get(selection_set) if not cached: node_and_defs: NodeAndDefCollection = {} - fragment_names: Dict[str, bool] = {} + fragment_names: dict[str, bool] = {} collect_fields_and_fragment_names( context, parent_type, selection_set, node_and_defs, fragment_names ) @@ -650,9 +701,9 @@ def get_fields_and_fragment_names( def get_referenced_fields_and_fragment_names( context: ValidationContext, - cached_fields_and_fragment_names: Dict, + cached_fields_and_fragment_names: dict, fragment: FragmentDefinitionNode, -) -> Tuple[NodeAndDefCollection, List[str]]: +) -> tuple[NodeAndDefCollection, list[str]]: """Get referenced fields and nested fragment names Given a reference to a fragment, return the represented collection of fields as well @@ -671,16 +722,16 @@ def get_referenced_fields_and_fragment_names( def collect_fields_and_fragment_names( context: ValidationContext, - parent_type: Optional[GraphQLNamedType], + parent_type: GraphQLNamedType | None, selection_set: SelectionSetNode, node_and_defs: NodeAndDefCollection, - fragment_names: Dict[str, bool], + fragment_names: dict[str, bool], ) -> None: for selection in selection_set.selections: if isinstance(selection, FieldNode): field_name = selection.name.value field_def = ( - parent_type.fields.get(field_name) # type: ignore + parent_type.fields.get(field_name) if is_object_type(parent_type) or is_interface_type(parent_type) else None ) @@ -688,7 +739,7 @@ def collect_fields_and_fragment_names( if not node_and_defs.get(response_name): node_and_defs[response_name] = [] node_and_defs[response_name].append( - cast(NodeAndDef, (parent_type, selection, field_def)) + cast("NodeAndDef", (parent_type, selection, field_def)) ) elif isinstance(selection, FragmentSpreadNode): fragment_names[selection.name.value] = True @@ -709,8 +760,8 @@ def collect_fields_and_fragment_names( def subfield_conflicts( - conflicts: List[Conflict], response_name: str, node1: FieldNode, node2: FieldNode -) -> Optional[Conflict]: + conflicts: list[Conflict], response_name: str, node1: FieldNode, node2: FieldNode +) -> Conflict | None: """Check whether there are conflicts between sub-fields. Given a series of Conflicts which occurred between two sub-fields, generate a single @@ -733,7 +784,7 @@ class PairSet: __slots__ = ("_data",) - _data: Dict[str, Dict[str, bool]] + _data: dict[str, dict[str, bool]] def __init__(self) -> None: self._data = {} diff --git a/src/graphql/validation/rules/possible_fragment_spreads.py b/src/graphql/validation/rules/possible_fragment_spreads.py index 27c2ad9b..11748a47 100644 --- a/src/graphql/validation/rules/possible_fragment_spreads.py +++ b/src/graphql/validation/rules/possible_fragment_spreads.py @@ -1,11 +1,17 @@ -from typing import cast, Any, Optional +"""Possible fragment spread rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FragmentSpreadNode, InlineFragmentNode from ...type import GraphQLCompositeType, is_composite_type from ...utilities import do_types_overlap, type_from_ast from . 
import ValidationRule +if TYPE_CHECKING: + from ...language import FragmentSpreadNode, InlineFragmentNode + __all__ = ["PossibleFragmentSpreadsRule"] @@ -24,11 +30,7 @@ def enter_inline_fragment(self, node: InlineFragmentNode, *_args: Any) -> None: if ( is_composite_type(frag_type) and is_composite_type(parent_type) - and not do_types_overlap( - context.schema, - cast(GraphQLCompositeType, frag_type), - cast(GraphQLCompositeType, parent_type), - ) + and not do_types_overlap(context.schema, frag_type, parent_type) ): context.report_error( GraphQLError( @@ -56,11 +58,11 @@ def enter_fragment_spread(self, node: FragmentSpreadNode, *_args: Any) -> None: ) ) - def get_fragment_type(self, name: str) -> Optional[GraphQLCompositeType]: + def get_fragment_type(self, name: str) -> GraphQLCompositeType | None: context = self.context frag = context.get_fragment(name) if frag: type_ = type_from_ast(context.schema, frag.type_condition) if is_composite_type(type_): - return cast(GraphQLCompositeType, type_) + return type_ return None diff --git a/src/graphql/validation/rules/possible_type_extensions.py b/src/graphql/validation/rules/possible_type_extensions.py index 63c90dbc..e8eb349d 100644 --- a/src/graphql/validation/rules/possible_type_extensions.py +++ b/src/graphql/validation/rules/possible_type_extensions.py @@ -1,6 +1,10 @@ +"""Possible type extension rule""" + +from __future__ import annotations + import re from functools import partial -from typing import Any, Optional +from typing import Any from ...error import GraphQLError from ...language import TypeDefinitionNode, TypeExtensionNode @@ -24,7 +28,7 @@ class PossibleTypeExtensionsRule(SDLValidationRule): A type extension is only valid if the type is defined and has the same kind. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) self.schema = context.schema self.defined_types = { @@ -39,7 +43,7 @@ def check_extension(self, node: TypeExtensionNode, *_args: Any) -> None: def_node = self.defined_types.get(type_name) existing_type = schema.get_type(type_name) if schema else None - expected_kind: Optional[str] + expected_kind: str | None if def_node: expected_kind = def_kind_to_ext_kind(def_node.kind) elif existing_type: @@ -92,7 +96,8 @@ def type_to_ext_kind(type_: Any) -> str: return "input_object_type_extension" # Not reachable. All possible types have been considered. - raise TypeError(f"Unexpected type: {inspect(type_)}.") + msg = f"Unexpected type: {inspect(type_)}." # pragma: no cover + raise TypeError(msg) # pragma: no cover _type_names_for_extension_kinds = { diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index 99e23cea..9c98065e 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -1,7 +1,12 @@ -from typing import cast, Any, Dict, List, Union +"""Provided required arguments on directives rule""" + +from __future__ import annotations + +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( + SKIP, DirectiveDefinitionNode, DirectiveNode, FieldNode, @@ -9,13 +14,12 @@ NonNullTypeNode, TypeNode, VisitorAction, - SKIP, print_ast, ) from ...type import GraphQLArgument, is_required_argument, is_type, specified_directives from . 
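
# --- Illustrative sketch (not from the changeset): the PossibleTypeExtensionsRule
# above rejects an extension whose kind does not match the definition;
# build_schema surfaces SDL validation errors as a TypeError. The SDL is made up.
from graphql import build_schema

try:
    build_schema("""
        type Query { ok: Boolean }
        scalar UUID
        extend type UUID { ok: Boolean }
    """)
except TypeError as error:
    print(error)  # Cannot extend non-object type 'UUID'.
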
import ASTValidationRule, SDLValidationContext, ValidationContext -__all__ = ["ProvidedRequiredArgumentsRule", "ProvidedRequiredArgumentsOnDirectivesRule"] +__all__ = ["ProvidedRequiredArgumentsOnDirectivesRule", "ProvidedRequiredArgumentsRule"] class ProvidedRequiredArgumentsOnDirectivesRule(ASTValidationRule): @@ -27,17 +31,17 @@ class ProvidedRequiredArgumentsOnDirectivesRule(ASTValidationRule): For internal use only. """ - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - required_args_map: Dict[ - str, Dict[str, Union[GraphQLArgument, InputValueDefinitionNode]] + required_args_map: dict[ + str, dict[str, GraphQLArgument | InputValueDefinitionNode] ] = {} schema = context.schema defined_directives = schema.directives if schema else specified_directives - for directive in cast(List, defined_directives): + for directive in cast("List", defined_directives): required_args_map[directive.name] = { name: arg for name, arg in directive.args.items() @@ -59,7 +63,6 @@ def leave_directive(self, directive_node: DirectiveNode, *_args: Any) -> None: directive_name = directive_node.name.value required_args = self.required_args_map.get(directive_name) if required_args: - arg_nodes = directive_node.arguments or () arg_node_set = {arg.name.value for arg in arg_nodes} for arg_name in required_args: @@ -68,7 +71,7 @@ def leave_directive(self, directive_node: DirectiveNode, *_args: Any) -> None: arg_type_str = ( str(arg_type) if is_type(arg_type) - else print_ast(cast(TypeNode, arg_type)) + else print_ast(cast("TypeNode", arg_type)) ) self.report_error( GraphQLError( @@ -89,7 +92,7 @@ class ProvidedRequiredArgumentsRule(ProvidedRequiredArgumentsOnDirectivesRule): context: ValidationContext - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) def leave_field(self, field_node: FieldNode, *_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/scalar_leafs.py b/src/graphql/validation/rules/scalar_leafs.py index 174df4e4..73a51c78 100644 --- a/src/graphql/validation/rules/scalar_leafs.py +++ b/src/graphql/validation/rules/scalar_leafs.py @@ -1,10 +1,16 @@ -from typing import Any +"""Scalar leafs rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import FieldNode from ...type import get_named_type, is_leaf_type from . 
import ValidationRule +if TYPE_CHECKING: + from ...language import FieldNode + __all__ = ["ScalarLeafsRule"] diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 53a7218f..89235856 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -1,7 +1,11 @@ -from typing import Any, Dict, cast +"""Single field subscriptions rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError -from ...execution.collect_fields import collect_fields +from ...execution.collect_fields import FieldGroup, collect_fields from ...language import ( FieldNode, FragmentDefinitionNode, @@ -13,6 +17,10 @@ __all__ = ["SingleFieldSubscriptionsRule"] +def to_nodes(field_group: FieldGroup) -> list[FieldNode]: + return [field_details.node for field_details in field_group.fields] + + class SingleFieldSubscriptionsRule(ValidationRule): """Subscriptions must only include a single non-introspection field. @@ -31,31 +39,27 @@ def enter_operation_definition( subscription_type = schema.subscription_type if subscription_type: operation_name = node.name.value if node.name else None - variable_values: Dict[str, Any] = {} + variable_values: dict[str, Any] = {} document = self.context.document - fragments: Dict[str, FragmentDefinitionNode] = { + fragments: dict[str, FragmentDefinitionNode] = { definition.name.value: definition for definition in document.definitions if isinstance(definition, FragmentDefinitionNode) } - fields = collect_fields( + grouped_field_set = collect_fields( schema, fragments, variable_values, subscription_type, - node.selection_set, - ) - if len(fields) > 1: - field_selection_lists = list(fields.values()) - extra_field_selection_lists = field_selection_lists[1:] + node, + ).grouped_field_set + if len(grouped_field_set) > 1: + field_groups = list(grouped_field_set.values()) + extra_field_groups = field_groups[1:] extra_field_selection = [ - field - for fields in extra_field_selection_lists - for field in ( - fields - if isinstance(fields, list) - else [cast(FieldNode, fields)] - ) + node + for field_group in extra_field_groups + for node in to_nodes(field_group) ] self.report_error( GraphQLError( @@ -68,9 +72,8 @@ def enter_operation_definition( extra_field_selection, ) ) - for field_nodes in fields.values(): - field = field_nodes[0] - field_name = field.name.value + for field_group in grouped_field_set.values(): + field_name = to_nodes(field_group)[0].name.value if field_name.startswith("__"): self.report_error( GraphQLError( @@ -80,6 +83,6 @@ def enter_operation_definition( else f"Subscription '{operation_name}'" ) + " must not select an introspection top level field.", - field_nodes, + to_nodes(field_group), ) ) diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py new file mode 100644 index 00000000..03015cd0 --- /dev/null +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py @@ -0,0 +1,62 @@ +"""Stream directive on list field rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, cast + +from ...error import GraphQLError +from ...type import GraphQLStreamDirective, is_list_type, is_wrapping_type +from . 
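
# --- Illustrative sketch (not from the changeset): the reworked
# SingleFieldSubscriptionsRule walks the grouped field sets returned by the
# new collect_fields, but flags the same documents as before, e.g.:
from graphql import build_schema, parse, validate

sketch_schema = build_schema("""
    type Query { ok: Boolean }
    type Subscription { newMessage: String newPhoto: String }
""")
errors = validate(sketch_schema, parse("subscription S { newMessage newPhoto }"))
print(errors[0].message)
# Subscription 'S' must select only one top level field.
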
import ASTValidationRule, ValidationContext + +if TYPE_CHECKING: + from ...language import DirectiveNode, Node + +__all__ = ["StreamDirectiveOnListField"] + + +class StreamDirectiveOnListField(ASTValidationRule): + """Stream directives are used on list fields + + A GraphQL document is only valid if stream directives are used on list fields. + """ + + def enter_directive( + self, + node: DirectiveNode, + _key: Any, + _parent: Any, + _path: Any, + _ancestors: list[Node], + ) -> None: + context = cast("ValidationContext", self.context) + field_def = context.get_field_def() + parent_type = context.get_parent_type() + if ( + field_def + and parent_type + and node.name.value == GraphQLStreamDirective.name + and not ( + is_list_type(field_def.type) + or ( + is_wrapping_type(field_def.type) + and is_list_type(field_def.type.of_type) + ) + ) + ): + try: + field_name = next( + name + for name, field in parent_type.fields.items() # type: ignore + if field is field_def + ) + except StopIteration: # pragma: no cover + field_name = "" + else: + field_name = f" '{field_name}'" + self.report_error( + GraphQLError( + "Stream directive cannot be used on non-list" + f" field{field_name} on type '{parent_type.name}'.", + node, + ) + ) diff --git a/src/graphql/validation/rules/unique_argument_definition_names.py b/src/graphql/validation/rules/unique_argument_definition_names.py index ba0e13ac..b992577f 100644 --- a/src/graphql/validation/rules/unique_argument_definition_names.py +++ b/src/graphql/validation/rules/unique_argument_definition_names.py @@ -1,8 +1,13 @@ +"""Unique argument definition names rule""" + +from __future__ import annotations + from operator import attrgetter from typing import Any, Collection from ...error import GraphQLError from ...language import ( + SKIP, DirectiveDefinitionNode, FieldDefinitionNode, InputValueDefinitionNode, @@ -12,7 +17,6 @@ ObjectTypeDefinitionNode, ObjectTypeExtensionNode, VisitorAction, - SKIP, ) from ...pyutils import group_by from . import SDLValidationRule diff --git a/src/graphql/validation/rules/unique_argument_names.py b/src/graphql/validation/rules/unique_argument_names.py index 9afe1c09..124aa6e6 100644 --- a/src/graphql/validation/rules/unique_argument_names.py +++ b/src/graphql/validation/rules/unique_argument_names.py @@ -1,11 +1,17 @@ +"""Unique argument names rule""" + +from __future__ import annotations + from operator import attrgetter -from typing import Any, Collection +from typing import TYPE_CHECKING, Any, Collection from ...error import GraphQLError -from ...language import ArgumentNode, DirectiveNode, FieldNode from ...pyutils import group_by from . 
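
# --- Illustrative sketch (not from the changeset): the new
# StreamDirectiveOnListField rule above, run on its own against a made-up
# schema (the import path is the module added in this changeset):
from graphql import build_schema, parse, validate
from graphql.validation.rules.stream_directive_on_list_field import (
    StreamDirectiveOnListField,
)

sketch_schema = build_schema("type Query { name: String }")
sketch_doc = parse("{ name @stream(initialCount: 1) }")
errors = validate(sketch_schema, sketch_doc, [StreamDirectiveOnListField])
print(errors[0].message)
# Stream directive cannot be used on non-list field 'name' on type 'Query'.
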
import ASTValidationRule +if TYPE_CHECKING: + from ...language import ArgumentNode, DirectiveNode, FieldNode + __all__ = ["UniqueArgumentNamesRule"] @@ -21,7 +27,7 @@ class UniqueArgumentNamesRule(ASTValidationRule): def enter_field(self, node: FieldNode, *_args: Any) -> None: self.check_arg_uniqueness(node.arguments) - def enter_directive(self, node: DirectiveNode, *args: Any) -> None: + def enter_directive(self, node: DirectiveNode, *_args: Any) -> None: self.check_arg_uniqueness(node.arguments) def check_arg_uniqueness(self, argument_nodes: Collection[ArgumentNode]) -> None: diff --git a/src/graphql/validation/rules/unique_directive_names.py b/src/graphql/validation/rules/unique_directive_names.py index b25dafe1..24d8066f 100644 --- a/src/graphql/validation/rules/unique_directive_names.py +++ b/src/graphql/validation/rules/unique_directive_names.py @@ -1,7 +1,11 @@ -from typing import Any, Dict +"""Unique directive names rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError -from ...language import DirectiveDefinitionNode, NameNode, VisitorAction, SKIP +from ...language import SKIP, DirectiveDefinitionNode, NameNode, VisitorAction from . import SDLValidationContext, SDLValidationRule __all__ = ["UniqueDirectiveNamesRule"] @@ -13,9 +17,9 @@ class UniqueDirectiveNamesRule(SDLValidationRule): A GraphQL document is only valid if all defined directives have unique names. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) - self.known_directive_names: Dict[str, NameNode] = {} + self.known_directive_names: dict[str, NameNode] = {} self.schema = context.schema def enter_directive_definition( diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index 98e592ac..daab2935 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -1,5 +1,9 @@ +"""Unique directive names per location rule""" + +from __future__ import annotations + from collections import defaultdict -from typing import Any, Dict, List, Union, cast +from typing import Any, List, cast from ...error import GraphQLError from ...language import ( @@ -8,8 +12,6 @@ Node, SchemaDefinitionNode, SchemaExtensionNode, - TypeDefinitionNode, - TypeExtensionNode, is_type_definition_node, is_type_extension_node, ) @@ -28,15 +30,15 @@ class UniqueDirectivesPerLocationRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Directives-Are-Unique-Per-Location """ - context: Union[ValidationContext, SDLValidationContext] + context: ValidationContext | SDLValidationContext - def __init__(self, context: Union[ValidationContext, SDLValidationContext]): + def __init__(self, context: ValidationContext | SDLValidationContext) -> None: super().__init__(context) - unique_directive_map: Dict[str, bool] = {} + unique_directive_map: dict[str, bool] = {} schema = context.schema defined_directives = ( - schema.directives if schema else cast(List, specified_directives) + schema.directives if schema else cast("List", specified_directives) ) for directive in defined_directives: unique_directive_map[directive.name] = not directive.is_repeatable @@ -47,8 +49,8 @@ def __init__(self, context: Union[ValidationContext, SDLValidationContext]): unique_directive_map[def_.name.value] = not def_.repeatable self.unique_directive_map = unique_directive_map - 
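
# --- Illustrative sketch (not from the changeset): only an unused-argument
# rename above, so UniqueArgumentNamesRule still reports duplicates such as
# this made-up query:
from graphql import build_schema, parse, validate
from graphql.validation import UniqueArgumentNamesRule

sketch_schema = build_schema("type Query { f(x: Int): Int }")
sketch_doc = parse("{ f(x: 1, x: 2) }")
errors = validate(sketch_schema, sketch_doc, [UniqueArgumentNamesRule])
print(errors[0].message)  # There can be only one argument named 'x'.
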
self.schema_directives: Dict[str, DirectiveNode] = {} - self.type_directives_map: Dict[str, Dict[str, DirectiveNode]] = defaultdict( + self.schema_directives: dict[str, DirectiveNode] = {} + self.type_directives_map: dict[str, dict[str, DirectiveNode]] = defaultdict( dict ) @@ -58,12 +60,11 @@ def enter(self, node: Node, *_args: Any) -> None: directives = getattr(node, "directives", None) if not directives: return - directives = cast(List[DirectiveNode], directives) + directives = cast("List[DirectiveNode]", directives) if isinstance(node, (SchemaDefinitionNode, SchemaExtensionNode)): seen_directives = self.schema_directives elif is_type_definition_node(node) or is_type_extension_node(node): - node = cast(Union[TypeDefinitionNode, TypeExtensionNode], node) type_name = node.name.value seen_directives = self.type_directives_map[type_name] else: diff --git a/src/graphql/validation/rules/unique_enum_value_names.py b/src/graphql/validation/rules/unique_enum_value_names.py index 9be41e8c..1df28d83 100644 --- a/src/graphql/validation/rules/unique_enum_value_names.py +++ b/src/graphql/validation/rules/unique_enum_value_names.py @@ -1,9 +1,13 @@ +"""Unique enum value names rule""" + +from __future__ import annotations + from collections import defaultdict -from typing import cast, Any, Dict +from typing import Any from ...error import GraphQLError -from ...language import NameNode, EnumTypeDefinitionNode, VisitorAction, SKIP -from ...type import is_enum_type, GraphQLEnumType +from ...language import SKIP, EnumTypeDefinitionNode, NameNode, VisitorAction +from ...type import is_enum_type from . import SDLValidationContext, SDLValidationRule __all__ = ["UniqueEnumValueNamesRule"] @@ -15,11 +19,11 @@ class UniqueEnumValueNamesRule(SDLValidationRule): A GraphQL enum type is only valid if all its values are uniquely named. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_type_map = schema.type_map if schema else {} - self.known_value_names: Dict[str, Dict[str, NameNode]] = defaultdict(dict) + self.known_value_names: dict[str, dict[str, NameNode]] = defaultdict(dict) def check_value_uniqueness( self, node: EnumTypeDefinitionNode, *_args: Any @@ -32,10 +36,7 @@ def check_value_uniqueness( value_name = value_def.name.value existing_type = existing_type_map.get(type_name) - if ( - is_enum_type(existing_type) - and value_name in cast(GraphQLEnumType, existing_type).values - ): + if is_enum_type(existing_type) and value_name in existing_type.values: self.report_error( GraphQLError( f"Enum value '{type_name}.{value_name}'" diff --git a/src/graphql/validation/rules/unique_field_definition_names.py b/src/graphql/validation/rules/unique_field_definition_names.py index fe29d6d5..39df7203 100644 --- a/src/graphql/validation/rules/unique_field_definition_names.py +++ b/src/graphql/validation/rules/unique_field_definition_names.py @@ -1,9 +1,13 @@ +"""Unique field definition names rule""" + +from __future__ import annotations + from collections import defaultdict -from typing import Any, Dict +from typing import Any from ...error import GraphQLError -from ...language import NameNode, ObjectTypeDefinitionNode, VisitorAction, SKIP -from ...type import is_object_type, is_interface_type, is_input_object_type +from ...language import SKIP, NameNode, ObjectTypeDefinitionNode, VisitorAction +from ...type import is_input_object_type, is_interface_type, is_object_type from . 
import SDLValidationContext, SDLValidationRule __all__ = ["UniqueFieldDefinitionNamesRule"] @@ -15,11 +19,11 @@ class UniqueFieldDefinitionNamesRule(SDLValidationRule): A GraphQL complex type is only valid if all its fields are uniquely named. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema self.existing_type_map = schema.type_map if schema else {} - self.known_field_names: Dict[str, Dict[str, NameNode]] = defaultdict(dict) + self.known_field_names: dict[str, dict[str, NameNode]] = defaultdict(dict) def check_field_uniqueness( self, node: ObjectTypeDefinitionNode, *_args: Any @@ -43,8 +47,7 @@ def check_field_uniqueness( elif field_name in field_names: self.report_error( GraphQLError( - f"Field '{type_name}.{field_name}'" - " can only be defined once.", + f"Field '{type_name}.{field_name}' can only be defined once.", [field_names[field_name], field_def.name], ) ) diff --git a/src/graphql/validation/rules/unique_fragment_names.py b/src/graphql/validation/rules/unique_fragment_names.py index c951fc75..a4c16d86 100644 --- a/src/graphql/validation/rules/unique_fragment_names.py +++ b/src/graphql/validation/rules/unique_fragment_names.py @@ -1,7 +1,11 @@ -from typing import Any, Dict +"""Unique fragment names rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError -from ...language import NameNode, FragmentDefinitionNode, VisitorAction, SKIP +from ...language import SKIP, FragmentDefinitionNode, NameNode, VisitorAction from . import ASTValidationContext, ASTValidationRule __all__ = ["UniqueFragmentNamesRule"] @@ -15,9 +19,9 @@ class UniqueFragmentNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Fragment-Name-Uniqueness """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.known_fragment_names: Dict[str, NameNode] = {} + self.known_fragment_names: dict[str, NameNode] = {} @staticmethod def enter_operation_definition(*_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/unique_input_field_names.py b/src/graphql/validation/rules/unique_input_field_names.py index 76739a75..b9de90f7 100644 --- a/src/graphql/validation/rules/unique_input_field_names.py +++ b/src/graphql/validation/rules/unique_input_field_names.py @@ -1,9 +1,15 @@ -from typing import Any, Dict, List +"""Unique input field names rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import NameNode, ObjectFieldNode from . 
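
# --- Illustrative sketch (not from the changeset): the message reflowed above
# is what build_schema raises (as a TypeError) for a duplicated field in a
# made-up SDL document:
from graphql import build_schema

try:
    build_schema("type Query { name: String name: Int }")
except TypeError as error:
    print(error)  # Field 'Query.name' can only be defined once.
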
import ASTValidationContext, ASTValidationRule +if TYPE_CHECKING: + from ...language import NameNode, ObjectFieldNode + __all__ = ["UniqueInputFieldNamesRule"] @@ -16,10 +22,10 @@ class UniqueInputFieldNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Input-Object-Field-Uniqueness """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.known_names_stack: List[Dict[str, NameNode]] = [] - self.known_names: Dict[str, NameNode] = {} + self.known_names_stack: list[dict[str, NameNode]] = [] + self.known_names: dict[str, NameNode] = {} def enter_object_value(self, *_args: Any) -> None: self.known_names_stack.append(self.known_names) diff --git a/src/graphql/validation/rules/unique_operation_names.py b/src/graphql/validation/rules/unique_operation_names.py index 346ebf91..03af6335 100644 --- a/src/graphql/validation/rules/unique_operation_names.py +++ b/src/graphql/validation/rules/unique_operation_names.py @@ -1,7 +1,11 @@ -from typing import Any, Dict +"""Unique operation names rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError -from ...language import NameNode, OperationDefinitionNode, VisitorAction, SKIP +from ...language import SKIP, NameNode, OperationDefinitionNode, VisitorAction from . import ASTValidationContext, ASTValidationRule __all__ = ["UniqueOperationNamesRule"] @@ -15,9 +19,9 @@ class UniqueOperationNamesRule(ASTValidationRule): See https://spec.graphql.org/draft/#sec-Operation-Name-Uniqueness """ - def __init__(self, context: ASTValidationContext): + def __init__(self, context: ASTValidationContext) -> None: super().__init__(context) - self.known_operation_names: Dict[str, NameNode] = {} + self.known_operation_names: dict[str, NameNode] = {} def enter_operation_definition( self, node: OperationDefinitionNode, *_args: Any diff --git a/src/graphql/validation/rules/unique_operation_types.py b/src/graphql/validation/rules/unique_operation_types.py index 190963f7..da737751 100644 --- a/src/graphql/validation/rules/unique_operation_types.py +++ b/src/graphql/validation/rules/unique_operation_types.py @@ -1,15 +1,21 @@ -from typing import Any, Dict, Optional, Union +"""Unique operation types rule""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any from ...error import GraphQLError from ...language import ( - OperationTypeDefinitionNode, + SKIP, OperationType, + OperationTypeDefinitionNode, SchemaDefinitionNode, SchemaExtensionNode, VisitorAction, - SKIP, ) -from ...type import GraphQLObjectType + +if TYPE_CHECKING: + from ...type import GraphQLObjectType from . import SDLValidationContext, SDLValidationRule __all__ = ["UniqueOperationTypesRule"] @@ -21,15 +27,13 @@ class UniqueOperationTypesRule(SDLValidationRule): A GraphQL document is only valid if it has only one type per operation. 
""" - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) schema = context.schema - self.defined_operation_types: Dict[ + self.defined_operation_types: dict[ OperationType, OperationTypeDefinitionNode ] = {} - self.existing_operation_types: Dict[ - OperationType, Optional[GraphQLObjectType] - ] = ( + self.existing_operation_types: dict[OperationType, GraphQLObjectType | None] = ( { OperationType.QUERY: schema.query_type, OperationType.MUTATION: schema.mutation_type, @@ -41,7 +45,7 @@ def __init__(self, context: SDLValidationContext): self.schema = schema def check_operation_types( - self, node: Union[SchemaDefinitionNode, SchemaExtensionNode], *_args: Any + self, node: SchemaDefinitionNode | SchemaExtensionNode, *_args: Any ) -> VisitorAction: for operation_type in node.operation_types or []: operation = operation_type.operation diff --git a/src/graphql/validation/rules/unique_type_names.py b/src/graphql/validation/rules/unique_type_names.py index 7082a0fb..7f7dee8f 100644 --- a/src/graphql/validation/rules/unique_type_names.py +++ b/src/graphql/validation/rules/unique_type_names.py @@ -1,7 +1,11 @@ -from typing import Any, Dict +"""Unique type names rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError -from ...language import NameNode, TypeDefinitionNode, VisitorAction, SKIP +from ...language import SKIP, NameNode, TypeDefinitionNode, VisitorAction from . import SDLValidationContext, SDLValidationRule __all__ = ["UniqueTypeNamesRule"] @@ -13,9 +17,9 @@ class UniqueTypeNamesRule(SDLValidationRule): A GraphQL document is only valid if all defined types have unique names. """ - def __init__(self, context: SDLValidationContext): + def __init__(self, context: SDLValidationContext) -> None: super().__init__(context) - self.known_type_names: Dict[str, NameNode] = {} + self.known_type_names: dict[str, NameNode] = {} self.schema = context.schema def check_type_name(self, node: TypeDefinitionNode, *_args: Any) -> VisitorAction: diff --git a/src/graphql/validation/rules/unique_variable_names.py b/src/graphql/validation/rules/unique_variable_names.py index 4111cb32..28e78653 100644 --- a/src/graphql/validation/rules/unique_variable_names.py +++ b/src/graphql/validation/rules/unique_variable_names.py @@ -1,11 +1,17 @@ +"""Unique variable names rule""" + +from __future__ import annotations + from operator import attrgetter -from typing import Any +from typing import TYPE_CHECKING, Any from ...error import GraphQLError -from ...language import OperationDefinitionNode from ...pyutils import group_by from . 
import ASTValidationRule +if TYPE_CHECKING: + from ...language import OperationDefinitionNode + __all__ = ["UniqueVariableNamesRule"] diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 44dc6a0d..ea4c4a3c 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -1,22 +1,29 @@ -from typing import cast, Any +"""Value literals of correct type rule""" + +from __future__ import annotations + +from typing import Any, Mapping, cast from ...error import GraphQLError from ...language import ( + SKIP, BooleanValueNode, EnumValueNode, FloatValueNode, IntValueNode, - NullValueNode, ListValueNode, + NonNullTypeNode, + NullValueNode, ObjectFieldNode, ObjectValueNode, StringValueNode, ValueNode, + VariableDefinitionNode, + VariableNode, VisitorAction, - SKIP, print_ast, ) -from ...pyutils import did_you_mean, suggestion_list, Undefined +from ...pyutils import Undefined, did_you_mean, suggestion_list from ...type import ( GraphQLInputObjectType, GraphQLScalarType, @@ -28,7 +35,7 @@ is_non_null_type, is_required_input_field, ) -from . import ValidationRule +from . import ValidationContext, ValidationRule __all__ = ["ValuesOfCorrectTypeRule"] @@ -42,10 +49,22 @@ class ValuesOfCorrectTypeRule(ValidationRule): See https://spec.graphql.org/draft/#sec-Values-of-Correct-Type """ + def __init__(self, context: ValidationContext) -> None: + super().__init__(context) + self.variable_definitions: dict[str, VariableDefinitionNode] = {} + + def enter_operation_definition(self, *_args: Any) -> None: + self.variable_definitions.clear() + + def enter_variable_definition( + self, definition: VariableDefinitionNode, *_args: Any + ) -> None: + self.variable_definitions[definition.variable.name.value] = definition + def enter_list_value(self, node: ListValueNode, *_args: Any) -> VisitorAction: # Note: TypeInfo will traverse into a list's item type, so look to the parent # input type to check if it is a list. - type_ = get_nullable_type(self.context.get_parent_input_type()) # type: ignore + type_ = get_nullable_type(self.context.get_parent_input_type()) if not is_list_type(type_): self.is_valid_value_node(node) return SKIP # Don't traverse further. @@ -56,7 +75,6 @@ def enter_object_value(self, node: ObjectValueNode, *_args: Any) -> VisitorActio if not is_input_object_type(type_): self.is_valid_value_node(node) return SKIP # Don't traverse further. - type_ = cast(GraphQLInputObjectType, type_) # Ensure every required field exists. 
field_node_map = {field.name.value: field for field in node.fields}
         for field_name, field_def in type_.fields.items():
@@ -70,13 +88,16 @@ def enter_object_value(self, node: ObjectValueNode, *_args: Any) -> VisitorActio
                     node,
                 )
             )
+        if type_.is_one_of:
+            validate_one_of_input_object(
+                self.context, node, type_, field_node_map, self.variable_definitions
+            )
         return None

     def enter_object_field(self, node: ObjectFieldNode, *_args: Any) -> None:
         parent_type = get_named_type(self.context.get_parent_input_type())
         field_type = self.context.get_input_type()
         if not field_type and is_input_object_type(parent_type):
-            parent_type = cast(GraphQLInputObjectType, parent_type)
             suggestions = suggestion_list(node.name.value, list(parent_type.fields))
             self.report_error(
                 GraphQLError(
@@ -136,7 +157,7 @@ def is_valid_value_node(self, node: ValueNode) -> None:

         # Scalars determine if a literal value is valid via `parse_literal()` which may
         # throw or return an invalid value to indicate failure.
-        type_ = cast(GraphQLScalarType, type_)
+        type_ = cast("GraphQLScalarType", type_)
         try:
             parse_result = type_.parse_literal(node)
             if parse_result is Undefined:
@@ -149,7 +170,7 @@ def is_valid_value_node(self, node: ValueNode) -> None:
             )
         except GraphQLError as error:
             self.report_error(error)
-        except Exception as error:
+        except Exception as error:  # noqa: BLE001
             self.report_error(
                 GraphQLError(
                     f"Expected value of type '{location_type}',"
@@ -161,3 +182,51 @@ def is_valid_value_node(self, node: ValueNode) -> None:
             )

         return
+
+
+def validate_one_of_input_object(
+    context: ValidationContext,
+    node: ObjectValueNode,
+    type_: GraphQLInputObjectType,
+    field_node_map: Mapping[str, ObjectFieldNode],
+    variable_definitions: dict[str, VariableDefinitionNode],
+) -> None:
+    keys = list(field_node_map)
+    is_not_exactly_one_field = len(keys) != 1
+
+    if is_not_exactly_one_field:
+        context.report_error(
+            GraphQLError(
+                f"OneOf Input Object '{type_.name}' must specify exactly one key.",
+                node,
+            )
+        )
+        return
+
+    object_field_node = field_node_map.get(keys[0])
+    value = object_field_node.value if object_field_node else None
+    is_null_literal = not value or isinstance(value, NullValueNode)
+
+    if is_null_literal:
+        context.report_error(
+            GraphQLError(
+                f"Field '{type_.name}.{keys[0]}' must be non-null.",
+                node,
+            )
+        )
+        return
+
+    is_variable = value and isinstance(value, VariableNode)
+    if is_variable:
+        variable_name = cast("VariableNode", value).name.value
+        definition = variable_definitions[variable_name]
+        is_nullable_variable = not isinstance(definition.type, NonNullTypeNode)
+
+        if is_nullable_variable:
+            context.report_error(
+                GraphQLError(
+                    f"Variable '{variable_name}' must be non-nullable"
+                    f" to be used for OneOf Input Object '{type_.name}'.",
+                    node,
+                )
+            )
diff --git a/src/graphql/validation/rules/variables_are_input_types.py b/src/graphql/validation/rules/variables_are_input_types.py
index 449ad290..552fe91b 100644
--- a/src/graphql/validation/rules/variables_are_input_types.py
+++ b/src/graphql/validation/rules/variables_are_input_types.py
@@ -1,3 +1,7 @@
+"""Variables are input types rule"""
+
+from __future__ import annotations
+
 from typing import Any

 from ...error import GraphQLError
diff --git a/src/graphql/validation/rules/variables_in_allowed_position.py b/src/graphql/validation/rules/variables_in_allowed_position.py
index 49d3b416..1a8fd2e2 100644
--- a/src/graphql/validation/rules/variables_in_allowed_position.py
+++ b/src/graphql/validation/rules/variables_in_allowed_position.py
@@ -1,4 +1,8 @@
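
# --- Illustrative sketch (not from the changeset): the OneOf checks added
# above, assuming this alpha derives is_one_of from an @oneOf directive
# declared in the SDL; the schema and field names are made up.
from graphql import build_schema, parse, validate

sketch_schema = build_schema("""
    directive @oneOf on INPUT_OBJECT
    input PetInput @oneOf { cat: String dog: String }
    type Query { pick(pet: PetInput): String }
""")

# More than one key is rejected:
errors = validate(sketch_schema, parse('{ pick(pet: {cat: "Tom", dog: "Rex"}) }'))
print(errors[0].message)
# OneOf Input Object 'PetInput' must specify exactly one key.

# A nullable variable is rejected as well:
sketch_doc = parse("query ($cat: String) { pick(pet: {cat: $cat}) }")
errors = validate(sketch_schema, sketch_doc)
print(errors[0].message)
# Variable 'cat' must be non-nullable to be used for OneOf Input Object 'PetInput'.
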
-from typing import Any, Dict, Optional, cast +"""Variables in allowed position rule""" + +from __future__ import annotations + +from typing import Any from ...error import GraphQLError from ...language import ( @@ -8,8 +12,8 @@ VariableDefinitionNode, ) from ...pyutils import Undefined -from ...type import GraphQLNonNull, GraphQLSchema, GraphQLType, is_non_null_type -from ...utilities import type_from_ast, is_type_sub_type_of +from ...type import GraphQLSchema, GraphQLType, is_non_null_type +from ...utilities import is_type_sub_type_of, type_from_ast from . import ValidationContext, ValidationRule __all__ = ["VariablesInAllowedPositionRule"] @@ -23,9 +27,9 @@ class VariablesInAllowedPositionRule(ValidationRule): See https://spec.graphql.org/draft/#sec-All-Variable-Usages-are-Allowed """ - def __init__(self, context: ValidationContext): + def __init__(self, context: ValidationContext) -> None: super().__init__(context) - self.var_def_map: Dict[str, Any] = {} + self.var_def_map: dict[str, Any] = {} def enter_operation_definition(self, *_args: Any) -> None: self.var_def_map.clear() @@ -69,7 +73,7 @@ def enter_variable_definition( def allowed_variable_usage( schema: GraphQLSchema, var_type: GraphQLType, - var_default_value: Optional[ValueNode], + var_default_value: ValueNode | None, location_type: GraphQLType, location_default_value: Any, ) -> bool: @@ -87,7 +91,6 @@ def allowed_variable_usage( has_location_default_value = location_default_value is not Undefined if not has_non_null_variable_default_value and not has_location_default_value: return False - location_type = cast(GraphQLNonNull, location_type) nullable_location_type = location_type.of_type return is_type_sub_type_of(schema, var_type, nullable_location_type) return is_type_sub_type_of(schema, var_type, location_type) diff --git a/src/graphql/validation/specified_rules.py b/src/graphql/validation/specified_rules.py index db990aeb..e7f7c54e 100644 --- a/src/graphql/validation/specified_rules.py +++ b/src/graphql/validation/specified_rules.py @@ -1,96 +1,119 @@ -from typing import Tuple, Type +"""Specified rules""" -from .rules import ASTValidationRule +from __future__ import annotations -# Spec Section: "Executable Definitions" -from .rules.executable_definitions import ExecutableDefinitionsRule +from typing import TYPE_CHECKING -# Spec Section: "Operation Name Uniqueness" -from .rules.unique_operation_names import UniqueOperationNamesRule +# Spec Section: "Defer And Stream Directive Labels Are Unique" +from .rules.defer_stream_directive_label import DeferStreamDirectiveLabel -# Spec Section: "Lone Anonymous Operation" -from .rules.lone_anonymous_operation import LoneAnonymousOperationRule +# Spec Section: "Defer And Stream Directives Are Used On Valid Root Field" +from .rules.defer_stream_directive_on_root_field import DeferStreamDirectiveOnRootField -# Spec Section: "Subscriptions with Single Root Field" -from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule +# Spec Section: "Defer And Stream Directives Are Used On Valid Operations" +from .rules.defer_stream_directive_on_valid_operations_rule import ( + DeferStreamDirectiveOnValidOperationsRule, +) -# Spec Section: "Fragment Spread Type Existence" -from .rules.known_type_names import KnownTypeNamesRule +# Spec Section: "Executable Definitions" +from .rules.executable_definitions import ExecutableDefinitionsRule + +# Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" +from .rules.fields_on_correct_type import FieldsOnCorrectTypeRule # Spec 
Section: "Fragments on Composite Types" from .rules.fragments_on_composite_types import FragmentsOnCompositeTypesRule -# Spec Section: "Variables are Input Types" -from .rules.variables_are_input_types import VariablesAreInputTypesRule - -# Spec Section: "Leaf Field Selections" -from .rules.scalar_leafs import ScalarLeafsRule - -# Spec Section: "Field Selections on Objects, Interfaces, and Unions Types" -from .rules.fields_on_correct_type import FieldsOnCorrectTypeRule +# Spec Section: "Argument Names" +from .rules.known_argument_names import ( + KnownArgumentNamesOnDirectivesRule, + KnownArgumentNamesRule, +) -# Spec Section: "Fragment Name Uniqueness" -from .rules.unique_fragment_names import UniqueFragmentNamesRule +# Spec Section: "Directives Are Defined" +from .rules.known_directives import KnownDirectivesRule # Spec Section: "Fragment spread target defined" from .rules.known_fragment_names import KnownFragmentNamesRule -# Spec Section: "Fragments must be used" -from .rules.no_unused_fragments import NoUnusedFragmentsRule +# Spec Section: "Fragment Spread Type Existence" +from .rules.known_type_names import KnownTypeNamesRule -# Spec Section: "Fragment spread is possible" -from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule +# Spec Section: "Lone Anonymous Operation" +from .rules.lone_anonymous_operation import LoneAnonymousOperationRule + +# Schema definition language: +from .rules.lone_schema_definition import LoneSchemaDefinitionRule # Spec Section: "Fragments must not form cycles" from .rules.no_fragment_cycles import NoFragmentCyclesRule -# Spec Section: "Variable Uniqueness" -from .rules.unique_variable_names import UniqueVariableNamesRule - # Spec Section: "All Variable Used Defined" from .rules.no_undefined_variables import NoUndefinedVariablesRule +# Spec Section: "Fragments must be used" +from .rules.no_unused_fragments import NoUnusedFragmentsRule + # Spec Section: "All Variables Used" from .rules.no_unused_variables import NoUnusedVariablesRule -# Spec Section: "Directives Are Defined" -from .rules.known_directives import KnownDirectivesRule +# Spec Section: "Field Selection Merging" +from .rules.overlapping_fields_can_be_merged import OverlappingFieldsCanBeMergedRule -# Spec Section: "Directives Are Unique Per Location" -from .rules.unique_directives_per_location import UniqueDirectivesPerLocationRule +# Spec Section: "Fragment spread is possible" +from .rules.possible_fragment_spreads import PossibleFragmentSpreadsRule +from .rules.possible_type_extensions import PossibleTypeExtensionsRule -# Spec Section: "Argument Names" -from .rules.known_argument_names import KnownArgumentNamesRule -from .rules.known_argument_names import KnownArgumentNamesOnDirectivesRule +# Spec Section: "Argument Optionality" +from .rules.provided_required_arguments import ( + ProvidedRequiredArgumentsOnDirectivesRule, + ProvidedRequiredArgumentsRule, +) -# Spec Section: "Argument Uniqueness" -from .rules.unique_argument_names import UniqueArgumentNamesRule +# Spec Section: "Leaf Field Selections" +from .rules.scalar_leafs import ScalarLeafsRule -# Spec Section: "Value Type Correctness" -from .rules.values_of_correct_type import ValuesOfCorrectTypeRule +# Spec Section: "Subscriptions with Single Root Field" +from .rules.single_field_subscriptions import SingleFieldSubscriptionsRule -# Spec Section: "Argument Optionality" -from .rules.provided_required_arguments import ProvidedRequiredArgumentsRule -from .rules.provided_required_arguments import 
ProvidedRequiredArgumentsOnDirectivesRule +# Spec Section: "Stream Directives Are Used On List Fields" +from .rules.stream_directive_on_list_field import StreamDirectiveOnListField -# Spec Section: "All Variable Usages Are Allowed" -from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule +# Spec Section: "Argument Uniqueness" +from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule +from .rules.unique_argument_names import UniqueArgumentNamesRule +from .rules.unique_directive_names import UniqueDirectiveNamesRule -# Spec Section: "Field Selection Merging" -from .rules.overlapping_fields_can_be_merged import OverlappingFieldsCanBeMergedRule +# Spec Section: "Directives Are Unique Per Location" +from .rules.unique_directives_per_location import UniqueDirectivesPerLocationRule +from .rules.unique_enum_value_names import UniqueEnumValueNamesRule +from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule + +# Spec Section: "Fragment Name Uniqueness" +from .rules.unique_fragment_names import UniqueFragmentNamesRule # Spec Section: "Input Object Field Uniqueness" from .rules.unique_input_field_names import UniqueInputFieldNamesRule -# Schema definition language: -from .rules.lone_schema_definition import LoneSchemaDefinitionRule +# Spec Section: "Operation Name Uniqueness" +from .rules.unique_operation_names import UniqueOperationNamesRule from .rules.unique_operation_types import UniqueOperationTypesRule from .rules.unique_type_names import UniqueTypeNamesRule -from .rules.unique_enum_value_names import UniqueEnumValueNamesRule -from .rules.unique_field_definition_names import UniqueFieldDefinitionNamesRule -from .rules.unique_argument_definition_names import UniqueArgumentDefinitionNamesRule -from .rules.unique_directive_names import UniqueDirectiveNamesRule -from .rules.possible_type_extensions import PossibleTypeExtensionsRule + +# Spec Section: "Variable Uniqueness" +from .rules.unique_variable_names import UniqueVariableNamesRule + +# Spec Section: "Value Type Correctness" +from .rules.values_of_correct_type import ValuesOfCorrectTypeRule + +# Spec Section: "Variables are Input Types" +from .rules.variables_are_input_types import VariablesAreInputTypesRule + +# Spec Section: "All Variable Usages Are Allowed" +from .rules.variables_in_allowed_position import VariablesInAllowedPositionRule + +if TYPE_CHECKING: + from .rules import ASTValidationRule __all__ = ["specified_rules", "specified_sdl_rules"] @@ -100,7 +123,7 @@ # The order of the rules in this list has been adjusted to lead to the # most clear output when encountering multiple validation errors. -specified_rules: Tuple[Type[ASTValidationRule], ...] = ( +specified_rules: tuple[type[ASTValidationRule], ...] = ( ExecutableDefinitionsRule, UniqueOperationNamesRule, LoneAnonymousOperationRule, @@ -120,6 +143,10 @@ NoUnusedVariablesRule, KnownDirectivesRule, UniqueDirectivesPerLocationRule, + DeferStreamDirectiveOnRootField, + DeferStreamDirectiveOnValidOperationsRule, + DeferStreamDirectiveLabel, + StreamDirectiveOnListField, KnownArgumentNamesRule, UniqueArgumentNamesRule, ValuesOfCorrectTypeRule, @@ -134,7 +161,7 @@ most clear output when encountering multiple validation errors. """ -specified_sdl_rules: Tuple[Type[ASTValidationRule], ...] = ( +specified_sdl_rules: tuple[type[ASTValidationRule], ...] 
= ( LoneSchemaDefinitionRule, UniqueOperationTypesRule, UniqueTypeNamesRule, diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 13dc5243..8e59821c 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -1,28 +1,44 @@ -from typing import Collection, List, Optional, Type +"""Validation""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Collection from ..error import GraphQLError from ..language import DocumentNode, ParallelVisitor, visit from ..type import GraphQLSchema, assert_valid_schema -from ..pyutils import inspect, is_collection from ..utilities import TypeInfo, TypeInfoVisitor -from .rules import ASTValidationRule from .specified_rules import specified_rules, specified_sdl_rules from .validation_context import SDLValidationContext, ValidationContext -__all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] +if TYPE_CHECKING: + from .rules import ASTValidationRule +__all__ = [ + "ValidationAbortedError", + "assert_valid_sdl", + "assert_valid_sdl_extension", + "validate", + "validate_sdl", +] -class ValidationAbortedError(RuntimeError): + +class ValidationAbortedError(GraphQLError): """Error when a validation has been aborted (error limit reached).""" +validation_aborted_error = ValidationAbortedError( + "Too many validation errors, error limit reached. Validation aborted." +) + + def validate( schema: GraphQLSchema, document_ast: DocumentNode, - rules: Optional[Collection[Type[ASTValidationRule]]] = None, - max_errors: Optional[int] = None, - type_info: Optional[TypeInfo] = None, -) -> List[GraphQLError]: + rules: Collection[type[ASTValidationRule]] | None = None, + max_errors: int | None = None, + type_info: TypeInfo | None = None, +) -> list[GraphQLError]: """Implements the "Validation" section of the spec. Validation runs synchronously, returning a list of encountered errors, or an empty @@ -41,38 +57,20 @@ def validate( Providing a custom TypeInfo instance is deprecated and will be removed in v3.3. """ - if not document_ast or not isinstance(document_ast, DocumentNode): - raise TypeError("Must provide document.") # If the schema used for validation is invalid, throw an error. assert_valid_schema(schema) if max_errors is None: max_errors = 100 - elif not isinstance(max_errors, int): - raise TypeError("The maximum number of errors must be passed as an int.") if type_info is None: type_info = TypeInfo(schema) - elif not isinstance(type_info, TypeInfo): - raise TypeError(f"Not a TypeInfo object: {inspect(type_info)}.") if rules is None: rules = specified_rules - elif not is_collection(rules) or not all( - isinstance(rule, type) and issubclass(rule, ASTValidationRule) for rule in rules - ): - raise TypeError( - "Rules must be specified as a collection of ASTValidationRule subclasses." - ) - errors: List[GraphQLError] = [] + errors: list[GraphQLError] = [] def on_error(error: GraphQLError) -> None: - if len(errors) >= max_errors: # type: ignore - errors.append( - GraphQLError( - "Too many validation errors, error limit reached." - " Validation aborted." 
- ) - ) - raise ValidationAbortedError + if len(errors) >= max_errors: + raise validation_aborted_error errors.append(error) context = ValidationContext(schema, document_ast, type_info, on_error) @@ -85,20 +83,20 @@ def on_error(error: GraphQLError) -> None: try: visit(document_ast, TypeInfoVisitor(type_info, ParallelVisitor(visitors))) except ValidationAbortedError: - pass + errors.append(validation_aborted_error) return errors def validate_sdl( document_ast: DocumentNode, - schema_to_extend: Optional[GraphQLSchema] = None, - rules: Optional[Collection[Type[ASTValidationRule]]] = None, -) -> List[GraphQLError]: + schema_to_extend: GraphQLSchema | None = None, + rules: Collection[type[ASTValidationRule]] | None = None, +) -> list[GraphQLError]: """Validate an SDL document. For internal use only. """ - errors: List[GraphQLError] = [] + errors: list[GraphQLError] = [] context = SDLValidationContext(document_ast, schema_to_extend, errors.append) if rules is None: rules = specified_sdl_rules @@ -113,7 +111,6 @@ def assert_valid_sdl(document_ast: DocumentNode) -> None: Utility function which asserts a SDL document is valid by throwing an error if it is invalid. """ - errors = validate_sdl(document_ast) if errors: raise TypeError("\n\n".join(error.message for error in errors)) @@ -127,7 +124,6 @@ def assert_valid_sdl_extension( Utility function which asserts a SDL document is valid by throwing an error if it is invalid. """ - errors = validate_sdl(document_ast, schema) if errors: raise TypeError("\n\n".join(error.message for error in errors)) diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index 931a19d9..055b4231 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -1,6 +1,16 @@ -from typing import Any, Callable, Dict, List, NamedTuple, Optional, Set, Union, cast +"""Validation context""" + +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Any, + Callable, + NamedTuple, + Union, + cast, +) -from ..error import GraphQLError from ..language import ( DocumentNode, FragmentDefinitionNode, @@ -12,18 +22,27 @@ VisitorAction, visit, ) -from ..type import ( - GraphQLArgument, - GraphQLCompositeType, - GraphQLDirective, - GraphQLEnumValue, - GraphQLField, - GraphQLInputType, - GraphQLOutputType, - GraphQLSchema, -) from ..utilities import TypeInfo, TypeInfoVisitor +if TYPE_CHECKING: + from ..error import GraphQLError + from ..type import ( + GraphQLArgument, + GraphQLCompositeType, + GraphQLDirective, + GraphQLEnumValue, + GraphQLField, + GraphQLInputType, + GraphQLOutputType, + GraphQLSchema, + ) + +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + __all__ = [ "ASTValidationContext", "SDLValidationContext", @@ -32,21 +51,23 @@ "VariableUsageVisitor", ] -NodeWithSelectionSet = Union[OperationDefinitionNode, FragmentDefinitionNode] +NodeWithSelectionSet: TypeAlias = Union[OperationDefinitionNode, FragmentDefinitionNode] class VariableUsage(NamedTuple): + """Variable usage""" + node: VariableNode - type: Optional[GraphQLInputType] + type: GraphQLInputType | None default_value: Any class VariableUsageVisitor(Visitor): """Visitor adding all variable usages to a given list.""" - usages: List[VariableUsage] + usages: list[VariableUsage] - def __init__(self, type_info: TypeInfo): + def __init__(self, type_info: TypeInfo) -> None: super().__init__() self.usages = [] self._append_usage = 
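
# --- Illustrative sketch (not from the changeset): with the reworked error
# limit, the shared ValidationAbortedError (now a GraphQLError subclass) ends
# up as the last entry of the returned list instead of a locally created error:
from graphql import build_schema, parse, validate

sketch_schema = build_schema("type Query { ok: Boolean }")
errors = validate(sketch_schema, parse("{ a b c }"), max_errors=1)
print([error.message for error in errors])
# ["Cannot query field 'a' on type 'Query'.",
#  "Too many validation errors, error limit reached. Validation aborted."]
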
self.usages.append @@ -74,10 +95,10 @@ class ASTValidationContext: document: DocumentNode - _fragments: Optional[Dict[str, FragmentDefinitionNode]] - _fragment_spreads: Dict[SelectionSetNode, List[FragmentSpreadNode]] - _recursively_referenced_fragments: Dict[ - OperationDefinitionNode, List[FragmentDefinitionNode] + _fragments: dict[str, FragmentDefinitionNode] | None + _fragment_spreads: dict[SelectionSetNode, list[FragmentSpreadNode]] + _recursively_referenced_fragments: dict[ + OperationDefinitionNode, list[FragmentDefinitionNode] ] def __init__( @@ -95,7 +116,7 @@ def on_error(self, error: GraphQLError) -> None: def report_error(self, error: GraphQLError) -> None: self.on_error(error) - def get_fragment(self, name: str) -> Optional[FragmentDefinitionNode]: + def get_fragment(self, name: str) -> FragmentDefinitionNode | None: fragments = self._fragments if fragments is None: fragments = { @@ -107,7 +128,7 @@ def get_fragment(self, name: str) -> Optional[FragmentDefinitionNode]: self._fragments = fragments return fragments.get(name) - def get_fragment_spreads(self, node: SelectionSetNode) -> List[FragmentSpreadNode]: + def get_fragment_spreads(self, node: SelectionSetNode) -> list[FragmentSpreadNode]: spreads = self._fragment_spreads.get(node) if spreads is None: spreads = [] @@ -122,7 +143,7 @@ def get_fragment_spreads(self, node: SelectionSetNode) -> List[FragmentSpreadNod append_spread(selection) else: set_to_visit = cast( - NodeWithSelectionSet, selection + "NodeWithSelectionSet", selection ).selection_set if set_to_visit: append_set(set_to_visit) @@ -131,12 +152,12 @@ def get_fragment_spreads(self, node: SelectionSetNode) -> List[FragmentSpreadNod def get_recursively_referenced_fragments( self, operation: OperationDefinitionNode - ) -> List[FragmentDefinitionNode]: + ) -> list[FragmentDefinitionNode]: fragments = self._recursively_referenced_fragments.get(operation) if fragments is None: fragments = [] append_fragment = fragments.append - collected_names: Set[str] = set() + collected_names: set[str] = set() add_name = collected_names.add nodes_to_visit = [operation.selection_set] append_node = nodes_to_visit.append @@ -165,12 +186,12 @@ class SDLValidationContext(ASTValidationContext): rule. 
""" - schema: Optional[GraphQLSchema] + schema: GraphQLSchema | None def __init__( self, ast: DocumentNode, - schema: Optional[GraphQLSchema], + schema: GraphQLSchema | None, on_error: Callable[[GraphQLError], None], ) -> None: super().__init__(ast, on_error) @@ -188,8 +209,8 @@ class ValidationContext(ASTValidationContext): schema: GraphQLSchema _type_info: TypeInfo - _variable_usages: Dict[NodeWithSelectionSet, List[VariableUsage]] - _recursive_variable_usages: Dict[OperationDefinitionNode, List[VariableUsage]] + _variable_usages: dict[NodeWithSelectionSet, list[VariableUsage]] + _recursive_variable_usages: dict[OperationDefinitionNode, list[VariableUsage]] def __init__( self, @@ -204,7 +225,7 @@ def __init__( self._variable_usages = {} self._recursive_variable_usages = {} - def get_variable_usages(self, node: NodeWithSelectionSet) -> List[VariableUsage]: + def get_variable_usages(self, node: NodeWithSelectionSet) -> list[VariableUsage]: usages = self._variable_usages.get(node) if usages is None: usage_visitor = VariableUsageVisitor(self._type_info) @@ -215,7 +236,7 @@ def get_variable_usages(self, node: NodeWithSelectionSet) -> List[VariableUsage] def get_recursive_variable_usages( self, operation: OperationDefinitionNode - ) -> List[VariableUsage]: + ) -> list[VariableUsage]: usages = self._recursive_variable_usages.get(operation) if usages is None: get_variable_usages = self.get_variable_usages @@ -225,26 +246,26 @@ def get_recursive_variable_usages( self._recursive_variable_usages[operation] = usages return usages - def get_type(self) -> Optional[GraphQLOutputType]: + def get_type(self) -> GraphQLOutputType | None: return self._type_info.get_type() - def get_parent_type(self) -> Optional[GraphQLCompositeType]: + def get_parent_type(self) -> GraphQLCompositeType | None: return self._type_info.get_parent_type() - def get_input_type(self) -> Optional[GraphQLInputType]: + def get_input_type(self) -> GraphQLInputType | None: return self._type_info.get_input_type() - def get_parent_input_type(self) -> Optional[GraphQLInputType]: + def get_parent_input_type(self) -> GraphQLInputType | None: return self._type_info.get_parent_input_type() - def get_field_def(self) -> Optional[GraphQLField]: + def get_field_def(self) -> GraphQLField | None: return self._type_info.get_field_def() - def get_directive(self) -> Optional[GraphQLDirective]: + def get_directive(self) -> GraphQLDirective | None: return self._type_info.get_directive() - def get_argument(self) -> Optional[GraphQLArgument]: + def get_argument(self) -> GraphQLArgument | None: return self._type_info.get_argument() - def get_enum_value(self) -> Optional[GraphQLEnumValue]: + def get_enum_value(self) -> GraphQLEnumValue | None: return self._type_info.get_enum_value() diff --git a/src/graphql/version.py b/src/graphql/version.py index 424a9851..311c74a0 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -1,12 +1,16 @@ +"""GraphQL-core version number""" + +from __future__ import annotations + import re from typing import NamedTuple -__all__ = ["version", "version_info", "version_js", "version_info_js"] +__all__ = ["version", "version_info", "version_info_js", "version_js"] -version = "3.2.2" +version = "3.3.0a7" -version_js = "16.4.0" +version_js = "17.0.0a3" _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)") @@ -20,7 +24,7 @@ class VersionInfo(NamedTuple): serial: int @classmethod - def from_str(cls, v: str) -> "VersionInfo": + def from_str(cls, v: str) -> VersionInfo: groups = _re_version.match(v).groups() # type: 
diff --git a/tests/benchmarks/__init__.py b/tests/benchmarks/__init__.py
index 0f0a83c1..4142e203 100644
--- a/tests/benchmarks/__init__.py
+++ b/tests/benchmarks/__init__.py
@@ -1,6 +1,6 @@
 """Benchmarks for graphql

-Benchmarks are disabled (only executed as tests) by default in setup.cfg.
+Benchmarks are disabled (only executed as tests) by default in pyproject.toml.
 You can enable them with --benchmark-enable if you want to execute them.

 E.g. in order to execute all the benchmarks with tox using Python 3.9::
diff --git a/tests/benchmarks/test_async_iterable.py b/tests/benchmarks/test_async_iterable.py
new file mode 100644
index 00000000..d4a01166
--- /dev/null
+++ b/tests/benchmarks/test_async_iterable.py
@@ -0,0 +1,33 @@
+import asyncio
+
+from graphql import ExecutionResult, build_schema, execute, parse
+from graphql.pyutils import is_awaitable
+
+schema = build_schema("type Query { listField: [String] }")
+document = parse("{ listField }")
+
+
+class Data:
+    # noinspection PyPep8Naming
+    @staticmethod
+    async def listField(_info):
+        for index in range(1000):
+            yield index
+
+
+async def execute_async() -> ExecutionResult:
+    result = execute(schema, document, Data())
+    assert is_awaitable(result)
+    return await result
+
+
+def test_execute_async_iterable_list_field(benchmark):
+    # Note: we are creating the async loop outside of the benchmark code so that
+    # the setup is not included in the benchmark timings
+    loop = asyncio.events.new_event_loop()
+    asyncio.events.set_event_loop(loop)
+    result = benchmark(lambda: loop.run_until_complete(execute_async()))
+    asyncio.events.set_event_loop(None)
+    loop.close()
+    assert not result.errors
+    assert result.data == {"listField": [str(index) for index in range(1000)]}
diff --git a/tests/benchmarks/test_build_ast_schema.py b/tests/benchmarks/test_build_ast_schema.py
index b41626fc..cf201dec 100644
--- a/tests/benchmarks/test_build_ast_schema.py
+++ b/tests/benchmarks/test_build_ast_schema.py
@@ -1,4 +1,4 @@
-from graphql import parse, build_ast_schema, GraphQLSchema
+from graphql import GraphQLSchema, build_ast_schema, parse

 from ..fixtures import big_schema_sdl  # noqa: F401
diff --git a/tests/benchmarks/test_build_client_schema.py b/tests/benchmarks/test_build_client_schema.py
index a8627f8d..e56ca7a1 100644
--- a/tests/benchmarks/test_build_client_schema.py
+++ b/tests/benchmarks/test_build_client_schema.py
@@ -1,10 +1,11 @@
-from graphql import build_client_schema, GraphQLSchema
+from graphql import GraphQLSchema, build_client_schema

 from ..fixtures import big_schema_introspection_result  # noqa: F401


 def test_build_schema_from_introspection(
-    benchmark, big_schema_introspection_result  # noqa: F811
+    benchmark,
+    big_schema_introspection_result,  # noqa: F811
 ):
     schema: GraphQLSchema = benchmark(
         lambda: build_client_schema(
diff --git a/tests/benchmarks/test_execution_async.py b/tests/benchmarks/test_execution_async.py
index de7de2e5..70de8729 100644
--- a/tests/benchmarks/test_execution_async.py
+++ b/tests/benchmarks/test_execution_async.py
@@ -1,13 +1,13 @@
 import asyncio
+
 from graphql import (
-    GraphQLSchema,
-    GraphQLObjectType,
     GraphQLField,
+    GraphQLObjectType,
+    GraphQLSchema,
     GraphQLString,
     graphql,
 )

-
 user = GraphQLObjectType(
     name="User",
     fields={
@@ -17,7 +17,7 @@
 )


-async def resolve_user(obj, info):
+async def resolve_user(_obj, _info):
     return {
         "id": "1",
         "name": "Sarah",
diff --git a/tests/benchmarks/test_execution_sync.py
b/tests/benchmarks/test_execution_sync.py index bfdb7cc2..7ae78ea9 100644 --- a/tests/benchmarks/test_execution_sync.py +++ b/tests/benchmarks/test_execution_sync.py @@ -1,12 +1,11 @@ from graphql import ( - GraphQLSchema, - GraphQLObjectType, GraphQLField, + GraphQLObjectType, + GraphQLSchema, GraphQLString, graphql_sync, ) - user = GraphQLObjectType( name="User", fields={ @@ -16,7 +15,7 @@ ) -def resolve_user(obj, info): +def resolve_user(_obj, _info): return { "id": "1", "name": "Sarah", diff --git a/tests/benchmarks/test_graphql_schema.py b/tests/benchmarks/test_graphql_schema.py new file mode 100644 index 00000000..b7b6ed46 --- /dev/null +++ b/tests/benchmarks/test_graphql_schema.py @@ -0,0 +1,11 @@ +from graphql import GraphQLSchema, build_schema, print_schema + +from ..fixtures import big_schema_sdl # noqa: F401 + + +def test_recreate_a_graphql_schema(benchmark, big_schema_sdl): # noqa: F811 + schema = build_schema(big_schema_sdl, assume_valid=True) + recreated_schema: GraphQLSchema = benchmark( + lambda: GraphQLSchema(**schema.to_kwargs()) + ) + assert print_schema(schema) == print_schema(recreated_schema) diff --git a/tests/benchmarks/test_introspection_from_schema.py b/tests/benchmarks/test_introspection_from_schema.py index f67a1f58..4c30d965 100644 --- a/tests/benchmarks/test_introspection_from_schema.py +++ b/tests/benchmarks/test_introspection_from_schema.py @@ -1,4 +1,4 @@ -from graphql import build_schema, parse, execute_sync +from graphql import build_schema, execute_sync, parse from graphql.utilities import get_introspection_query from ..fixtures import big_schema_sdl # noqa: F401 diff --git a/tests/benchmarks/test_parser.py b/tests/benchmarks/test_parser.py index 7d059f2b..7db8ef2f 100644 --- a/tests/benchmarks/test_parser.py +++ b/tests/benchmarks/test_parser.py @@ -1,8 +1,12 @@ -from graphql import parse, DocumentNode +from graphql import DocumentNode, parse from ..fixtures import kitchen_sink_query # noqa: F401 def test_parse_kitchen_sink(benchmark, kitchen_sink_query): # noqa: F811 - query = benchmark(lambda: parse(kitchen_sink_query)) + query = benchmark( + lambda: parse( + kitchen_sink_query, experimental_client_controlled_nullability=True + ) + ) assert isinstance(query, DocumentNode) diff --git a/tests/benchmarks/test_repeated_fields.py b/tests/benchmarks/test_repeated_fields.py new file mode 100644 index 00000000..daba6169 --- /dev/null +++ b/tests/benchmarks/test_repeated_fields.py @@ -0,0 +1,25 @@ +from graphql import ( + GraphQLField, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, + graphql_sync, +) + +schema = GraphQLSchema( + query=GraphQLObjectType( + name="Query", + fields={ + "hello": GraphQLField( + GraphQLString, + resolve=lambda _obj, _info: "world", + ) + }, + ) +) +source = f"{{ {'hello ' * 250}}}" + + +def test_many_repeated_fields(benchmark): + result = benchmark(lambda: graphql_sync(schema, source)) + assert result == ({"hello": "world"}, None) diff --git a/tests/benchmarks/test_visit.py b/tests/benchmarks/test_visit.py index 4a91f0c0..4e7a85a2 100644 --- a/tests/benchmarks/test_visit.py +++ b/tests/benchmarks/test_visit.py @@ -1,5 +1,5 @@ from graphql import parse -from graphql.language import visit, Visitor, ParallelVisitor +from graphql.language import ParallelVisitor, Visitor, visit from ..fixtures import big_schema_sdl # noqa: F401 @@ -23,5 +23,5 @@ def test_visit_all_ast_nodes(benchmark, big_schema_sdl): # noqa: F811 def test_visit_all_ast_nodes_in_parallel(benchmark, big_schema_sdl): # noqa: F811 document_ast = 
parse(big_schema_sdl) visitor = DummyVisitor() - parallel_visitor = ParallelVisitor([visitor] * 50) + parallel_visitor = ParallelVisitor([visitor] * 25) benchmark(lambda: visit(document_ast, parallel_visitor)) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index 6c4689da..03b85dcf 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -1,19 +1,18 @@ -from typing import cast, List, Union +from __future__ import annotations -from pytest import raises +from typing import cast from graphql.error import GraphQLError from graphql.language import ( - parse, Node, - OperationDefinitionNode, ObjectTypeDefinitionNode, + OperationDefinitionNode, Source, + parse, ) from ..utils import dedent - source = Source( dedent( """ @@ -26,8 +25,9 @@ ast = parse(source) operation_node = ast.definitions[0] -operation_node = cast(OperationDefinitionNode, operation_node) -assert operation_node and operation_node.kind == "operation_definition" +operation_node = cast("OperationDefinitionNode", operation_node) +assert operation_node +assert operation_node.kind == "operation_definition" field_node = operation_node.selection_set.selections[0] assert field_node @@ -206,7 +206,7 @@ def serializes_to_include_message_and_locations(): } def serializes_to_include_path(): - path: List[Union[int, str]] = ["path", 3, "to", "field"] + path: list[int | str] = ["path", 3, "to", "field"] e = GraphQLError("msg", path=path) assert e.path is path assert repr(e) == "GraphQLError('msg', path=['path', 3, 'to', 'field'])" @@ -220,11 +220,11 @@ def serializes_to_include_all_standard_fields(): assert str(e_short) == "msg" assert repr(e_short) == "GraphQLError('msg')" - path: List[Union[str, int]] = ["path", 2, "field"] + path: list[str | int] = ["path", 2, "field"] extensions = {"foo": "bar "} e_full = GraphQLError("msg", field_node, None, None, path, None, extensions) assert str(e_full) == ( - "msg\n\nGraphQL request:2:3\n" "1 | {\n2 | field\n | ^\n3 | }" + "msg\n\nGraphQL request:2:3\n1 | {\n2 | field\n | ^\n3 | }" ) assert repr(e_full) == ( "GraphQLError('msg', locations=[SourceLocation(line=2, column=3)]," @@ -242,25 +242,21 @@ def repr_includes_extensions(): assert repr(e) == "GraphQLError('msg', extensions={'foo': 'bar'})" def always_stores_path_as_list(): - path: List[Union[int, str]] = ["path", 3, "to", "field"] + path: list[int | str] = ["path", 3, "to", "field"] e = GraphQLError("msg,", path=tuple(path)) assert isinstance(e.path, list) assert e.path == path def is_comparable(): e1 = GraphQLError("msg,", path=["field", 1]) - assert e1 == e1 + assert e1 == e1 # noqa: PLR0124 assert e1 == e1.formatted - assert not e1 != e1 - assert not e1 != e1.formatted e2 = GraphQLError("msg,", path=["field", 1]) assert e1 == e2 - assert not e1 != e2 - assert e2.path and e2.path[1] == 1 + assert e2.path + assert e2.path[1] == 1 e2.path[1] = 2 - assert not e1 == e2 assert e1 != e2 - assert not e1 == e2.formatted assert e1 != e2.formatted def is_hashable(): @@ -273,17 +274,6 @@ def hashes_are_unique_per_instance(): def describe_to_string(): - def deprecated_prints_an_error_using_print_error(): - # noinspection PyProtectedMember - from graphql.error.graphql_error import print_error - - error = GraphQLError("Error") - assert print_error(error) == "Error" - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - print_error(Exception) # type:
ignore - assert str(exc_info.value) == "Expected a GraphQLError." - def prints_an_error_without_location(): error = GraphQLError("Error without location") assert str(error) == "Error without location" @@ -309,8 +299,10 @@ def prints_an_error_with_nodes_from_different_sources(): ) ) op_a = doc_a.definitions[0] - op_a = cast(ObjectTypeDefinitionNode, op_a) - assert op_a and op_a.kind == "object_type_definition" and op_a.fields + op_a = cast("ObjectTypeDefinitionNode", op_a) + assert op_a + assert op_a.kind == "object_type_definition" + assert op_a.fields field_a = op_a.fields[0] doc_b = parse( Source( @@ -325,8 +317,10 @@ def prints_an_error_with_nodes_from_different_sources(): ) ) op_b = doc_b.definitions[0] - op_b = cast(ObjectTypeDefinitionNode, op_b) - assert op_b and op_b.kind == "object_type_definition" and op_b.fields + op_b = cast("ObjectTypeDefinitionNode", op_b) + assert op_b + assert op_b.kind == "object_type_definition" + assert op_b.fields field_b = op_b.fields[0] error = GraphQLError( @@ -353,21 +347,8 @@ def prints_an_error_with_nodes_from_different_sources(): def describe_formatted(): - def deprecated_formats_an_error_using_format_error(): - # noinspection PyProtectedMember - from graphql.error.graphql_error import format_error - - error = GraphQLError("Example Error") - assert format_error(error) == { - "message": "Example Error", - } - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - format_error(Exception) # type: ignore - assert str(exc_info.value) == "Expected a GraphQLError." - def formats_graphql_error(): - path: List[Union[int, str]] = ["one", 2] + path: list[int | str] = ["one", 2] extensions = {"ext": None} error = GraphQLError( "test message", @@ -400,7 +381,7 @@ def uses_default_message(): } def includes_path(): - path: List[Union[int, str]] = ["path", 3, "to", "field"] + path: list[int | str] = ["path", 3, "to", "field"] error = GraphQLError("msg", path=path) assert error.formatted == {"message": "msg", "path": path} @@ -412,14 +393,14 @@ def includes_extension_fields(): } def can_be_created_from_dict(): - args = dict( - nodes=[operation_node], - source=source, - positions=[6], - path=["path", 2, "a"], - original_error=Exception("I like turtles"), - extensions=dict(hee="I like turtles"), - ) + args = { + "nodes": [operation_node], + "source": source, + "positions": [6], + "path": ["path", 2, "a"], + "original_error": Exception("I like turtles"), + "extensions": {"hee": "I like turtles"}, + } error = GraphQLError("msg", **args) # type: ignore assert error.formatted == { "message": "msg", diff --git a/tests/error/test_located_error.py b/tests/error/test_located_error.py index 4fd9acd9..f22f6fd4 100644 --- a/tests/error/test_located_error.py +++ b/tests/error/test_located_error.py @@ -1,4 +1,4 @@ -from typing import cast, Any +from typing import Any, cast from graphql.error import GraphQLError, located_error @@ -11,7 +11,7 @@ def throws_without_an_original_error(): def passes_graphql_error_through(): path = ["path", 3, "to", "field"] - e = GraphQLError("msg", None, None, None, cast(Any, path)) + e = GraphQLError("msg", None, None, None, cast("Any", path)) assert located_error(e, [], []) == e def passes_graphql_error_ish_through(): @@ -21,7 +21,7 @@ def passes_graphql_error_ish_through(): def does_not_pass_through_elasticsearch_like_errors(): e = Exception("I am from elasticsearch") - cast(Any, e).path = "/something/feed/_search" + cast("Any", e).path = "/something/feed/_search" assert located_error(e, [], []) is not e def 
handles_lazy_error_messages(): diff --git a/tests/error/test_print_location.py b/tests/error/test_print_location.py index 69f6143e..46b3497d 100644 --- a/tests/error/test_print_location.py +++ b/tests/error/test_print_location.py @@ -1,4 +1,4 @@ -from graphql.language import print_source_location, Source, SourceLocation +from graphql.language import Source, SourceLocation, print_source_location from ..utils import dedent diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index efc130f1..ddb01345 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -1,10 +1,12 @@ -from inspect import isawaitable -from typing import Any, NamedTuple, Optional +from __future__ import annotations -from pytest import mark +from typing import Any, NamedTuple -from graphql.execution import execute, execute_sync, ExecutionResult +import pytest + +from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse +from graphql.pyutils import is_awaitable from graphql.type import ( GraphQLBoolean, GraphQLField, @@ -20,15 +22,15 @@ def sync_and_async(spec): """Decorator for running a test synchronously and asynchronously.""" - return mark.asyncio( - mark.parametrize("sync", (True, False), ids=("sync", "async"))(spec) + return pytest.mark.asyncio( + pytest.mark.parametrize("sync", [True, False], ids=("sync", "async"))(spec) ) def access_variants(spec): """Decorator for tests with dict and object access, including inheritance.""" - return mark.asyncio( - mark.parametrize("access", ("dict", "object", "inheritance"))(spec) + return pytest.mark.asyncio( + pytest.mark.parametrize("access", ["dict", "object", "inheritance"])(spec) ) @@ -40,10 +42,8 @@ async def execute_query( assert isinstance(schema, GraphQLSchema) assert isinstance(query, str) document = parse(query) - result = (execute_sync if sync else execute)( - schema, document, root_value - ) # type: ignore - if not sync and isawaitable(result): + result = (execute_sync if sync else execute)(schema, document, root_value) + if not sync and is_awaitable(result): result = await result assert isinstance(result, ExecutionResult) return result @@ -81,13 +81,11 @@ async def type_error(*_args): class Dog(NamedTuple): - name: str woofs: bool class Cat(NamedTuple): - name: str meows: bool @@ -412,7 +410,6 @@ async def resolve_type_can_throw(sync): ) def describe_using_typename_on_source_object(): - expected = ( { "pets": [ @@ -454,11 +451,11 @@ class RootValueAsObject: class Pet: __typename = "Pet" - name: Optional[str] = None + name: str | None = None class DogPet(Pet): __typename = "Dog" - woofs: Optional[bool] = None + woofs: bool | None = None class Odie(DogPet): name = "Odie" @@ -466,7 +463,7 @@ class Odie(DogPet): class CatPet(Pet): __typename = "Cat" - meows: Optional[bool] = None + meows: bool | None = None class Tabby(CatPet): pass @@ -483,7 +480,6 @@ class RootValueWithInheritance: assert False, f"Unknown access variant: {access}" # pragma: no cover def describe_union_type(): - schema = build_schema( """ type Query { @@ -533,7 +529,7 @@ def describe_interface_type(): interface Pet { name: String - } + } type Cat implements Pet { name: String diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index f5b6554a..bf1859a2 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,6 +1,18 @@ -from graphql.execution import execute, ExecutionContext +from inspect import isasyncgen + +import pytest + +from 
graphql.execution import ExecutionContext, execute, subscribe from graphql.language import parse -from graphql.type import GraphQLSchema, GraphQLObjectType, GraphQLString, GraphQLField +from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString + +try: + anext # noqa: B018 +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() def describe_customize_execution(): @@ -31,11 +43,110 @@ def uses_a_custom_execution_context_class(): ) class TestExecutionContext(ExecutionContext): - def execute_field(self, parent_type, source, field_nodes, path): - result = super().execute_field(parent_type, source, field_nodes, path) + def __init__(self, *args, **kwargs): + assert kwargs.pop("custom_arg", None) == "baz" + super().__init__(*args, **kwargs) + + def execute_field( + self, + parent_type, + source, + field_group, + path, + incremental_data_record, + defer_map, + ): + result = super().execute_field( + parent_type, + source, + field_group, + path, + incremental_data_record, + defer_map, + ) return result * 2 # type: ignore - assert execute(schema, query, execution_context_class=TestExecutionContext) == ( + assert execute( + schema, + query, + execution_context_class=TestExecutionContext, + custom_arg="baz", + ) == ( {"foo": "barbar"}, None, ) + + +def describe_customize_subscription(): + @pytest.mark.asyncio + async def uses_a_custom_subscribe_field_resolver(): + schema = GraphQLSchema( + query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), + subscription=GraphQLObjectType( + "Subscription", {"foo": GraphQLField(GraphQLString)} + ), + ) + + class Root: + @staticmethod + async def custom_foo(): + yield {"foo": "FooValue"} + + subscription = subscribe( + schema, + document=parse("subscription { foo }"), + root_value=Root(), + subscribe_field_resolver=lambda root, _info: root.custom_foo(), + ) + assert isasyncgen(subscription) + + assert await anext(subscription) == ( + {"foo": "FooValue"}, + None, + ) + + await subscription.aclose() + + @pytest.mark.asyncio + async def uses_a_custom_execution_context_class(): + class TestExecutionContext(ExecutionContext): + def __init__(self, *args, **kwargs): + assert kwargs.pop("custom_arg", None) == "baz" + super().__init__(*args, **kwargs) + + def build_resolve_info(self, *args, **kwargs): + resolve_info = super().build_resolve_info(*args, **kwargs) + resolve_info.context["foo"] = "bar" + return resolve_info + + async def generate_foo(_obj, info): + yield info.context["foo"] + + def resolve_foo(message, _info): + return message + + schema = GraphQLSchema( + query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), + subscription=GraphQLObjectType( + "Subscription", + { + "foo": GraphQLField( + GraphQLString, + resolve=resolve_foo, + subscribe=generate_foo, + ) + }, + ), + ) + + document = parse("subscription { foo }") + subscription = subscribe( + schema, + document, + context_value={}, + execution_context_class=TestExecutionContext, + custom_arg="baz", + ) + assert isasyncgen(subscription) + + assert await anext(subscription) == ({"foo": "bar"}, None) diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py new file mode 100644 index 00000000..51133100 --- /dev/null +++ b/tests/execution/test_defer.py @@ -0,0 +1,2325 @@ +from __future__ import annotations + +from asyncio import sleep +from typing import Any, AsyncGenerator, NamedTuple, 
cast + +import pytest + +from graphql.error import GraphQLError +from graphql.execution import ( + ExecutionResult, + ExperimentalIncrementalExecutionResults, + IncrementalDeferResult, + IncrementalResult, + InitialIncrementalExecutionResult, + SubsequentIncrementalExecutionResult, + execute, + experimental_execute_incrementally, +) +from graphql.execution.incremental_publisher import ( + CompletedResult, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + PendingResult, + StreamItemsRecord, + StreamRecord, +) +from graphql.language import DocumentNode, parse +from graphql.pyutils import Path, is_awaitable +from graphql.type import ( + GraphQLField, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +) + +friend_type = GraphQLObjectType( + "Friend", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + + +class Friend(NamedTuple): + id: int + name: str + + +friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] + +deeper_object = GraphQLObjectType( + "DeeperObject", + { + "foo": GraphQLField(GraphQLString), + "bar": GraphQLField(GraphQLString), + "baz": GraphQLField(GraphQLString), + "bak": GraphQLField(GraphQLString), + }, +) + +nested_object = GraphQLObjectType( + "NestedObject", + {"deeperObject": GraphQLField(deeper_object), "name": GraphQLField(GraphQLString)}, +) + +another_nested_object = GraphQLObjectType( + "AnotherNestedObject", {"deeperObject": GraphQLField(deeper_object)} +) + +hero = { + "name": "Luke", + "id": 1, + "friends": friends, + "nestedObject": nested_object, + "anotherNestedObject": another_nested_object, +} + +c = GraphQLObjectType( + "c", + { + "d": GraphQLField(GraphQLString), + "nonNullErrorField": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + +e = GraphQLObjectType( + "e", + { + "f": GraphQLField(GraphQLString), + }, +) + +b = GraphQLObjectType( + "b", + { + "c": GraphQLField(c), + "e": GraphQLField(e), + }, +) + +a = GraphQLObjectType( + "a", + { + "b": GraphQLField(b), + "someField": GraphQLField(GraphQLString), + }, +) + +g = GraphQLObjectType( + "g", + { + "h": GraphQLField(GraphQLString), + }, +) + +hero_type = GraphQLObjectType( + "Hero", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), + "friends": GraphQLField(GraphQLList(friend_type)), + "nestedObject": GraphQLField(nested_object), + "anotherNestedObject": GraphQLField(another_nested_object), + }, +) + +query = GraphQLObjectType( + "Query", + {"hero": GraphQLField(hero_type), "a": GraphQLField(a), "g": GraphQLField(g)}, +) + +schema = GraphQLSchema(query) + + +class Resolvers: + """Various resolver functions for testing.""" + + @staticmethod + def null(_info) -> None: + """A resolver returning a null value synchronously.""" + return + + @staticmethod + async def null_async(_info) -> None: + """A resolver returning a null value asynchronously.""" + return + + @staticmethod + async def slow(_info) -> str: + """Simulate a slow async resolver returning a non-null value.""" + await sleep(0) + return "slow" + + @staticmethod + async def slow_null(_info) -> None: + """Simulate a slow async resolver returning a null value.""" + await sleep(0) + + @staticmethod + def bad(_info) -> str: + """Simulate a bad resolver raising an error.""" + raise RuntimeError("bad") + + @staticmethod + async def first_friend(_info) -> AsyncGenerator[Friend, None]: + """An
async generator yielding the first friend.""" + yield friends[0] + + +async def complete(document: DocumentNode, root_value: Any = None) -> Any: + result = experimental_execute_incrementally( + schema, document, root_value or {"hero": hero} + ) + if is_awaitable(result): + result = await result + + if isinstance(result, ExperimentalIncrementalExecutionResults): + results: list[Any] = [result.initial_result.formatted] + async for patch in result.subsequent_results: + results.append(patch.formatted) + return results + + assert isinstance(result, ExecutionResult) + return result.formatted + + +def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: + return {**args, **modifications} + + +def describe_execute_defer_directive(): + def can_format_and_print_pending_result(): + result = PendingResult("foo", []) + assert result.formatted == {"id": "foo", "path": []} + assert str(result) == "PendingResult(id='foo', path=[])" + + result = PendingResult(id="foo", path=["bar", 1], label="baz") + assert result.formatted == {"id": "foo", "path": ["bar", 1], "label": "baz"} + assert str(result) == "PendingResult(id='foo', path=['bar', 1], label='baz')" + + def can_compare_pending_result(): + args: dict[str, Any] = {"id": "foo", "path": ["bar", 1], "label": "baz"} + result = PendingResult(**args) + assert result == PendingResult(**args) + assert result != PendingResult(**modified_args(args, id="bar")) + assert result != PendingResult(**modified_args(args, path=["bar", 2])) + assert result != PendingResult(**modified_args(args, label="bar")) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != tuple(args.values())[:1] + (["bar", 2],) + assert result == args + assert result != {**args, "id": "bar"} + assert result != {**args, "path": ["bar", 2]} + assert result != {**args, "label": "bar"} + + def can_format_and_print_completed_result(): + result = CompletedResult("foo") + assert result.formatted == {"id": "foo"} + assert str(result) == "CompletedResult(id='foo')" + + result = CompletedResult(id="foo", errors=[GraphQLError("oops")]) + assert result.formatted == {"id": "foo", "errors": [{"message": "oops"}]} + assert str(result) == "CompletedResult(id='foo', errors=[GraphQLError('oops')])" + + def can_compare_completed_result(): + args: dict[str, Any] = {"id": "foo", "errors": []} + result = CompletedResult(**args) + assert result == CompletedResult(**args) + assert result != CompletedResult(**modified_args(args, id="bar")) + assert result != CompletedResult( + **modified_args(args, errors=[GraphQLError("oops")]) + ) + assert result == tuple(args.values()) + assert result != tuple(args.values())[:1] + assert result != tuple(args.values())[:1] + ([GraphQLError("oops")],) + assert result == args + assert result != {**args, "id": "bar"} + assert result != {**args, "errors": [{"message": "oops"}]} + + def can_format_and_print_incremental_defer_result(): + result = IncrementalDeferResult(data={}, id="foo") + assert result.formatted == {"data": {}, "id": "foo"} + assert str(result) == "IncrementalDeferResult(data={}, id='foo')" + + result = IncrementalDeferResult( + data={"hello": "world"}, + id="foo", + sub_path=["bar", 1], + errors=[GraphQLError("oops")], + extensions={"baz": 2}, + ) + assert result.formatted == { + "data": {"hello": "world"}, + "id": "foo", + "subPath": ["bar", 1], + "errors": [{"message": "oops"}], + "extensions": {"baz": 2}, + } + assert ( + str(result) == 
"IncrementalDeferResult(data={'hello': 'world'}," + " id='foo', sub_path=['bar', 1], errors=[GraphQLError('oops')]," + " extensions={'baz': 2})" + ) + + # noinspection PyTypeChecker + def can_compare_incremental_defer_result(): + args: dict[str, Any] = { + "data": {"hello": "world"}, + "id": "foo", + "sub_path": ["bar", 1], + "errors": [GraphQLError("oops")], + "extensions": {"baz": 2}, + } + result = IncrementalDeferResult(**args) + assert result == IncrementalDeferResult(**args) + assert result != IncrementalDeferResult( + **modified_args(args, data={"hello": "foo"}) + ) + assert result != IncrementalDeferResult(**modified_args(args, id="bar")) + assert result != IncrementalDeferResult( + **modified_args(args, sub_path=["bar", 2]) + ) + assert result != IncrementalDeferResult(**modified_args(args, errors=[])) + assert result != IncrementalDeferResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != ({"hello": "world"}, "bar") + args["subPath"] = args.pop("sub_path") + assert result == args + assert result != {**args, "data": {"hello": "foo"}} + assert result != {**args, "id": "bar"} + assert result != {**args, "subPath": ["bar", 2]} + assert result != {**args, "errors": []} + assert result != {**args, "extensions": {"baz": 1}} + + def can_format_and_print_initial_incremental_execution_result(): + result = InitialIncrementalExecutionResult() + assert result.formatted == {"data": None, "hasNext": False, "pending": []} + assert str(result) == "InitialIncrementalExecutionResult(data=None)" + + result = InitialIncrementalExecutionResult(has_next=True) + assert result.formatted == {"data": None, "hasNext": True, "pending": []} + assert str(result) == "InitialIncrementalExecutionResult(data=None, has_next)" + + result = InitialIncrementalExecutionResult( + data={"hello": "world"}, + errors=[GraphQLError("msg")], + pending=[PendingResult("foo", ["bar"])], + has_next=True, + extensions={"baz": 2}, + ) + assert result.formatted == { + "data": {"hello": "world"}, + "errors": [{"message": "msg"}], + "pending": [{"id": "foo", "path": ["bar"]}], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert ( + str(result) == "InitialIncrementalExecutionResult(" + "data={'hello': 'world'}, errors=[GraphQLError('msg')]," + " pending=[PendingResult(id='foo', path=['bar'])], has_next," + " extensions={'baz': 2})" + ) + + def can_compare_initial_incremental_execution_result(): + args: dict[str, Any] = { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "pending": [PendingResult("foo", ["bar"])], + "has_next": True, + "extensions": {"baz": 2}, + } + result = InitialIncrementalExecutionResult(**args) + assert result == InitialIncrementalExecutionResult(**args) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, data={"hello": "foo"}) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, errors=[]) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, pending=[]) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, has_next=False) + ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:5] + assert result == 
tuple(args.values())[:4] + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != ({"hello": "foo"}, []) + + assert result == { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "pending": [PendingResult("foo", ["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { + "errors": [GraphQLError("msg")], + "pending": [PendingResult("foo", ["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { + "data": {"hello": "world"}, + "pending": [PendingResult("foo", ["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "pending": [PendingResult("foo", ["bar"])], + "extensions": {"baz": 2}, + } + assert result != { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "pending": [PendingResult("foo", ["bar"])], + "hasNext": True, + } + + def can_format_and_print_subsequent_incremental_execution_result(): + result = SubsequentIncrementalExecutionResult() + assert result.formatted == {"hasNext": False} + assert str(result) == "SubsequentIncrementalExecutionResult()" + + result = SubsequentIncrementalExecutionResult(has_next=True) + assert result.formatted == {"hasNext": True} + assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" + + pending = [PendingResult("foo", ["bar"])] + incremental = [ + cast("IncrementalResult", IncrementalDeferResult({"foo": 1}, "bar")) + ] + completed = [CompletedResult("foo")] + result = SubsequentIncrementalExecutionResult( + has_next=True, + pending=pending, + incremental=incremental, + completed=completed, + extensions={"baz": 2}, + ) + assert result.formatted == { + "hasNext": True, + "pending": [{"id": "foo", "path": ["bar"]}], + "incremental": [{"data": {"foo": 1}, "id": "bar"}], + "completed": [{"id": "foo"}], + "extensions": {"baz": 2}, + } + assert ( + str(result) == "SubsequentIncrementalExecutionResult(has_next," + " pending[1], incremental[1], completed[1], extensions={'baz': 2})" + ) + + def can_compare_subsequent_incremental_execution_result(): + pending = [PendingResult("foo", ["bar"])] + incremental = [ + cast("IncrementalResult", IncrementalDeferResult({"foo": 1}, "bar")) + ] + completed = [CompletedResult("foo")] + args: dict[str, Any] = { + "has_next": True, + "pending": pending, + "incremental": incremental, + "completed": completed, + "extensions": {"baz": 2}, + } + result = SubsequentIncrementalExecutionResult(**args) + assert result == SubsequentIncrementalExecutionResult(**args) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, pending=[]) + ) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, incremental=[]) + ) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, has_next=False) + ) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != (incremental, False) + assert result == { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "completed": completed, + "extensions": {"baz": 2}, + } 
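For reference: the formatted payload dicts asserted in these tests are exactly what a transport layer would ship to clients. A minimal consumption sketch, modeled on the `complete` helper defined at the top of this test file (the one-field schema and the `collect_payloads` name below are illustrative placeholders, not part of this changeset)::

    import asyncio

    from graphql import build_schema, parse
    from graphql.execution import (
        ExperimentalIncrementalExecutionResults,
        experimental_execute_incrementally,
    )
    from graphql.pyutils import is_awaitable

    # Hypothetical one-field schema, just big enough to drive @defer.
    schema = build_schema("type Query { hello: String }")
    document = parse("query { ... @defer { hello } }")

    async def collect_payloads() -> list:
        result = experimental_execute_incrementally(
            schema, document, {"hello": "world"}
        )
        if is_awaitable(result):  # async resolvers may make this awaitable
            result = await result
        assert isinstance(result, ExperimentalIncrementalExecutionResults)
        # The initial payload carries "data", "pending" and "hasNext" ...
        payloads = [result.initial_result.formatted]
        # ... and each patch carries "incremental"/"completed" until hasNext is False.
        async for patch in result.subsequent_results:
            payloads.append(patch.formatted)
        return payloads

    print(asyncio.run(collect_payloads()))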
+ assert result != { + "pending": pending, + "incremental": incremental, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "incremental": incremental, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "completed": completed, + } + + def can_print_deferred_grouped_field_set_record(): + record = DeferredGroupedFieldSetRecord([], {}, False) + assert ( + str(record) == "DeferredGroupedFieldSetRecord(" + "deferred_fragment_records=[], grouped_field_set={})" + ) + record = DeferredGroupedFieldSetRecord([], {}, True, Path(None, "foo", "Foo")) + assert ( + str(record) == "DeferredGroupedFieldSetRecord(" + "deferred_fragment_records=[], grouped_field_set={}, path=['foo'])" + ) + + def can_print_deferred_fragment_record(): + record = DeferredFragmentRecord(None, None) + assert str(record) == "DeferredFragmentRecord()" + record = DeferredFragmentRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "DeferredFragmentRecord(path=['bar'], label='foo')" + + def can_print_stream_record(): + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" + record.path = [] + assert str(record) == "StreamRecord(label='foo')" + record.label = None + assert str(record) == "StreamRecord()" + + def can_print_stream_items_record(): + record = StreamItemsRecord( + StreamRecord(Path(None, "bar", "Bar"), "foo"), + Path(None, "baz", "Baz"), + ) + assert ( + str(record) == "StreamItemsRecord(stream_record=StreamRecord(" + "path=['bar'], label='foo'), path=['baz'])" + ) + record = StreamItemsRecord(StreamRecord(Path(None, "bar", "Bar"))) + assert ( + str(record) == "StreamItemsRecord(stream_record=StreamRecord(path=['bar']))" + ) + + @pytest.mark.asyncio + async def can_defer_fragments_containing_scalar_types(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_disable_defer_using_if_argument(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer(if: false) + } + } + fragment NameFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == {"data": {"hero": {"id": "1", "name": "Luke"}}} + + @pytest.mark.asyncio + async def does_not_disable_defer_with_null_if_argument(): + document = parse( + """ + query HeroNameQuery($shouldDefer: Boolean) { + hero { + id + ...NameFragment @defer(if: $shouldDefer) + } + } + fragment NameFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + 
async def throws_an_error_for_defer_directive_with_non_string_label(): + document = parse( + """ + query Deferred { + ... @defer(label: 42) { hero { id } } + } + """ + ) + result = await complete(document) + + assert result == { + "data": None, + "errors": [ + { + "locations": [{"column": 33, "line": 3}], + "message": "Argument 'label' has invalid value 42.", + } + ], + } + + @pytest.mark.asyncio + async def can_defer_fragments_on_the_top_level_query_field(): + document = parse( + """ + query HeroNameQuery { + ...QueryFragment @defer(label: "DeferQuery") + } + fragment QueryFragment on Query { + hero { + id + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "pending": [{"id": "0", "path": [], "label": "DeferQuery"}], + "hasNext": True, + }, + { + "incremental": [{"data": {"hero": {"id": "1"}}, "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_defer_fragments_with_errors_on_the_top_level_query_field(): + document = parse( + """ + query HeroNameQuery { + ...QueryFragment @defer(label: "DeferQuery") + } + fragment QueryFragment on Query { + hero { + name + } + } + """ + ) + result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) + + assert result == [ + { + "data": {}, + "pending": [{"id": "0", "path": [], "label": "DeferQuery"}], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": {"name": None}}, + "errors": [ + { + "message": "bad", + "locations": [{"column": 17, "line": 7}], + "path": ["hero", "name"], + } + ], + "id": "0", + } + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_defer_a_fragment_within_an_already_deferred_fragment(): + document = parse( + """ + query HeroNameQuery { + hero { + ...TopFragment @defer(label: "DeferTop") + } + } + fragment TopFragment on Hero { + id + ...NestedFragment @defer(label: "DeferNested") + } + fragment NestedFragment on Hero { + friends { + name + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {}}, + "pending": [ + {"id": "0", "path": ["hero"], "label": "DeferTop"}, + {"id": "1", "path": ["hero"], "label": "DeferNested"}, + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"id": "1"}, "id": "0"}, + { + "data": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + }, + "id": "1", + }, + ], + "completed": [{"id": "0"}, {"id": "1"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): + document = parse( + """ + query HeroNameQuery { + hero { + ...TopFragment @defer(label: "DeferTop") + ...TopFragment + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"name": "Luke"}}, + "pending": [{"id": "0", "path": ["hero"], "label": "DeferTop"}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first(): + document = parse( + """ + query HeroNameQuery { + hero { + ...TopFragment + ...TopFragment @defer(label: "DeferTop") + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"name": "Luke"}}, + "pending": [{"id": "0", "path": ["hero"], "label": "DeferTop"}], + "hasNext": True, + }, + 
{"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def can_defer_an_inline_fragment(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ... on Hero @defer(label: "InlineDeferred") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"], "label": "InlineDeferred"}], + "hasNext": True, + }, + { + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_emit_empty_defer_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer { + name @skip(if: true) + } + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_varying_fields(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + ... @defer(label: "DeferName") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {}}, + "pending": [ + {"id": "0", "path": ["hero"], "label": "DeferID"}, + {"id": "1", "path": ["hero"], "label": "DeferName"}, + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"id": "1"}, "id": "0"}, + {"data": {"name": "Luke"}, "id": "1"}, + ], + "completed": [{"id": "0"}, {"id": "1"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_varying_subfields(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "pending": [ + {"id": "0", "path": [], "label": "DeferID"}, + {"id": "1", "path": [], "label": "DeferName"}, + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"hero": {}}, "id": "0"}, + {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]}, + {"data": {"name": "Luke"}, "id": "1", "subPath": ["hero"]}, + ], + "completed": [{"id": "0"}, {"id": "1"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_var_subfields_async(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... 
@defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + + async def resolve(value): + return value + + result = await complete( + document, + { + "hero": { + "id": lambda _info: resolve(1), + "name": lambda _info: resolve("Luke"), + } + }, + ) + + assert result == [ + { + "data": {}, + "pending": [ + {"id": "0", "path": [], "label": "DeferID"}, + {"id": "1", "path": [], "label": "DeferName"}, + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"hero": {}}, "id": "0"}, + {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]}, + {"data": {"name": "Luke"}, "id": "1", "subPath": ["hero"]}, + ], + "completed": [{"id": "0"}, {"id": "1"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {}}, + "pending": [ + {"id": "0", "path": [], "label": "DeferName"}, + {"id": "1", "path": ["hero"], "label": "DeferID"}, + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"id": "1"}, "id": "1"}, + {"data": {"name": "Luke"}, "id": "0", "subPath": ["hero"]}, + ], + "completed": [{"id": "1"}, {"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferName") { + hero { + name + ... @defer(label: "DeferID") { + id + } + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "pending": [{"id": "0", "path": [], "label": "DeferName"}], + "hasNext": True, + }, + { + "pending": [{"id": "1", "path": ["hero"], "label": "DeferID"}], + "incremental": [{"data": {"hero": {"name": "Luke"}}, "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"data": {"id": "1"}, "id": "1"}], + "completed": [{"id": "1"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_deduplicate_multiple_defers_on_the_same_object(): + document = parse( + """ + query { + hero { + friends { + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... 
@defer { + ...FriendFrag + } + } + } + } + } + } + } + + fragment FriendFrag on Friend { + id + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"friends": [{}, {}, {}]}}, + "pending": [ + {"id": "0", "path": ["hero", "friends", 0]}, + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 0]}, + {"id": "3", "path": ["hero", "friends", 0]}, + {"id": "4", "path": ["hero", "friends", 1]}, + {"id": "5", "path": ["hero", "friends", 1]}, + {"id": "6", "path": ["hero", "friends", 1]}, + {"id": "7", "path": ["hero", "friends", 1]}, + {"id": "8", "path": ["hero", "friends", 2]}, + {"id": "9", "path": ["hero", "friends", 2]}, + {"id": "10", "path": ["hero", "friends", 2]}, + {"id": "11", "path": ["hero", "friends", 2]}, + ], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"id": "2", "name": "Han"}, "id": "0"}, + {"data": {"id": "3", "name": "Leia"}, "id": "4"}, + {"data": {"id": "4", "name": "C-3PO"}, "id": "8"}, + ], + "completed": [ + {"id": "1"}, + {"id": "2"}, + {"id": "3"}, + {"id": "5"}, + {"id": "6"}, + {"id": "7"}, + {"id": "9"}, + {"id": "10"}, + {"id": "11"}, + {"id": "0"}, + {"id": "4"}, + {"id": "8"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_fields_present_in_the_initial_payload(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "hero": { + "nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}, + "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, + } + }, + ) + + assert result == [ + { + "data": { + "hero": { + "nestedObject": {"deeperObject": {"foo": "foo"}}, + "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, + } + }, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"bar": "bar"}, + "id": "0", + "subPath": ["nestedObject", "deeperObject"], + }, + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_fields_present_in_a_parent_defer_payload(): + document = parse( + """ + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... @defer { + foo + bar + } + } + } + } + } + } + """ + ) + result = await complete( + document, + {"hero": {"nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}}}, + ) + + assert result == [ + { + "data": {"hero": {}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"id": "1", "path": ["hero", "nestedObject", "deeperObject"]} + ], + "incremental": [ + { + "data": {"nestedObject": {"deeperObject": {"foo": "foo"}}}, + "id": "0", + }, + ], + "completed": [{"id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"data": {"bar": "bar"}, "id": "1"}], + "completed": [{"id": "1"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + foo + bar + } + ... @defer { + deeperObject { + foo + bar + baz + ... 
@defer { + foo + bar + baz + bak + } + } + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "hero": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "bak": "bak", + } + } + } + }, + ) + + assert result == [ + { + "data": { + "hero": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + }, + }, + }, + }, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [{"id": "1", "path": ["hero", "nestedObject"]}], + "incremental": [ + { + "data": {"bar": "bar"}, + "id": "0", + "subPath": ["nestedObject", "deeperObject"], + }, + ], + "completed": [{"id": "0"}], + "hasNext": True, + }, + { + "pending": [ + {"id": "2", "path": ["hero", "nestedObject", "deeperObject"]} + ], + "incremental": [ + {"data": {"baz": "baz"}, "id": "1", "subPath": ["deeperObject"]}, + ], + "hasNext": True, + "completed": [{"id": "1"}], + }, + { + "incremental": [{"data": {"bak": "bak"}, "id": "2"}], + "completed": [{"id": "2"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_fields_from_deferred_fragments_branches_same_level(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + ... @defer { + foo + } + } + } + ... @defer { + nestedObject { + deeperObject { + ... @defer { + foo + bar + } + } + } + } + } + } + """ + ) + result = await complete( + document, + {"hero": {"nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}}}, + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, + "pending": [ + {"id": "0", "path": ["hero"]}, + {"id": "1", "path": ["hero", "nestedObject", "deeperObject"]}, + ], + "hasNext": True, + }, + { + "pending": [ + {"id": "2", "path": ["hero", "nestedObject", "deeperObject"]} + ], + "incremental": [{"data": {"foo": "foo"}, "id": "1"}], + "completed": [{"id": "0"}, {"id": "1"}], + "hasNext": True, + }, + { + "incremental": [{"data": {"bar": "bar"}, "id": "2"}], + "completed": [{"id": "2"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): + document = parse( + """ + query { + a { + b { + c { + d + } + ... @defer { + e { + f + } + } + } + } + ... @defer { + a { + b { + e { + f + } + } + } + g { + h + } + } + } + """ + ) + result = await complete( + document, + {"a": {"b": {"c": {"d": "d"}, "e": {"f": "f"}}}, "g": {"h": "h"}}, + ) + + assert result == [ + { + "data": {"a": {"b": {"c": {"d": "d"}}}}, + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a", "b"]}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"e": {"f": "f"}}, "id": "1"}, + {"data": {"g": {"h": "h"}}, "id": "0"}, + ], + "completed": [{"id": "1"}, {"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def nulls_cross_defer_boundaries_null_first(): + document = parse( + """ + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... 
@defer { + b { + c { + d + } + } + } + } + } + """ + ) + result = await complete( + document, + {"a": {"b": {"c": {"d": "d"}}, "someField": "someField"}}, + ) + + assert result == [ + { + "data": {"a": {}}, + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "1", "subPath": ["b", "c"]}, + ], + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 8, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + {"id": "1"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def nulls_cross_defer_boundaries_value_first(): + document = parse( + """ + query { + ... @defer { + a { + b { + c { + d + } + } + } + } + a { + ... @defer { + someField + b { + c { + nonNullErrorField + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "a": { + "b": {"c": {"d": "d"}, "nonNullErrorFIeld": None}, + "someField": "someField", + } + }, + ) + + assert result == [ + { + "data": {"a": {}}, + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "0", "subPath": ["a", "b", "c"]}, + ], + "completed": [ + { + "id": "1", + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 17, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + {"id": "0"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def filters_a_payload_with_a_null_that_cannot_be_merged(): + document = parse( + """ + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + """ + ) + + result = await complete( + document, + { + "a": { + "b": {"c": {"d": "d", "nonNullErrorField": Resolvers.slow_null}}, + "someField": "someField", + } + }, + ) + + assert result == [ + { + "data": {"a": {}}, + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "1", "subPath": ["b", "c"]}, + ], + "completed": [{"id": "1"}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 8, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): + document = parse( + """ + query { + hero { + nonNullName + } + ... @defer { + hero { + name + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nonNullName": lambda _info: None}} + ) + + assert result == { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullName"], + }, + ], + } + + @pytest.mark.asyncio + async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): + document = parse( + """ + query { + ...
@defer { + hero { + nonNullName + name + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nonNullName": lambda _info: None}} + ) + + assert result == [ + { + "data": {}, + "pending": [{"id": "0", "path": []}], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": None}, + "id": "0", + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 5, "column": 19}], + "path": ["hero", "nonNullName"], + }, + ], + }, + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + result = await complete(document) + + assert result == [ + { + "data": { + "hero": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + } + }, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def deduplicates_async_iterable_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + result = await complete( + document, {"hero": {**hero, "friends": Resolvers.first_friend}} + ) + + assert result == [ + { + "data": {"hero": {"friends": [{"name": "Han"}]}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def deduplicates_empty_async_iterable_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + async def resolve_friends(_info): + await sleep(0) + for friend in []: # type: ignore + yield friend # pragma: no cover + + result = await complete( + document, {"hero": {**hero, "friends": resolve_friends}} + ) + + assert result == [ + { + "data": {"hero": {"friends": []}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + id + } + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": { + "hero": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + } + }, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"id": "2"}, "id": "0", "subPath": ["friends", 0]}, + {"data": {"id": "3"}, "id": "0", "subPath": ["friends", 1]}, + {"data": {"id": "4"}, "id": "0", "subPath": ["friends", 2]}, + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def deduplicates_list_fields_that_return_empty_lists(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "friends": lambda _info: []}} + ) + + assert result == [ + { + "data": {"hero": {"friends": []}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def deduplicates_null_object_fields(): + document = parse( + """ + query { + hero { + nestedObject { + name + } + ...
@defer { + nestedObject { + name + } + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nestedObject": lambda _info: None}} + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": None}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + async def deduplicates_async_object_fields(): + document = parse( + """ + query { + hero { + nestedObject { + name + } + ... @defer { + nestedObject { + name + } + } + } + } + """ + ) + + async def resolve_nested_object(_info): + return {"name": "foo"} + + result = await complete( + document, {"hero": {"nestedObject": resolve_nested_object}} + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"name": "foo"}}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def handles_errors_thrown_in_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + } + """ + ) + result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"name": None}, + "id": "0", + "errors": [ + { + "message": "bad", + "locations": [{"line": 9, "column": 15}], + "path": ["hero", "name"], + } + ], + }, + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_non_nullable_errors_thrown_in_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullName + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nonNullName": Resolvers.null}} + ) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Hero.nonNullName.", + "locations": [{"line": 9, "column": 15}], + "path": ["hero", "nonNullName"], + } + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + nonNullName + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + id + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nonNullName": Resolvers.null}} + ) + + assert result == { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Hero.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullName"], + } + ], + } + + @pytest.mark.asyncio + async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + nonNullName + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nonNullName": Resolvers.null_async}} + ) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " 
Hero.nonNullName.", + "locations": [{"line": 9, "column": 15}], + "path": ["hero", "nonNullName"], + } + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def returns_payloads_in_correct_order(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer + } + } + fragment NestedFragment on Friend { + name + } + """ + ) + result = await complete(document, {"hero": {**hero, "name": Resolvers.slow}}) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 1]}, + {"id": "3", "path": ["hero", "friends", 2]}, + ], + "incremental": [ + {"data": {"name": "slow", "friends": [{}, {}, {}]}, "id": "0"} + ], + "completed": [{"id": "0"}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"name": "Han"}, "id": "1"}, + {"data": {"name": "Leia"}, "id": "2"}, + {"data": {"name": "C-3PO"}, "id": "3"}, + ], + "completed": [{"id": "1"}, {"id": "2"}, {"id": "3"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def returns_payloads_from_synchronous_data_in_correct_order(): + document = parse( + """ + query HeroNameQuery { + hero { + id + ...NameFragment @defer + } + } + fragment NameFragment on Hero { + name + friends { + ...NestedFragment @defer + } + } + fragment NestedFragment on Friend { + name + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {"id": "1"}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 1]}, + {"id": "3", "path": ["hero", "friends", 2]}, + ], + "incremental": [ + {"data": {"name": "Luke", "friends": [{}, {}, {}]}, "id": "0"} + ], + "completed": [{"id": "0"}], + "hasNext": True, + }, + { + "incremental": [ + {"data": {"name": "Han"}, "id": "1"}, + {"data": {"name": "Leia"}, "id": "2"}, + {"data": {"name": "C-3PO"}, "id": "3"}, + ], + "completed": [{"id": "1"}, {"id": "2"}, {"id": "3"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): + document = parse( + """ + query { + hero { + friends { + nonNullName + ...NameFragment @defer + } + } + } + fragment NameFragment on Friend { + name + } + """ + ) + + result = await complete( + document, {"hero": {**hero, "friends": Resolvers.first_friend}} + ) + + assert result == { + "data": {"hero": {"friends": [None]}}, + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Friend.nonNullName.", + "locations": [{"line": 5, "column": 19}], + "path": ["hero", "friends", 0, "nonNullName"], + } + ], + } + + @pytest.mark.asyncio + async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): + document = parse( + """ + query Deferred { + ... @defer { hero { id } } + } + """ + ) + + with pytest.raises(GraphQLError) as exc_info: + await execute(schema, document, {}) # type: ignore + + assert str(exc_info.value) == ( + "Executing this GraphQL operation would unexpectedly produce" + " multiple payloads (due to @defer or @stream directive)" + ) + + @pytest.mark.asyncio + async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync(): + document = parse( + """ + query Deferred { + hero { name } + ... 
@defer { hero { id } } + } + """ + ) + + root_value = {"hero": {**hero, "name": Resolvers.slow}} + with pytest.raises(GraphQLError) as exc_info: + await execute(schema, document, root_value) # type: ignore + + assert str(exc_info.value) == ( + "Executing this GraphQL operation would unexpectedly produce" + " multiple payloads (due to @defer or @stream directive)" + ) diff --git a/tests/execution/test_directives.py b/tests/execution/test_directives.py index 2beadc77..d7f45dd6 100644 --- a/tests/execution/test_directives.py +++ b/tests/execution/test_directives.py @@ -1,6 +1,6 @@ -from graphql.execution import execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute_sync from graphql.language import parse -from graphql.type import GraphQLObjectType, GraphQLField, GraphQLSchema, GraphQLString +from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString schema = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index 25247b88..96935d99 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -1,11 +1,10 @@ -from pytest import raises +import pytest from graphql.error import GraphQLError from graphql.execution import ExecutionResult def describe_execution_result(): - data = {"foo": "Some data"} error = GraphQLError("Some error") errors = [error] @@ -56,15 +55,15 @@ def compares_to_dict(): res = ExecutionResult(data, errors) assert res == {"data": data, "errors": errors} assert res == {"data": data, "errors": errors, "extensions": None} - assert res != {"data": data, "errors": None} - assert res != {"data": None, "errors": errors} + assert res == {"data": data, "errors": errors, "extensions": {}} + assert res != {"errors": errors} + assert res != {"data": data} assert res != {"data": data, "errors": errors, "extensions": extensions} res = ExecutionResult(data, errors, extensions) - assert res == {"data": data, "errors": errors} assert res == {"data": data, "errors": errors, "extensions": extensions} - assert res != {"data": data, "errors": None} - assert res != {"data": None, "errors": errors} - assert res != {"data": data, "errors": errors, "extensions": None} + assert res != {"errors": errors, "extensions": extensions} + assert res != {"data": data, "extensions": extensions} + assert res != {"data": data, "errors": errors} def compares_to_tuple(): res = ExecutionResult(data, errors) @@ -108,9 +107,9 @@ def compares_to_another_execution_result(): def unpacks_as_two_tuple(): res = ExecutionResult(data, errors) - res_data, res_errors = res # type: ignore - assert res_data == data # type: ignore - assert res_errors == errors # type: ignore - with raises(ValueError): - res = ExecutionResult(data, errors, extensions) - _res_data, _res_errors, _res_extensions = res # type: ignore + res_data, res_errors = res + assert res_data == data + assert res_errors == errors + res = ExecutionResult(data, errors, extensions) + with pytest.raises(ValueError, match="not enough values to unpack"): + _res_data, _res_errors, _res_extensions = res diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 7cd2260a..a11c6b5e 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -1,15 +1,18 @@ +from __future__ import annotations + import asyncio -from typing import cast, Any, Awaitable, Optional +from typing import Any, Awaitable, cast -from pytest import mark, raises +import 
pytest from graphql.error import GraphQLError from graphql.execution import execute, execute_sync -from graphql.language import parse, FieldNode, OperationDefinitionNode -from graphql.pyutils import inspect, Undefined +from graphql.language import FieldNode, OperationDefinitionNode, parse +from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLArgument, GraphQLBoolean, + GraphQLDeferDirective, GraphQLField, GraphQLInt, GraphQLInterfaceType, @@ -17,8 +20,9 @@ GraphQLNonNull, GraphQLObjectType, GraphQLResolveInfo, - GraphQLSchema, GraphQLScalarType, + GraphQLSchema, + GraphQLStreamDirective, GraphQLString, GraphQLUnionType, ResponsePath, @@ -26,64 +30,11 @@ def describe_execute_handles_basic_execution_tasks(): - # noinspection PyTypeChecker - def throws_if_no_document_is_provided(): - schema = GraphQLSchema( - GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) - ) - - with raises(TypeError) as exc_info: - assert execute_sync(schema=schema, document=None) # type: ignore - - assert str(exc_info.value) == "Must provide document." - - # noinspection PyTypeChecker - def throws_if_no_schema_is_provided(): - document = parse("{ field }") - - with raises(TypeError) as exc_info: - assert execute_sync(schema=None, document=document) # type: ignore - - assert str(exc_info.value) == "Expected None to be a GraphQL schema." - - def throws_on_invalid_variables(): - schema = GraphQLSchema( - GraphQLObjectType( - "Type", - { - "fieldA": GraphQLField( - GraphQLString, args={"argA": GraphQLArgument(GraphQLInt)} - ) - }, - ) - ) - document = parse( - """ - query ($a: Int) { - fieldA(argA: $a) - } - """ - ) - variable_values = "{'a': 1}" - - with raises(TypeError) as exc_info: - assert execute_sync( - schema=schema, - document=document, - variable_values=variable_values, # type: ignore - ) - - assert str(exc_info.value) == ( - "Variable values must be provided as a dictionary" - " with variable names as keys. Perhaps look to see" - " if an unparsed JSON string was provided." 
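# A minimal sketch (not part of this patch) of the call shape the removed
# test above exercised: variable_values must be passed as a mapping with
# variable names as keys, not as a JSON string.
#
#     execute_sync(schema, document, variable_values={"a": 1})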
- ) - def accepts_positional_arguments(): schema = GraphQLSchema( GraphQLObjectType( "Type", - {"a": GraphQLField(GraphQLString, resolve=lambda obj, *args: obj)}, + {"a": GraphQLField(GraphQLString, resolve=lambda obj, *_args: obj)}, ) ) @@ -91,7 +42,7 @@ def accepts_positional_arguments(): assert result == ({"a": "rootValue"}, None) - @mark.asyncio + @pytest.mark.asyncio async def executes_arbitrary_code(): # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic class Data: @@ -294,15 +245,16 @@ def resolve(_obj, info): execute_sync(schema, document, root_value, variable_values=variable_values) assert len(resolved_infos) == 1 - operation = cast(OperationDefinitionNode, document.definitions[0]) - assert operation and operation.kind == "operation_definition" - field = cast(FieldNode, operation.selection_set.selections[0]) + operation = cast("OperationDefinitionNode", document.definitions[0]) + assert operation + assert operation.kind == "operation_definition" + field = cast("FieldNode", operation.selection_set.selections[0]) assert resolved_infos[0] == GraphQLResolveInfo( field_name="test", field_nodes=[field], return_type=GraphQLString, - parent_type=cast(GraphQLObjectType, schema.query_type), + parent_type=cast("GraphQLObjectType", schema.query_type), path=ResponsePath(None, "result", "Test"), schema=schema, fragments={}, @@ -314,7 +266,7 @@ def resolve(_obj, info): ) def it_populates_path_correctly_with_complex_types(): - path: Optional[ResponsePath] = None + path: ResponsePath | None = None def resolve(_val, info): nonlocal path @@ -357,9 +309,11 @@ def resolve_type(_val, _info, _type): prev, key, typename = path assert key == "l2" assert typename == "SomeObject" + assert prev is not None prev, key, typename = prev assert key == 0 assert typename is None + assert prev is not None prev, key, typename = prev assert key == "l1" assert typename == "SomeQuery" @@ -422,7 +376,7 @@ def resolve(_obj, _info, **args): assert len(resolved_args) == 1 assert resolved_args[0] == {"numArg": 123, "stringArg": "foo"} - @mark.asyncio + @pytest.mark.asyncio async def nulls_out_error_subtrees(): document = parse( """ @@ -567,6 +521,54 @@ async def asyncReturnErrorWithExtensions(self, _info): ], ) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + def handles_sync_errors_combined_with_async_ones(): + is_async_resolver_finished = False + + async def async_resolver(_obj, _info): + nonlocal is_async_resolver_finished + is_async_resolver_finished = True # pragma: no cover + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "syncNullError": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=lambda _obj, _info: None + ), + "asyncNullError": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=async_resolver + ), + }, + ) + ) + + document = parse( + """ + { + asyncNullError + syncNullError + } + """ + ) + + result = execute(schema, document) + + assert is_async_resolver_finished is False + + assert result == ( + None, + [ + { + "message": "Cannot return null" + " for non-nullable field Query.syncNullError.", + "locations": [(4, 15)], + "path": ["syncNullError"], + } + ], + ) + + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def full_response_path_is_included_for_non_nullable_fields(): def resolve_ok(*_args): return {} @@ -616,6 +618,7 @@ def resolve_error(*_args): ], ) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def uses_the_inline_operation_if_no_operation_name_is_provided(): schema = GraphQLSchema( 
GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) @@ -629,6 +632,7 @@ class Data: result = execute_sync(schema, document, Data()) assert result == ({"a": "b"}, None) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def uses_the_only_operation_if_no_operation_name_is_provided(): schema = GraphQLSchema( GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) @@ -642,6 +646,7 @@ class Data: result = execute_sync(schema, document, Data()) assert result == ({"a": "b"}, None) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def uses_the_named_operation_if_operation_name_is_provided(): schema = GraphQLSchema( GraphQLObjectType("Type", {"a": GraphQLField(GraphQLString)}) @@ -789,6 +794,38 @@ class Data: result = execute_sync(schema, document, Data(), operation_name="S") assert result == ({"a": "b"}, None) + def errors_when_using_original_execute_with_schemas_including_experimental_defer(): + schema = GraphQLSchema( + query=GraphQLObjectType("Q", {"a": GraphQLField(GraphQLString)}), + directives=[GraphQLDeferDirective], + ) + document = parse("query Q { a }") + + with pytest.raises(GraphQLError) as exc_info: + execute(schema, document) + + assert str(exc_info.value) == ( + "The provided schema unexpectedly contains experimental directives" + " (@defer or @stream). These directives may only be utilized" + " if experimental execution features are explicitly enabled." + ) + + def errors_when_using_original_execute_with_schemas_including_experimental_stream(): + schema = GraphQLSchema( + query=GraphQLObjectType("Q", {"a": GraphQLField(GraphQLString)}), + directives=[GraphQLStreamDirective], + ) + document = parse("query Q { a }") + + with pytest.raises(GraphQLError) as exc_info: + execute(schema, document) + + assert str(exc_info.value) == ( + "The provided schema unexpectedly contains experimental directives" + " (@defer or @stream). These directives may only be utilized" + " if experimental execution features are explicitly enabled." 
+ ) + def resolves_to_an_error_if_schema_does_not_support_operation(): schema = GraphQLSchema(assume_valid=True) @@ -832,7 +869,7 @@ def resolves_to_an_error_if_schema_does_not_support_operation(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def correct_field_ordering_despite_execution_order(): schema = GraphQLSchema( GraphQLObjectType( @@ -948,7 +985,7 @@ def does_not_include_arguments_that_were_not_set(): None, ) - @mark.asyncio + @pytest.mark.asyncio async def fails_when_is_type_of_check_is_not_met(): class Special: value: str @@ -1008,7 +1045,8 @@ async def async_is_special(): def fails_when_serialize_of_custom_scalar_does_not_return_a_value(): custom_scalar = GraphQLScalarType( - "CustomScalar", serialize=lambda _value: Undefined # returns nothing + "CustomScalar", + serialize=lambda _value: Undefined, # returns nothing ) schema = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 729c1191..a7f747fb 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,10 +1,20 @@ -from typing import cast, Any, Awaitable +from typing import Any, AsyncGenerator -from pytest import mark +import pytest -from graphql.execution import execute, execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable +from graphql.type import ( + GraphQLField, + GraphQLFieldResolver, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLResolveInfo, + GraphQLSchema, + GraphQLString, +) from graphql.utilities import build_schema @@ -25,6 +35,22 @@ def _complete(list_field): Data(list_field), ) + def accepts_a_list_as_a_list_value(): + result = _complete([]) + assert result == ({"listField": []}, None) + list_field = ["just an apple"] + result = _complete(list_field) + assert result == ({"listField": list_field}, None) + list_field = ["apple", "banana", "coconut"] + result = _complete(list_field) + assert result == ({"listField": list_field}, None) + + def accepts_a_tuple_as_a_list_value(): + list_field = ("apple", "banana", "coconut") + result = _complete(list_field) + assert result == ({"listField": list(list_field)}, None) + + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def accepts_a_set_as_a_list_value(): # Note that sets are not ordered in Python. 
list_field = {"apple", "banana", "coconut"} @@ -100,6 +126,187 @@ def does_not_accept_iterable_string_literal_as_a_list_value(): ) +def describe_execute_accepts_async_iterables_as_list_value(): + async def _complete(list_field, as_: str = "[String]"): + result = execute( + build_schema(f"type Query {{ listField: {as_} }}"), + parse("{ listField }"), + Data(list_field), + ) + assert is_awaitable(result) + return await result + + class _IndexData: + def __init__(self, index: int): + self.index = index + + async def _complete_object_lists( + resolve: GraphQLFieldResolver, count=3 + ) -> ExecutionResult: + async def _list_field( + _obj: Any, _info: GraphQLResolveInfo + ) -> AsyncGenerator[_IndexData, None]: + for index in range(count): + yield _IndexData(index) + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "listField": GraphQLField( + GraphQLList( + GraphQLObjectType( + "ObjectWrapper", + { + "index": GraphQLField( + GraphQLNonNull(GraphQLString), resolve=resolve + ) + }, + ) + ), + resolve=_list_field, + ) + }, + ) + ) + result = execute(schema, document=parse("{ listField { index } }")) + assert is_awaitable(result) + return await result + + @pytest.mark.asyncio + async def accepts_an_async_generator_as_a_list_value(): + async def list_field(): + yield "two" + yield 4 + yield False + + assert await _complete(list_field()) == ( + {"listField": ["two", "4", "false"]}, + None, + ) + + @pytest.mark.asyncio + async def accepts_a_custom_async_iterable_as_a_list_value(): + class ListField: + def __aiter__(self): + self.last = "hello" + return self + + async def __anext__(self): + last = self.last + if last == "stop": + raise StopAsyncIteration + self.last = "world" if last == "hello" else "stop" + return last + + assert await _complete(ListField()) == ( + {"listField": ["hello", "world"]}, + None, + ) + + @pytest.mark.asyncio + async def handles_an_async_generator_that_throws(): + async def list_field(): + yield "two" + yield 4 + raise RuntimeError("bad") + + assert await _complete(list_field()) == ( + {"listField": None}, + [{"message": "bad", "locations": [(1, 3)], "path": ["listField"]}], + ) + + @pytest.mark.asyncio + async def handles_an_async_generator_where_intermediate_value_triggers_an_error(): + async def list_field(): + yield "two" + yield {} + yield 4 + + assert await _complete(list_field()) == ( + {"listField": ["two", None, "4"]}, + [ + { + "message": "String cannot represent value: {}", + "locations": [(1, 3)], + "path": ["listField", 1], + } + ], + ) + + @pytest.mark.asyncio + async def handles_errors_from_complete_value_in_async_iterables(): + async def list_field(): + yield "two" + yield {} + + assert await _complete(list_field()) == ( + {"listField": ["two", None]}, + [ + { + "message": "String cannot represent value: {}", + "locations": [(1, 3)], + "path": ["listField", 1], + } + ], + ) + + @pytest.mark.asyncio + async def handles_async_functions_from_complete_value_in_async_iterables(): + async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: + return data.index + + assert await _complete_object_lists(resolve) == ( + {"listField": [{"index": "0"}, {"index": "1"}, {"index": "2"}]}, + None, + ) + + @pytest.mark.asyncio + async def handles_single_async_functions_from_complete_value_in_async_iterables(): + async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: + return data.index + + assert await _complete_object_lists(resolve, 1) == ( + {"listField": [{"index": "0"}]}, + None, + ) + + @pytest.mark.asyncio + async def 
handles_async_errors_from_complete_value_in_async_iterables(): + async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: + index = data.index + if index == 2: + raise RuntimeError("bad") + return index + + assert await _complete_object_lists(resolve) == ( + {"listField": [{"index": "0"}, {"index": "1"}, None]}, + [ + { + "message": "bad", + "locations": [(1, 15)], + "path": ["listField", 2, "index"], + } + ], + ) + + @pytest.mark.asyncio + async def handles_nulls_yielded_by_async_generator(): + async def list_field(): + yield 1 + yield None + yield 2 + + data = {"listField": [1, None, 2]} + message = "Cannot return null for non-nullable field Query.listField." + errors = [{"message": message, "locations": [(1, 3)], "path": ["listField", 1]}] + + assert await _complete(list_field(), "[Int]") == (data, None) + assert await _complete(list_field(), "[Int]!") == (data, None) + assert await _complete(list_field(), "[Int!]") == ({"listField": None}, errors) + assert await _complete(list_field(), "[Int!]!") == (None, errors) + + def describe_execute_handles_list_nullability(): async def _complete(list_field: Any, as_type: str) -> ExecutionResult: schema = build_schema(f"type Query {{ listField: {as_type} }}") @@ -117,7 +324,7 @@ def execute_query(list_value: Any) -> Any: return result - @mark.asyncio + @pytest.mark.asyncio async def contains_values(): list_field = [1, 2] assert await _complete(list_field, "[Int]") == ({"listField": [1, 2]}, None) @@ -125,7 +332,7 @@ async def contains_values(): assert await _complete(list_field, "[Int!]") == ({"listField": [1, 2]}, None) assert await _complete(list_field, "[Int!]!") == ({"listField": [1, 2]}, None) - @mark.asyncio + @pytest.mark.asyncio async def contains_null(): list_field = [1, None, 2] errors = [ @@ -146,7 +353,7 @@ async def contains_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, errors) assert await _complete(list_field, "[Int!]!") == (None, errors) - @mark.asyncio + @pytest.mark.asyncio async def returns_null(): list_field = None errors = [ @@ -161,7 +368,7 @@ async def returns_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, None) assert await _complete(list_field, "[Int!]!") == (None, errors) - @mark.asyncio + @pytest.mark.asyncio async def contains_error(): list_field = [1, RuntimeError("bad"), 2] errors = [ @@ -188,7 +395,7 @@ async def contains_error(): errors, ) - @mark.asyncio + @pytest.mark.asyncio async def results_in_errors(): list_field = RuntimeError("bad") errors = [ @@ -214,46 +421,3 @@ async def results_in_errors(): None, errors, ) - - -def describe_experimental_execute_accepts_async_iterables_as_list_value(): - async def _complete(list_field): - result = execute( - build_schema("type Query { listField: [String] }"), - parse("{ listField }"), - Data(list_field), - ) - assert is_awaitable(result) - result = cast(Awaitable, result) - return await result - - @mark.asyncio - async def accepts_an_async_generator_as_a_list_value(): - async def list_field(): - yield "one" - yield 2 - yield True - - assert await _complete(list_field()) == ( - {"listField": ["one", "2", "true"]}, - None, - ) - - @mark.asyncio - async def accepts_a_custom_async_iterable_as_a_list_value(): - class ListField: - def __aiter__(self): - self.last = "hello" - return self - - async def __anext__(self): - last = self.last - if last == "stop": - raise StopAsyncIteration - self.last = "world" if last == "hello" else "stop" - return last - - assert await _complete(ListField()) == ( - 
{"listField": ["hello", "world"]}, - None, - ) diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py new file mode 100644 index 00000000..eb3cddb8 --- /dev/null +++ b/tests/execution/test_map_async_iterable.py @@ -0,0 +1,304 @@ +import pytest + +from graphql.execution import map_async_iterable + +try: # pragma: no cover + anext # noqa: B018 +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + +async def double(x: int) -> int: + """Test callback that doubles the input value.""" + return x + x + + +async def throw(_x: int) -> int: + """Test callback that raises a RuntimeError.""" + raise RuntimeError("Ouch") + + +def describe_map_async_iterable(): + @pytest.mark.asyncio + async def maps_over_async_generator(): + async def source(): + yield 1 + yield 2 + yield 3 + + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + assert await anext(doubles) == 6 + with pytest.raises(StopAsyncIteration): + assert await anext(doubles) + + @pytest.mark.asyncio + async def maps_over_async_iterable(): + items = [1, 2, 3] + + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + try: + return items.pop(0) + except IndexError: + raise StopAsyncIteration + + doubles = map_async_iterable(Iterable(), double) + + values = [value async for value in doubles] + + assert not items + assert values == [2, 4, 6] + + @pytest.mark.asyncio + async def compatible_with_async_for(): + async def source(): + yield 1 + yield 2 + yield 3 + + doubles = map_async_iterable(source(), double) + + values = [value async for value in doubles] + + assert values == [2, 4, 6] + + @pytest.mark.asyncio + async def allows_returning_early_from_mapped_async_generator(): + async def source(): + yield 1 + yield 2 + yield 3 # pragma: no cover + + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Early return + await doubles.aclose() + + # Subsequent next calls + with pytest.raises(StopAsyncIteration): + await anext(doubles) + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def allows_returning_early_from_mapped_async_iterable(): + items = [1, 2, 3] + + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + try: + return items.pop(0) + except IndexError: # pragma: no cover + raise StopAsyncIteration + + doubles = map_async_iterable(Iterable(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Early return + await doubles.aclose() + + # Subsequent next calls + with pytest.raises(StopAsyncIteration): + await anext(doubles) + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def allows_throwing_errors_through_async_iterable(): + items = [1, 2, 3] + + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + try: + return items.pop(0) + except IndexError: # pragma: no cover + raise StopAsyncIteration + + doubles = map_async_iterable(Iterable(), double) + + assert await anext(doubles) == 2 + assert await anext(doubles) == 4 + + # Throw error + message = "allows throwing errors when mapping async iterable" + with pytest.raises(RuntimeError) as exc_info: + await 
doubles.athrow(RuntimeError(message)) + + assert str(exc_info.value) == message + + with pytest.raises(StopAsyncIteration): + await anext(doubles) + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def allows_throwing_errors_with_traceback_through_async_iterables(): + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + one = map_async_iterable(Iterable(), double) + + assert await anext(one) == 2 + + try: + raise RuntimeError("Ouch") + except RuntimeError as error: + with pytest.raises(RuntimeError, match="Ouch") as exc_info: + await one.athrow(error) + + assert exc_info.value is error # noqa: PT017 + assert exc_info.tb + assert error.__traceback__ # noqa: PT017 + assert exc_info.tb is error.__traceback__ # noqa: PT017 + + with pytest.raises(StopAsyncIteration): + await anext(one) + + @pytest.mark.asyncio + async def does_not_map_over_thrown_errors(): + async def source(): + yield 1 + raise RuntimeError("Goodbye") + + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + + with pytest.raises(RuntimeError) as exc_info: + await anext(doubles) + + assert str(exc_info.value) == "Goodbye" + + @pytest.mark.asyncio + async def does_not_map_over_externally_thrown_errors(): + async def source(): + yield 1 + + doubles = map_async_iterable(source(), double) + + assert await anext(doubles) == 2 + + with pytest.raises(RuntimeError) as exc_info: + await doubles.athrow(RuntimeError("Goodbye")) + + assert str(exc_info.value) == "Goodbye" + + @pytest.mark.asyncio + async def iterable_is_closed_when_mapped_iterable_is_closed(): + class Iterable: + def __init__(self): + self.closed = False + + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + async def aclose(self): + self.closed = True + + iterable = Iterable() + doubles = map_async_iterable(iterable, double) + assert await anext(doubles) == 2 + assert not iterable.closed + await doubles.aclose() + assert iterable.closed + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def iterable_is_closed_on_callback_error(): + class Iterable: + def __init__(self): + self.closed = False + + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + async def aclose(self): + self.closed = True + + iterable = Iterable() + doubles = map_async_iterable(iterable, throw) + with pytest.raises(RuntimeError, match="Ouch"): + await anext(doubles) + assert iterable.closed + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def iterable_exits_on_callback_error(): + exited = False + + async def iterable(): + nonlocal exited + try: + while True: + yield 1 + except GeneratorExit: + exited = True + + doubles = map_async_iterable(iterable(), throw) + with pytest.raises(RuntimeError, match="Ouch"): + await anext(doubles) + assert exited + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def mapped_iterable_is_closed_when_iterable_cannot_be_closed(): + class Iterable: + def __aiter__(self): + return self + + async def __anext__(self): + return 1 + + doubles = map_async_iterable(Iterable(), double) + assert await anext(doubles) == 2 + await doubles.aclose() + with pytest.raises(StopAsyncIteration): + await anext(doubles) + + @pytest.mark.asyncio + async def ignores_that_iterable_cannot_be_closed_on_callback_error(): + class Iterable: + def __aiter__(self): + return self + + 
async def __anext__(self): + return 1 + + doubles = map_async_iterable(Iterable(), throw) + with pytest.raises(RuntimeError, match="Ouch"): + await anext(doubles) + with pytest.raises(StopAsyncIteration): + await anext(doubles) diff --git a/tests/execution/test_map_async_iterator.py b/tests/execution/test_map_async_iterator.py deleted file mode 100644 index 299d010a..00000000 --- a/tests/execution/test_map_async_iterator.py +++ /dev/null @@ -1,492 +0,0 @@ -import sys -from asyncio import CancelledError, Event, ensure_future, sleep - -from pytest import mark, raises - -from graphql.execution import MapAsyncIterator - - -try: # pragma: no cover - anext -except NameError: # pragma: no cover (Python < 3.10) - # noinspection PyShadowingBuiltins - async def anext(iterator): - """Return the next item from an async iterator.""" - return await iterator.__anext__() - - -def describe_map_async_iterator(): - @mark.asyncio - async def maps_over_async_generator(): - async def source(): - yield 1 - yield 2 - yield 3 - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - assert await anext(doubles) == 6 - with raises(StopAsyncIteration): - assert await anext(doubles) - - @mark.asyncio - async def maps_over_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - try: - return items.pop(0) - except IndexError: - raise StopAsyncIteration - - doubles = MapAsyncIterator(Iterable(), lambda x: x + x) - - values = [value async for value in doubles] - - assert not items - assert values == [2, 4, 6] - - @mark.asyncio - async def compatible_with_async_for(): - async def source(): - yield 1 - yield 2 - yield 3 - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - values = [value async for value in doubles] - - assert values == [2, 4, 6] - - @mark.asyncio - async def maps_over_async_values_with_async_function(): - async def source(): - yield 1 - yield 2 - yield 3 - - async def double(x): - return x + x - - doubles = MapAsyncIterator(source(), double) - - values = [value async for value in doubles] - - assert values == [2, 4, 6] - - @mark.asyncio - async def allows_returning_early_from_mapped_async_generator(): - async def source(): - yield 1 - yield 2 - yield 3 # pragma: no cover - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Early return - await doubles.aclose() - - # Subsequent next calls - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def allows_returning_early_from_mapped_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - try: - return items.pop(0) - except IndexError: # pragma: no cover - raise StopAsyncIteration - - doubles = MapAsyncIterator(Iterable(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Early return - await doubles.aclose() - - # Subsequent next calls - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def passes_through_early_return_from_async_values(): - async def source(): - try: - yield 1 - yield 2 - yield 3 # pragma: no cover - finally: - yield "Done" - yield "Last" - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - assert await 
anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Early return - await doubles.aclose() - - # Subsequent next calls may yield from finally block - assert await anext(doubles) == "LastLast" - with raises(GeneratorExit): - assert await anext(doubles) - - @mark.asyncio - async def allows_throwing_errors_through_async_iterable(): - items = [1, 2, 3] - - class Iterable: - def __aiter__(self): - return self - - async def __anext__(self): - try: - return items.pop(0) - except IndexError: # pragma: no cover - raise StopAsyncIteration - - doubles = MapAsyncIterator(Iterable(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Throw error - with raises(RuntimeError, match="Ouch") as exc_info: - await doubles.athrow(RuntimeError("Ouch")) - - assert str(exc_info.value) == "Ouch" - - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def allows_throwing_errors_with_values_through_async_iterators(): - class Iterator: - def __aiter__(self): - return self - - async def __anext__(self): - return 1 - - one = MapAsyncIterator(Iterator(), lambda x: x) - - assert await anext(one) == 1 - - # Throw error with value passed separately - try: - raise RuntimeError("Ouch") - except RuntimeError as error: - with raises(RuntimeError, match="Ouch") as exc_info: - await one.athrow(error.__class__, error) - - assert exc_info.value is error - assert exc_info.tb is error.__traceback__ - - with raises(StopAsyncIteration): - await anext(one) - - @mark.asyncio - async def allows_throwing_errors_with_traceback_through_async_iterators(): - class Iterator: - def __aiter__(self): - return self - - async def __anext__(self): - return 1 - - one = MapAsyncIterator(Iterator(), lambda x: x) - - assert await anext(one) == 1 - - # Throw error with traceback passed separately - try: - raise RuntimeError("Ouch") - except RuntimeError as error: - with raises(RuntimeError) as exc_info: - await one.athrow(error.__class__, None, error.__traceback__) - - assert exc_info.tb and error.__traceback__ - assert exc_info.tb.tb_frame is error.__traceback__.tb_frame - - with raises(StopAsyncIteration): - await anext(one) - - @mark.asyncio - async def passes_through_caught_errors_through_async_generators(): - async def source(): - try: - yield 1 - yield 2 - yield 3 # pragma: no cover - except Exception as e: - yield e - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Throw error - await doubles.athrow(RuntimeError("ouch")) - - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - @mark.asyncio - async def does_not_normally_map_over_thrown_errors(): - async def source(): - yield "Hello" - raise RuntimeError("Goodbye") - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - assert await anext(doubles) == "HelloHello" - - with raises(RuntimeError) as exc_info: - await anext(doubles) - - assert str(exc_info.value) == "Goodbye" - - @mark.asyncio - async def does_not_normally_map_over_externally_thrown_errors(): - async def source(): - yield "Hello" - - doubles = MapAsyncIterator(source(), lambda x: x + x) - - assert await anext(doubles) == "HelloHello" - - with raises(RuntimeError) as exc_info: - await doubles.athrow(RuntimeError("Goodbye")) - - assert str(exc_info.value) == "Goodbye" - - @mark.asyncio - async def can_use_simple_iterator_instead_of_generator(): - 
async def source(): - yield 1 - yield 2 - yield 3 - - class Source: - def __init__(self): - self.counter = 0 - - def __aiter__(self): - return self - - async def __anext__(self): - self.counter += 1 - if self.counter > 3: - raise StopAsyncIteration - return self.counter - - def double(x): - return x + x - - for iterator in source, Source: - doubles = MapAsyncIterator(iterator(), double) - - await doubles.aclose() - - with raises(StopAsyncIteration): - await anext(doubles) - - doubles = MapAsyncIterator(iterator(), double) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - assert await anext(doubles) == 6 - - with raises(StopAsyncIteration): - await anext(doubles) - - doubles = MapAsyncIterator(iterator(), double) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - # Throw error - with raises(RuntimeError) as exc_info: - await doubles.athrow(RuntimeError("ouch")) - - assert str(exc_info.value) == "ouch" - - with raises(StopAsyncIteration): - await anext(doubles) - with raises(StopAsyncIteration): - await anext(doubles) - - await doubles.athrow(RuntimeError("no more ouch")) - - with raises(StopAsyncIteration): - await anext(doubles) - - await doubles.aclose() - - doubles = MapAsyncIterator(iterator(), double) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - - try: - raise ValueError("bad") - except ValueError: - tb = sys.exc_info()[2] - - # Throw error - with raises(ValueError): - await doubles.athrow(ValueError, None, tb) - - await sleep(0) - - @mark.asyncio - async def stops_async_iteration_on_close(): - async def source(): - yield 1 - await Event().wait() # Block forever - yield 2 # pragma: no cover - yield 3 # pragma: no cover - - singles = source() - doubles = MapAsyncIterator(singles, lambda x: x * 2) - - result = await anext(doubles) - assert result == 2 - - # Make sure it is blocked - doubles_future = ensure_future(anext(doubles)) - await sleep(0.05) - assert not doubles_future.done() - - # Unblock and watch StopAsyncIteration propagate - await doubles.aclose() - await sleep(0.05) - assert doubles_future.done() - assert isinstance(doubles_future.exception(), StopAsyncIteration) - - with raises(StopAsyncIteration): - await anext(singles) - - @mark.asyncio - async def can_unset_closed_state_of_async_iterator(): - items = [1, 2, 3] - - class Iterator: - def __init__(self): - self.is_closed = False - - def __aiter__(self): - return self - - async def __anext__(self): - if self.is_closed: - raise StopAsyncIteration - try: - return items.pop(0) - except IndexError: - raise StopAsyncIteration - - async def aclose(self): - self.is_closed = True - - iterator = Iterator() - doubles = MapAsyncIterator(iterator, lambda x: x + x) - - assert await anext(doubles) == 2 - assert await anext(doubles) == 4 - assert not iterator.is_closed - await doubles.aclose() - assert iterator.is_closed - with raises(StopAsyncIteration): - await anext(iterator) - with raises(StopAsyncIteration): - await anext(doubles) - assert doubles.is_closed - - iterator.is_closed = False - doubles.is_closed = False - assert not doubles.is_closed - - assert await anext(doubles) == 6 - assert not doubles.is_closed - assert not iterator.is_closed - with raises(StopAsyncIteration): - await anext(iterator) - with raises(StopAsyncIteration): - await anext(doubles) - assert not doubles.is_closed - assert not iterator.is_closed - - @mark.asyncio - async def can_cancel_async_iterator_while_waiting(): - class Iterator: - def __init__(self): - self.is_closed = False - 
self.value = 1 - - def __aiter__(self): - return self - - async def __anext__(self): - try: - await sleep(0.5) - return self.value # pragma: no cover - except CancelledError: - self.value = -1 - raise - - async def aclose(self): - self.is_closed = True - - iterator = Iterator() - doubles = MapAsyncIterator(iterator, lambda x: x + x) # pragma: no cover exit - cancelled = False - - async def iterator_task(): - nonlocal cancelled - try: - async for _ in doubles: - assert False # pragma: no cover - except CancelledError: - cancelled = True - - task = ensure_future(iterator_task()) - await sleep(0.05) - assert not cancelled - assert not doubles.is_closed - assert iterator.value == 1 - assert not iterator.is_closed - task.cancel() - await sleep(0.05) - assert cancelled - assert iterator.value == -1 - assert doubles.is_closed - assert iterator.is_closed diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 6db8bdab..50159995 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -1,8 +1,9 @@ -from typing import Awaitable +import inspect +from typing import Awaitable, cast -from pytest import mark, raises +import pytest -from graphql.execution import MiddlewareManager, execute +from graphql.execution import Middleware, MiddlewareManager, execute, subscribe from graphql.language.parser import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -90,7 +91,7 @@ def capitalize_middleware(next_, *args, **kwargs): assert result.data == {"first": "Eno", "second": "Owt"} # type: ignore - @mark.asyncio + @pytest.mark.asyncio async def single_async_function(): doc = parse("{ first second }") @@ -141,7 +142,6 @@ def second(self, _info): ) class ReverseMiddleware: - # noinspection PyMethodMayBeStatic def resolve(self, next_, *args, **kwargs): return next_(*args, **kwargs)[::-1] @@ -185,7 +185,6 @@ def reverse_middleware(next_, *args, **kwargs): return next_(*args, **kwargs)[::-1] class CaptitalizeMiddleware: - # noinspection PyMethodMayBeStatic def resolve(self, next_, *args, **kwargs): return next_(*args, **kwargs).capitalize() @@ -202,7 +201,7 @@ def resolve(self, next_, *args, **kwargs): ) assert result.data == {"field": "devloseR"} # type: ignore - @mark.asyncio + @pytest.mark.asyncio async def with_async_function_and_object(): doc = parse("{ field }") @@ -219,7 +218,6 @@ async def reverse_middleware(next_, *args, **kwargs): return (await next_(*args, **kwargs))[::-1] class CaptitalizeMiddleware: - # noinspection PyMethodMayBeStatic async def resolve(self, next_, *args, **kwargs): return (await next_(*args, **kwargs)).capitalize() @@ -240,6 +238,45 @@ async def resolve(self, next_, *args, **kwargs): result = await awaitable_result assert result.data == {"field": "devloseR"} + @pytest.mark.asyncio + async def subscription_simple(): + async def bar_resolve(_obj, _info): + yield "bar" + yield "oof" + + test_type = GraphQLObjectType( + "Subscription", + { + "bar": GraphQLField( + GraphQLString, + resolve=lambda message, _info: message, + subscribe=bar_resolve, + ), + }, + ) + doc = parse("subscription { bar }") + + async def reverse_middleware(next_, value, info, **kwargs): + awaitable_maybe = next_(value, info, **kwargs) + return awaitable_maybe[::-1] + + noop_type = GraphQLObjectType( + "Noop", + {"noop": GraphQLField(GraphQLString)}, + ) + schema = GraphQLSchema(query=noop_type, subscription=test_type) + + agen = subscribe( + schema, + doc, + middleware=MiddlewareManager(reverse_middleware), 
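# subscribe() returns an async generator of execution results here, and
# the MiddlewareManager wraps the per-event field resolver, which is why
# "bar" and "oof" arrive reversed as "rab" and "foo" below. A minimal
# standalone sketch of the same wrapping (illustrative names, not part of
# this patch):
#
#     async def upper_middleware(next_, obj, info, **kwargs):
#         return next_(obj, info, **kwargs).upper()
#
#     MiddlewareManager(upper_middleware)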
+ ) + assert inspect.isasyncgen(agen) + data = (await agen.__anext__()).data + assert data == {"bar": "rab"} + data = (await agen.__anext__()).data + assert data == {"bar": "foo"} + def describe_without_manager(): def no_middleware(): doc = parse("{ field }") @@ -280,13 +317,13 @@ def bad_middleware_object(): "TestType", {"field": GraphQLField(GraphQLString)} ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker execute( GraphQLSchema(test_type), doc, None, - middleware={"bad": "value"}, # type: ignore + middleware=cast("Middleware", {"bad": "value"}), ) assert str(exc_info.value) == ( diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 819eddd4..b03004de 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -1,9 +1,16 @@ -import asyncio -from typing import Awaitable +from __future__ import annotations -from pytest import mark +from asyncio import sleep +from typing import Any, Awaitable -from graphql.execution import execute, execute_sync +import pytest + +from graphql.execution import ( + ExperimentalIncrementalExecutionResults, + execute, + execute_sync, + experimental_execute_incrementally, +) from graphql.language import parse from graphql.type import ( GraphQLArgument, @@ -16,7 +23,6 @@ # noinspection PyPep8Naming class NumberHolder: - theNumber: int def __init__(self, originalNumber: int): @@ -25,7 +31,6 @@ def __init__(self, originalNumber: int): # noinspection PyPep8Naming class Root: - numberHolder: NumberHolder def __init__(self, originalNumber: int): @@ -35,20 +40,31 @@ def immediately_change_the_number(self, newNumber: int) -> NumberHolder: self.numberHolder.theNumber = newNumber return self.numberHolder - async def promise_to_change_the_number(self, new_number: int) -> NumberHolder: - await asyncio.sleep(0) - return self.immediately_change_the_number(new_number) + async def promise_to_change_the_number(self, newNumber: int) -> NumberHolder: + await sleep(0) + return self.immediately_change_the_number(newNumber) def fail_to_change_the_number(self, newNumber: int): raise RuntimeError(f"Cannot change the number to {newNumber}") async def promise_and_fail_to_change_the_number(self, newNumber: int): - await asyncio.sleep(0) + await sleep(0) self.fail_to_change_the_number(newNumber) +async def promise_to_get_the_number(holder: NumberHolder, _info) -> int: + await sleep(0) + return holder.theNumber + + numberHolderType = GraphQLObjectType( - "NumberHolder", {"theNumber": GraphQLField(GraphQLInt)} + "NumberHolder", + { + "theNumber": GraphQLField(GraphQLInt), + "promiseToGetTheNumber": GraphQLField( + GraphQLInt, resolve=promise_to_get_the_number + ), + }, ) # noinspection PyPep8Naming @@ -91,7 +107,7 @@ async def promise_and_fail_to_change_the_number(self, newNumber: int): def describe_execute_handles_mutation_execution_ordering(): - @mark.asyncio + @pytest.mark.asyncio async def evaluates_mutations_serially(): document = parse( """ @@ -139,7 +155,7 @@ def does_not_include_illegal_mutation_fields_in_output(): result = execute_sync(schema=schema, document=document) assert result == ({}, None) - @mark.asyncio + @pytest.mark.asyncio async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): document = parse( """ @@ -195,3 +211,113 @@ async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): }, ], ) + + @pytest.mark.asyncio + async def mutation_fields_with_defer_do_not_block_next_mutation(): + document = parse( + """ + 
mutation M { + first: promiseToChangeTheNumber(newNumber: 1) { + ...DeferFragment @defer(label: "defer-label") + }, + second: immediatelyChangeTheNumber(newNumber: 2) { + theNumber + } + } + fragment DeferFragment on NumberHolder { + promiseToGetTheNumber + } + """ + ) + + root_value = Root(6) + mutation_result = await experimental_execute_incrementally( # type: ignore + schema, document, root_value + ) + + patches: list[Any] = [] + assert isinstance(mutation_result, ExperimentalIncrementalExecutionResults) + patches.append(mutation_result.initial_result.formatted) + async for patch in mutation_result.subsequent_results: + patches.append(patch.formatted) + + assert patches == [ + { + "data": {"first": {}, "second": {"theNumber": 2}}, + "pending": [{"id": "0", "path": ["first"], "label": "defer-label"}], + "hasNext": True, + }, + { + "incremental": [{"id": "0", "data": {"promiseToGetTheNumber": 2}}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def mutation_inside_of_a_fragment(): + document = parse( + """ + mutation M { + ...MutationFragment + second: immediatelyChangeTheNumber(newNumber: 2) { + theNumber + } + } + fragment MutationFragment on Mutation { + first: promiseToChangeTheNumber(newNumber: 1) { + theNumber + }, + } + """ + ) + + root_value = Root(6) + mutation_result = await execute(schema, document, root_value) # type: ignore + + assert mutation_result == ( + {"first": {"theNumber": 1}, "second": {"theNumber": 2}}, + None, + ) + + @pytest.mark.asyncio + async def mutation_with_defer_is_not_executed_serially(): + document = parse( + """ + mutation M { + ...MutationFragment @defer(label: "defer-label") + second: immediatelyChangeTheNumber(newNumber: 2) { + theNumber + } + } + fragment MutationFragment on Mutation { + first: promiseToChangeTheNumber(newNumber: 1) { + theNumber + }, + } + """ + ) + + root_value = Root(6) + mutation_result = experimental_execute_incrementally( + schema, document, root_value + ) + + patches: list[Any] = [] + assert isinstance(mutation_result, ExperimentalIncrementalExecutionResults) + patches.append(mutation_result.initial_result.formatted) + async for patch in mutation_result.subsequent_results: + patches.append(patch.formatted) + + assert patches == [ + { + "data": {"second": {"theNumber": 2}}, + "pending": [{"id": "0", "path": [], "label": "defer-label"}], + "hasNext": True, + }, + { + "incremental": [{"id": "0", "data": {"first": {"theNumber": 1}}}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index da15e1b6..6c98eb67 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -1,9 +1,10 @@ +import asyncio import re from typing import Any, Awaitable, cast -from pytest import mark +import pytest -from graphql.execution import execute, execute_sync, ExecutionResult +from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import AwaitableOrValue from graphql.type import ( @@ -110,7 +111,7 @@ def patch(data: str) -> str: async def execute_sync_and_async(query: str, root_value: Any) -> ExecutionResult: sync_result = execute_sync(schema, parse(query), root_value) async_result = await cast( - Awaitable[ExecutionResult], execute(schema, parse(patch(query)), root_value) + "Awaitable[ExecutionResult]", execute(schema, parse(patch(query)), root_value) ) assert repr(async_result) == patch(repr(sync_result)) @@ -125,12 +126,12 @@ 
def describe_nulls_a_nullable_field(): } """ - @mark.asyncio + @pytest.mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ({"sync": None}, None) - @mark.asyncio + @pytest.mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -145,7 +146,6 @@ async def throws(): ) def describe_nulls_a_returned_object_that_contains_a_non_null_field(): - query = """ { syncNest { @@ -154,7 +154,7 @@ def describe_nulls_a_returned_object_that_contains_a_non_null_field(): } """ - @mark.asyncio + @pytest.mark.asyncio async def that_returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ( @@ -169,7 +169,7 @@ async def that_returns_null(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def that_throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -215,17 +215,17 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): }, } - @mark.asyncio + @pytest.mark.asyncio async def returns_null(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, NullingData()) + "Awaitable[ExecutionResult]", execute_query(query, NullingData()) ) assert result == (data, None) - @mark.asyncio + @pytest.mark.asyncio async def throws(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, ThrowingData()) + "Awaitable[ExecutionResult]", execute_query(query, ThrowingData()) ) assert result == ( data, @@ -349,10 +349,10 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): "anotherPromiseNest": None, } - @mark.asyncio + @pytest.mark.asyncio async def returns_null(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, NullingData()) + "Awaitable[ExecutionResult]", execute_query(query, NullingData()) ) assert result == ( data, @@ -412,10 +412,10 @@ async def returns_null(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def throws(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, ThrowingData()) + "Awaitable[ExecutionResult]", execute_query(query, ThrowingData()) ) assert result == ( data, @@ -478,9 +478,10 @@ def describe_nulls_the_top_level_if_non_nullable_field(): } """ - @mark.asyncio + @pytest.mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) + await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 assert result == ( None, [ @@ -493,9 +494,10 @@ async def returns_null(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) + await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 assert result == ( None, [ @@ -508,7 +510,6 @@ async def throws(): ) def describe_handles_non_null_argument(): - # noinspection PyPep8Naming schema_with_non_null_arg = GraphQLSchema( GraphQLObjectType( diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py new file mode 100644 index 00000000..2040b1a7 --- /dev/null +++ b/tests/execution/test_oneof.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from graphql.execution import ExecutionResult, execute +from graphql.language import parse +from graphql.utilities import build_schema + +if TYPE_CHECKING: + from graphql.pyutils import AwaitableOrValue + +schema = build_schema(""" + type Query { + test(input: TestInputObject!): TestObject + } + + input TestInputObject @oneOf { + a: 
String
+        b: Int
+    }
+
+    type TestObject {
+        a: String
+        b: Int
+    }
+    """)
+
+
+def execute_query(
+    query: str, root_value: Any, variable_values: dict[str, Any] | None = None
+) -> AwaitableOrValue[ExecutionResult]:
+    return execute(schema, parse(query), root_value, variable_values=variable_values)
+
+
+def describe_execute_handles_one_of_input_objects():
+    def describe_one_of_input_objects():
+        root_value = {
+            "test": lambda _info, input: input,  # noqa: A006
+        }
+
+        def accepts_a_good_default_value():
+            query = """
+                query ($input: TestInputObject! = {a: "abc"}) {
+                  test(input: $input) {
+                    a
+                    b
+                  }
+                }
+                """
+            result = execute_query(query, root_value)
+
+            assert result == ({"test": {"a": "abc", "b": None}}, None)
+
+        def rejects_a_bad_default_value():
+            query = """
+                query ($input: TestInputObject! = {a: "abc", b: 123}) {
+                  test(input: $input) {
+                    a
+                    b
+                  }
+                }
+                """
+            result = execute_query(query, root_value)
+
+            assert result == (
+                {"test": None},
+                [
+                    {
+                        # This type of error would be caught at validation time,
+                        # hence the vague error message here.
+                        "message": "Argument 'input' of non-null type"
+                        " 'TestInputObject!' must not be null.",
+                        "locations": [(3, 31)],
+                        "path": ["test"],
+                    }
+                ],
+            )
+
+        def accepts_a_good_variable():
+            query = """
+                query ($input: TestInputObject!) {
+                  test(input: $input) {
+                    a
+                    b
+                  }
+                }
+                """
+            result = execute_query(query, root_value, {"input": {"a": "abc"}})
+
+            assert result == ({"test": {"a": "abc", "b": None}}, None)
+
+        def accepts_a_good_variable_with_an_undefined_key():
+            query = """
+                query ($input: TestInputObject!) {
+                  test(input: $input) {
+                    a
+                    b
+                  }
+                }
+                """
+            result = execute_query(query, root_value, {"input": {"a": "abc"}})
+
+            assert result == ({"test": {"a": "abc", "b": None}}, None)
+
+        def rejects_a_variable_with_multiple_non_null_keys():
+            query = """
+                query ($input: TestInputObject!) {
+                  test(input: $input) {
+                    a
+                    b
+                  }
+                }
+                """
+            result = execute_query(query, root_value, {"input": {"a": "abc", "b": 123}})
+
+            assert result == (
+                None,
+                [
+                    {
+                        "message": "Variable '$input' got invalid value"
+                        " {'a': 'abc', 'b': 123}; Exactly one key must be specified"
+                        " for OneOf type 'TestInputObject'.",
+                        "locations": [(2, 24)],
+                    }
+                ],
+            )
+
+        def rejects_a_variable_with_multiple_nullable_keys():
+            query = """
+                query ($input: TestInputObject!) 
{ + test(input: $input) { + a + b + } + } + """ + result = execute_query( + query, root_value, {"input": {"a": "abc", "b": None}} + ) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " {'a': 'abc', 'b': None}; Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + "locations": [(2, 24)], + } + ], + ) diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index aeb2a142..f4dc86b1 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -1,18 +1,18 @@ import asyncio from typing import Awaitable -from pytest import mark +import pytest from graphql.execution import execute from graphql.language import parse from graphql.type import ( - GraphQLSchema, - GraphQLObjectType, - GraphQLField, - GraphQLList, - GraphQLInterfaceType, GraphQLBoolean, + GraphQLField, GraphQLInt, + GraphQLInterfaceType, + GraphQLList, + GraphQLObjectType, + GraphQLSchema, GraphQLString, ) @@ -32,7 +32,28 @@ async def wait(self) -> bool: def describe_parallel_execution(): - @mark.asyncio + @pytest.mark.asyncio + async def resolve_single_field(): + # make sure that the special case of resolving a single field works + async def resolve(*_args): + return True + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + { + "foo": GraphQLField(GraphQLBoolean, resolve=resolve), + }, + ) + ) + + awaitable_result = execute(schema, parse("{foo}")) + assert isinstance(awaitable_result, Awaitable) + result = await awaitable_result + + assert result == ({"foo": True}, None) + + @pytest.mark.asyncio async def resolve_fields_in_parallel(): barrier = Barrier(2) @@ -58,7 +79,26 @@ async def resolve(*_args): assert result == ({"foo": True, "bar": True}, None) - @mark.asyncio + @pytest.mark.asyncio + async def resolve_single_element_list(): + # make sure that the special case of resolving a single element list works + async def resolve(*_args): + return [True] + + schema = GraphQLSchema( + GraphQLObjectType( + "Query", + {"foo": GraphQLField(GraphQLList(GraphQLBoolean), resolve=resolve)}, + ) + ) + + awaitable_result = execute(schema, parse("{foo}")) + assert isinstance(awaitable_result, Awaitable) + result = await awaitable_result + + assert result == ({"foo": [True]}, None) + + @pytest.mark.asyncio async def resolve_list_in_parallel(): barrier = Barrier(2) @@ -88,7 +128,7 @@ async def resolve_list(*args): assert result == ({"foo": [True, True]}, None) - @mark.asyncio + @pytest.mark.asyncio async def resolve_is_type_of_in_parallel(): FooType = GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_resolve.py b/tests/execution/test_resolve.py index f945e6c3..db52d638 100644 --- a/tests/execution/test_resolve.py +++ b/tests/execution/test_resolve.py @@ -2,14 +2,16 @@ from typing import Any from graphql.error import GraphQLError -from graphql.execution import execute_sync, ExecutionResult -from graphql.language import parse, SourceLocation +from graphql.execution import ExecutionResult, execute_sync +from graphql.language import SourceLocation, parse from graphql.type import ( GraphQLArgument, GraphQLField, + GraphQLID, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, + GraphQLList, GraphQLObjectType, GraphQLSchema, GraphQLString, @@ -54,7 +56,7 @@ def default_function_accesses_keys_of_chain_map(): def default_function_calls_methods(): class RootValue: - _secret = "secretValue" + _secret = "secretValue" # noqa: S105 def test(self, _info): return self._secret @@ -108,7 
+110,7 @@ def uses_provided_resolve_function(): "aStr": GraphQLArgument(GraphQLString), "aInt": GraphQLArgument(GraphQLInt), }, - resolve=lambda source, info, **args: repr([source, args]), + resolve=lambda source, _info, **args: repr([source, args]), ) ) @@ -146,7 +148,7 @@ def transforms_arguments_using_out_names(): "aStr": GraphQLArgument(GraphQLString, out_name="a_str"), "aInt": GraphQLArgument(GraphQLInt, out_name="a_int"), }, - resolve=lambda source, info, **args: repr([source, args]), + resolve=lambda source, _info, **args: repr([source, args]), ) ) @@ -187,7 +189,7 @@ def transforms_arguments_with_inputs_using_out_names(): GraphQLField( GraphQLString, args={"aInput": GraphQLArgument(TestInputObject, out_name="a_input")}, - resolve=lambda source, info, **args: repr([source, args]), + resolve=lambda source, _info, **args: repr([source, args]), ) ) @@ -213,6 +215,91 @@ def execute_query(query: str, root_value: Any = None) -> ExecutionResult: None, ) + def transforms_default_values_using_out_names(): + # This is an extension of GraphQL.js. + resolver_kwargs: Any + + def search_resolver(_obj: None, _info, **kwargs): + nonlocal resolver_kwargs + resolver_kwargs = kwargs + return [{"id": "42"}] + + filters_type = GraphQLInputObjectType( + "SearchFilters", + {"pageSize": GraphQLInputField(GraphQLInt, out_name="page_size")}, + ) + result_type = GraphQLObjectType("SearchResult", {"id": GraphQLField(GraphQLID)}) + query = GraphQLObjectType( + "Query", + { + "search": GraphQLField( + GraphQLList(result_type), + { + "searchFilters": GraphQLArgument( + filters_type, {"pageSize": 10}, out_name="search_filters" + ) + }, + resolve=search_resolver, + ) + }, + ) + schema = GraphQLSchema(query) + + resolver_kwargs = None + result = execute_sync(schema, parse("{ search { id } }")) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 10}} + + resolver_kwargs = None + result = execute_sync( + schema, parse("{ search(searchFilters:{pageSize: 25}) { id } }") + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 10}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + variable_values={"searchFilters": {"pageSize": 25}}, + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters = {pageSize: 25}) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + def pass_error_from_resolver_wrapped_as_located_graphql_error(): def resolve(_obj, _info): raise ValueError("Some error") diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index 150f1681..7096c5fb 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from graphql.execution import execute_sync 
from graphql.language import parse from graphql.type import ( @@ -17,9 +19,8 @@ def describe_execute_handles_execution_with_a_complex_schema(): def executes_using_a_schema(): class Article: - # noinspection PyShadowingBuiltins - def __init__(self, id: int): + def __init__(self, id: int): # noqa: A002 self.id = id self.isPublished = True self.author = JohnSmith() @@ -77,7 +78,7 @@ def __init__(self, id: int): "article": GraphQLField( BlogArticle, args={"id": GraphQLArgument(GraphQLID)}, - resolve=lambda _obj, _info, id: Article(id), + resolve=lambda _obj, _info, id: Article(id), # noqa: A006 ), "feed": GraphQLField( GraphQLList(BlogArticle), @@ -90,7 +91,7 @@ def __init__(self, id: int): # noinspection PyPep8Naming,PyMethodMayBeStatic class Author: - def pic(self, info_, width: int, height: int) -> "Pic": + def pic(self, _info, width: int, height: int) -> Pic: return Pic(123, width, height) @property diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py new file mode 100644 index 00000000..46237fc1 --- /dev/null +++ b/tests/execution/test_stream.py @@ -0,0 +1,2248 @@ +from __future__ import annotations + +from asyncio import Event, Lock, gather, sleep +from typing import Any, Awaitable, NamedTuple + +import pytest + +from graphql.error import GraphQLError +from graphql.execution import ( + ExecutionResult, + ExperimentalIncrementalExecutionResults, + IncrementalStreamResult, + experimental_execute_incrementally, +) +from graphql.execution.incremental_publisher import StreamRecord +from graphql.language import DocumentNode, parse +from graphql.pyutils import Path +from graphql.type import ( + GraphQLField, + GraphQLID, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, +) + +try: # pragma: no cover + anext # noqa: B018 +except NameError: # pragma: no cover (Python < 3.10) + # noinspection PyShadowingBuiltins + async def anext(iterator): + """Return the next item from an async iterator.""" + return await iterator.__anext__() + + +friend_type = GraphQLObjectType( + "Friend", + { + "id": GraphQLField(GraphQLID), + "name": GraphQLField(GraphQLString), + "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + + +class Friend(NamedTuple): + id: int + name: str + + +friends = [Friend(1, "Luke"), Friend(2, "Han"), Friend(3, "Leia")] + +query = GraphQLObjectType( + "Query", + { + "scalarList": GraphQLField(GraphQLList(GraphQLString)), + "scalarListList": GraphQLField(GraphQLList(GraphQLList(GraphQLString))), + "friendList": GraphQLField(GraphQLList(friend_type)), + "nonNullFriendList": GraphQLField(GraphQLList(GraphQLNonNull(friend_type))), + "nestedObject": GraphQLField( + GraphQLObjectType( + "NestedObject", + { + "scalarField": GraphQLField(GraphQLString), + "nonNullScalarField": GraphQLField(GraphQLNonNull(GraphQLString)), + "nestedFriendList": GraphQLField(GraphQLList(friend_type)), + "deeperNestedObject": GraphQLField( + GraphQLObjectType( + "DeeperNestedObject", + { + "nonNullScalarField": GraphQLField( + GraphQLNonNull(GraphQLString) + ), + "deeperNestedFriendList": GraphQLField( + GraphQLList(friend_type) + ), + }, + ) + ), + }, + ) + ), + }, +) + +schema = GraphQLSchema(query) + + +async def complete(document: DocumentNode, root_value: Any = None) -> Any: + result = experimental_execute_incrementally(schema, document, root_value) + if isinstance(result, Awaitable): + result = await result + + if isinstance(result, ExperimentalIncrementalExecutionResults): + results: list[Any] = [result.initial_result.formatted] + 
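+        # Drain the incremental response: the initial result comes first,
+        # then each subsequent patch is appended until the stream reports
+        # hasNext=False.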
async for patch in result.subsequent_results: + results.append(patch.formatted) + return results + + assert isinstance(result, ExecutionResult) + return result.formatted + + +async def complete_async( + document: DocumentNode, num_calls: int, root_value: Any = None +) -> Any: + result = experimental_execute_incrementally(schema, document, root_value) + assert isinstance(result, Awaitable) + result = await result + assert isinstance(result, ExperimentalIncrementalExecutionResults) + + class IteratorResult: + """Iterator result with formatted output.""" + + def __init__(self, value=None): + self.value = value + + @property + def formatted(self): + if self.value is None: + return {"done": True, "value": None} + return {"done": False, "value": self.value.formatted} + + lock = Lock() + iterator = result.subsequent_results + + async def locked_next(): + """Get next value with lock for concurrent access.""" + async with lock: + try: + next_value = await anext(iterator) + except StopAsyncIteration: + return None + return next_value + + next_results = [locked_next() for _i in range(num_calls)] + + results = [result.initial_result] + results.extend(await gather(*next_results)) + + return [IteratorResult(result).formatted for result in results] + + +def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: + return {**args, **modifications} + + +def describe_execute_stream_directive(): + def can_format_and_print_incremental_stream_result(): + result = IncrementalStreamResult(items=["hello", "world"], id="foo") + assert result.formatted == {"items": ["hello", "world"], "id": "foo"} + assert ( + str(result) == "IncrementalStreamResult(items=['hello', 'world'], id='foo')" + ) + + result = IncrementalStreamResult( + items=["hello", "world"], + id="foo", + sub_path=["bar", 1], + errors=[GraphQLError("oops")], + extensions={"baz": 2}, + ) + assert result.formatted == { + "items": ["hello", "world"], + "id": "foo", + "subPath": ["bar", 1], + "errors": [{"message": "oops"}], + "extensions": {"baz": 2}, + } + assert ( + str(result) == "IncrementalStreamResult(items=['hello', 'world']," + " id='foo', sub_path=['bar', 1], errors=[GraphQLError('oops')]," + " extensions={'baz': 2})" + ) + + # noinspection PyTypeChecker + def can_compare_incremental_stream_result(): + args: dict[str, Any] = { + "items": ["hello", "world"], + "id": "foo", + "sub_path": ["bar", 1], + "errors": [GraphQLError("oops")], + "extensions": {"baz": 2}, + } + result = IncrementalStreamResult(**args) + assert result == IncrementalStreamResult(**args) + assert result != IncrementalStreamResult( + **modified_args(args, items=["hello", "foo"]) + ) + assert result != IncrementalStreamResult(**modified_args(args, id="bar")) + assert result != IncrementalStreamResult( + **modified_args(args, sub_path=["bar", 2]) + ) + assert result != IncrementalStreamResult(**modified_args(args, errors=[])) + assert result != IncrementalStreamResult( + **modified_args(args, extensions={"baz": 1}) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] + assert result == tuple(args.values())[:3] + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result != (["hello", "world"], "bar") + args["subPath"] = args.pop("sub_path") + assert result == args + assert result != {**args, "items": ["hello", "foo"]} + assert result != {**args, "id": "bar"} + assert result != {**args, "subPath": ["bar", 2]} + assert result != {**args, "errors": []} + assert result != {**args, 
"extensions": {"baz": 1}} + + def can_print_stream_record(): + record = StreamRecord(Path(None, 0, None)) + assert str(record) == "StreamRecord(path=[0])" + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" + + @pytest.mark.asyncio + async def can_stream_a_list_field(): + document = parse("{ scalarList @stream(initialCount: 1) }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": {"scalarList": ["apple"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], + "hasNext": True, + }, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, + { + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_use_default_value_of_initial_count(): + document = parse("{ scalarList @stream }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": {"scalarList": []}, + "pending": [{"id": "0", "path": ["scalarList"]}], + "hasNext": True, + }, + {"incremental": [{"items": ["apple"], "id": "0"}], "hasNext": True}, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, + { + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def negative_values_of_initial_count_throw_field_errors(): + document = parse("{ scalarList @stream(initialCount: -2) }") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == { + "data": {"scalarList": None}, + "errors": [ + { + "message": "initialCount must be a positive integer", + "locations": [{"line": 1, "column": 3}], + "path": ["scalarList"], + } + ], + } + + @pytest.mark.asyncio + async def non_integer_values_of_initial_count_throw_field_errors(): + document = parse("{ scalarList @stream(initialCount: 1.5) }") + result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) + assert result == { + "data": {"scalarList": None}, + "errors": [ + { + "message": "Argument 'initialCount' has invalid value 1.5.", + "locations": [{"line": 1, "column": 36}], + "path": ["scalarList"], + } + ], + } + + @pytest.mark.asyncio + async def returns_label_from_stream_directive(): + document = parse( + '{ scalarList @stream(initialCount: 1, label: "scalar-stream") }' + ) + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": {"scalarList": ["apple"]}, + "pending": [ + {"id": "0", "path": ["scalarList"], "label": "scalar-stream"} + ], + "hasNext": True, + }, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, + { + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def throws_an_error_for_stream_directive_with_non_string_label(): + document = parse("{ scalarList @stream(initialCount: 1, label: 42) }") + result = await complete(document, {"scalarList": ["some apples"]}) + assert result == { + "data": {"scalarList": None}, + "errors": [ + { + "locations": [{"line": 1, "column": 46}], + "message": "Argument 'label' has invalid value 42.", + "path": ["scalarList"], + } + ], + } + + @pytest.mark.asyncio + async def can_disable_stream_using_if_argument(): + document = parse("{ scalarList @stream(initialCount: 0, if: false) 
}") + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == {"data": {"scalarList": ["apple", "banana", "coconut"]}} + + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def does_not_disable_stream_with_null_if_argument(): + document = parse( + "query ($shouldStream: Boolean)" + " { scalarList @stream(initialCount: 2, if: $shouldStream) }" + ) + result = await complete( + document, {"scalarList": ["apple", "banana", "coconut"]} + ) + assert result == [ + { + "data": {"scalarList": ["apple", "banana"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_stream_multi_dimensional_lists(): + document = parse("{ scalarListList @stream(initialCount: 1) }") + result = await complete( + document, + { + "scalarListList": lambda _info: [ + ["apple", "apple", "apple"], + ["banana", "banana", "banana"], + ["coconut", "coconut", "coconut"], + ] + }, + ) + assert result == [ + { + "data": {"scalarListList": [["apple", "apple", "apple"]]}, + "pending": [{"id": "0", "path": ["scalarListList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [["banana", "banana", "banana"]], "id": "0"}], + "hasNext": True, + }, + { + "incremental": [ + {"items": [["coconut", "coconut", "coconut"]], "id": "0"} + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_stream_a_field_that_returns_a_list_of_awaitables(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def await_friend(f): + return f + + result = await complete( + document, + {"friendList": lambda _info: [await_friend(f) for f in friends]}, + ) + assert result == [ + { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ], + }, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_stream_in_correct_order_with_list_of_awaitables(): + document = parse( + """ + query { + friendList @stream(initialCount: 0) { + name + id + } + } + """ + ) + + async def await_friend(f): + return f + + result = await complete( + document, + {"friendList": lambda _info: [await_friend(f) for f in friends]}, + ) + assert result == [ + { + "data": {"friendList": []}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Luke", "id": "1"}], "id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Han", "id": "2"}], "id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_stream_a_field_that_returns_a_list_with_nested_async_fields(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def get_name(f): + return f.name + + async def get_id(f): + return f.id + + result = await complete( + document, + { + "friendList": lambda _info: [ + {"name": get_name(f), "id": get_id(f)} for f in friends + ] + }, + ) + assert result == [ + { + "data": { + 
"friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ] + }, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_error_in_list_of_awaitables_before_initial_count_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def await_friend(f, i): + if i == 1: + raise RuntimeError("bad") + return f + + result = await complete( + document, + { + "friendList": lambda _info: [ + await_friend(f, i) for i, f in enumerate(friends) + ] + }, + ) + assert result == [ + { + "data": {"friendList": [{"name": "Luke", "id": "1"}, None]}, + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList", 1], + } + ], + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_error_in_list_of_awaitables_after_initial_count_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + name + id + } + } + """ + ) + + async def await_friend(f, i): + if i == 1: + raise RuntimeError("bad") + return f + + result = await complete( + document, + { + "friendList": lambda _info: [ + await_friend(f, i) for i, f in enumerate(friends) + ] + }, + ) + assert result == [ + { + "data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "id": "0", + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList", 1], + } + ], + } + ], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_stream_a_field_that_returns_an_async_iterable(): + document = parse( + """ + query { + friendList @stream { + name + id + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + yield friends[i] + + result = await complete(document, {"friendList": friend_list}) + assert result == [ + { + "data": {"friendList": []}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Luke", "id": "1"}], "id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Han", "id": "2"}], "id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + yield friends[i] + + result = await complete(document, {"friendList": friend_list}) + assert result == [ + { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ] + }, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "hasNext": True, 
+ }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def negative_initial_count_throw_error_on_field_returning_async_iterable(): + document = parse( + """ + query { + friendList @stream(initialCount: -2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + yield {} # pragma: no cover + + result = await complete(document, {"friendList": friend_list}) + assert result == { + "errors": [ + { + "message": "initialCount must be a positive integer", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList"], + } + ], + "data": {"friendList": None}, + } + + @pytest.mark.asyncio + async def can_handle_concurrent_calls_to_next_without_waiting(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + for i in range(3): + yield friends[i] + + result = await complete_async(document, 3, {"friendList": friend_list}) + assert result == [ + { + "done": False, + "value": { + "data": { + "friendList": [ + {"name": "Luke", "id": "1"}, + {"name": "Han", "id": "2"}, + ] + }, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + }, + { + "done": False, + "value": { + "incremental": [ + {"items": [{"name": "Leia", "id": "3"}], "id": "0"} + ], + "hasNext": True, + }, + }, + { + "done": False, + "value": {"completed": [{"id": "0"}], "hasNext": False}, + }, + {"done": True, "value": None}, + ] + + @pytest.mark.asyncio + async def handles_error_in_async_iterable_before_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 2) { + name + id + } + } + """ + ) + + async def friend_list(_info): + yield friends[0] + raise RuntimeError("bad") + + result = await complete(document, {"friendList": friend_list}) + assert result == { + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList"], + } + ], + "data": {"friendList": None}, + } + + @pytest.mark.asyncio + async def handles_error_in_async_iterable_after_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + name + id + } + } + """ + ) + + async def friend_list(_info): + yield friends[0] + raise RuntimeError("bad") + + result = await complete(document, {"friendList": friend_list}) + assert result == [ + { + "data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "bad", + "locations": [{"line": 3, "column": 15}], + "path": ["friendList"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + """ + ) + + result = await complete( + document, {"nonNullFriendList": lambda _info: [friends[0], None]} + ) + assert result == [ + { + "data": {"nonNullFriendList": [{"name": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Query.nonNullFriendList.", + "locations": [{"line": 3, "column": 15}], + "path": ["nonNullFriendList", 1], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def 
handles_null_for_non_null_async_items_after_initial_count_is_reached(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + name + } + } + """ + ) + + async def friend_list(_info): + try: + yield friends[0] + yield None + finally: + raise RuntimeError("Oops") + + result = await complete(document, {"nonNullFriendList": friend_list}) + assert result == [ + { + "data": {"nonNullFriendList": [{"name": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Query.nonNullFriendList.", + "locations": [{"line": 3, "column": 15}], + "path": ["nonNullFriendList", 1], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_error_thrown_in_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + scalarList @stream(initialCount: 1) + } + """ + ) + + async def scalar_list(_info): + return [friends[0].name, {}] + + result = await complete(document, {"scalarList": scalar_list}) + assert result == [ + { + "data": {"scalarList": ["Luke"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "id": "0", + "errors": [ + { + "message": "String cannot represent value: {}", + "locations": [{"line": 3, "column": 15}], + "path": ["scalarList", 1], + }, + ], + }, + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_error_in_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + return {"nonNullName": throw() if i < 0 else friends[i].name} + + def get_friends(_info): + return [get_friend(i) for i in (0, -1, 1)] + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_nested_async_error_in_complete_value_after_initial_count(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def get_friend_name(i): + if i < 0: + raise RuntimeError("Oops") + return friends[i].name + + def get_friends(_info): + return [{"nonNullName": get_friend_name(i)} for i in (0, -1, 1)] + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "completed": [{"id": "0"}], 
+ "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_error_in_complete_value_after_initial_count_non_null(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + return {"nonNullName": throw() if i < 0 else friends[i].name} + + def get_friends(_info): + return [get_friend(i) for i in (0, -1, 1)] + + result = await complete( + document, + { + "nonNullFriendList": get_friends, + }, + ) + assert result == [ + { + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_nested_async_error_in_complete_value_after_initial_non_null(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def get_friend_name(i): + if i < 0: + raise RuntimeError("Oops") + return friends[i].name + + def get_friends(_info): + return [{"nonNullName": get_friend_name(i)} for i in (0, -1, 1)] + + result = await complete( + document, + { + "nonNullFriendList": get_friends, + }, + ) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_error_in_complete_value_after_initial_from_async_iterable(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + return {"nonNullName": throw() if i < 0 else friends[i].name} + + async def get_friends(_info): + for i in 0, -1, 1: + yield await get_friend(i) + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + { + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def handles_async_error_in_complete_value_from_async_generator_non_null(): + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + async def get_friend(i): + return {"nonNullName": throw() if i < 0 else friends[i].name} + + async def get_friends(_info): + for i in 0, -1, 1: # pragma: no cover exit + yield await get_friend(i) + + result = await complete( + document, + {"nonNullFriendList": get_friends}, + ) + assert result == [ + { + "data": {"nonNullFriendList": [{"nonNullName": 
"Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_errors_in_complete_value_after_initial_count_no_aclose(): + # Handles async errors thrown by complete_value after initialCount is reached + # from async iterable for a non-nullable list when the async iterable does + # not provide an aclose method. + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + class AsyncIterableWithoutAclose: + def __init__(self): + self.count = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + count = self.count + self.count += 1 + if count == 1: + name = throw() + else: + if count: + count -= 1 # pragma: no cover + name = friends[count].name + return {"nonNullName": name} + + async_iterable = AsyncIterableWithoutAclose() + result = await complete(document, {"nonNullFriendList": async_iterable}) + assert result == [ + { + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_errors_in_complete_value_after_initial_count_slow_aclose(): + # Handles async errors thrown by completeValue after initialCount is reached + # from async iterable for a non-nullable list when the async iterable provides + # concurrent next/return methods and has a slow aclose() + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + class AsyncIterableWithSlowAclose: + def __init__(self): + self.count = 0 + self.finished = False + + def __aiter__(self): + return self + + async def __anext__(self): + if self.finished: + raise StopAsyncIteration # pragma: no cover + count = self.count + self.count += 1 + if count == 1: + name = throw() + else: + if count: + count -= 1 # pragma: no cover + name = friends[count].name + return {"nonNullName": name} + + async def aclose(self): + await sleep(0) + self.finished = True + + async_iterable = AsyncIterableWithSlowAclose() + result = await complete(document, {"nonNullFriendList": async_iterable}) + assert result == [ + { + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], + "hasNext": True, + }, + { + "completed": [ + { + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + assert async_iterable.finished + + @pytest.mark.asyncio + async def filters_payloads_that_are_nulled(): + document = parse( + """ + query { + nestedObject { + nonNullScalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + } + """ + ) + + async def resolve_null(_info): + return None + + async def friend_list(_info): + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + 
"nonNullScalarField": resolve_null, + "nestedFriendList": friend_list, + } + }, + ) + + assert result == { + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " NestedObject.nonNullScalarField.", + "locations": [{"line": 4, "column": 17}], + "path": ["nestedObject", "nonNullScalarField"], + }, + ], + "data": {"nestedObject": None}, + } + + @pytest.mark.asyncio + async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): + document = parse( + """ + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + name + } + nonNullScalarField + } + } + """ + ) + + async def friend_list(_info): + yield friends[0] # pragma: no cover + + result = await complete( + document, + { + "nestedObject": { + "nestedFriendList": friend_list, + "nonNullScalarField": lambda _info: None, + } + }, + ) + + assert result == { + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " NestedObject.nonNullScalarField.", + "locations": [{"line": 7, "column": 17}], + "path": ["nestedObject", "nonNullScalarField"], + }, + ], + "data": {"nestedObject": None}, + } + + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): + document = parse( + """ + query { + otherNestedObject: nestedObject { + ... @defer { + scalarField + } + } + nestedObject { + nestedFriendList @stream(initialCount: 0) { + name + } + } + } + """ + ) + + async def error_field(_info): + raise RuntimeError("Oops") + + async def friend_list(_info): + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + "scalarField": error_field, + "nestedFriendList": friend_list, + } + }, + ) + + assert result == [ + { + "data": { + "otherNestedObject": {}, + "nestedObject": {"nestedFriendList": []}, + }, + "pending": [ + {"id": "0", "path": ["otherNestedObject"]}, + {"id": "1", "path": ["nestedObject", "nestedFriendList"]}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"scalarField": None}, + "id": "0", + "errors": [ + { + "message": "Oops", + "locations": [{"line": 5, "column": 19}], + "path": ["otherNestedObject", "scalarField"], + }, + ], + }, + {"items": [{"name": "Luke"}], "id": "1"}, + ], + "completed": [{"id": "0"}], + "hasNext": True, + }, + {"completed": [{"id": "1"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): + document = parse( + """ + query { + nestedObject { + ... 
@defer { + deeperNestedObject { + nonNullScalarField + deeperNestedFriendList @stream(initialCount: 0) { + name + } + } + } + } + } + """ + ) + + async def resolve_null(_info): + return None + + async def friend_list(_info): + yield friends[0] + + result = await complete( + document, + { + "nestedObject": { + "deeperNestedObject": { + "nonNullScalarField": resolve_null, + "deeperNestedFriendList": friend_list, + } + } + }, + ) + + assert result == [ + { + "data": {"nestedObject": {}}, + "pending": [{"id": "0", "path": ["nestedObject"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"deeperNestedObject": None}, + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " DeeperNestedObject.nonNullScalarField.", + "locations": [{"line": 6, "column": 21}], + "path": [ + "nestedObject", + "deeperNestedObject", + "nonNullScalarField", + ], + }, + ], + }, + ], + "completed": [{"id": "0"}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): + document = parse( + """ + query { + friendList @stream(initialCount: 0) { + nonNullName + ... @defer { + name + } + } + } + """ + ) + + async def resolve_null(_info): + return None + + async def friend(): + return { + "name": friends[0].name, + "nonNullName": resolve_null, + } + + async def friend_list(_info): + yield await friend() + + result = await complete(document, {"friendList": friend_list}) + + assert result == [ + { + "data": {"friendList": []}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [None], + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " Friend.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["friendList", 0, "nonNullName"], + }, + ], + }, + ], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.timeout(1) + @pytest.mark.asyncio + async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered(): + finished = False + + async def resolve_null(_info): + return None + + async def iterable(_info): + nonlocal finished + for i in range(3): + friend = friends[i] + yield {"name": friend.name, "nonNullName": None} + finished = True + + document = parse( + """ + query { + nestedObject { + ... 
@defer { + deeperNestedObject { + nonNullScalarField + deeperNestedFriendList @stream(initialCount: 0) { + name + } + } + } + } + } + """ + ) + + execute_result = experimental_execute_incrementally( + schema, + document, + { + "nestedObject": { + "deeperNestedObject": { + "nonNullScalarField": resolve_null, + "deeperNestedFriendList": iterable, + } + } + }, + ) + + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == { + "data": {"nestedObject": {}}, + "pending": [{"id": "0", "path": ["nestedObject"]}], + "hasNext": True, + } + + assert not finished + + result2 = await anext(iterator) + assert result2.formatted == { + "incremental": [ + { + "data": {"deeperNestedObject": None}, + "id": "0", + "errors": [ + { + "message": "Cannot return null for non-nullable field" + " DeeperNestedObject.nonNullScalarField.", + "locations": [{"line": 6, "column": 21}], + "path": [ + "nestedObject", + "deeperNestedObject", + "nonNullScalarField", + ], + }, + ], + }, + ], + "completed": [{"id": "0"}], + "hasNext": False, + } + + with pytest.raises(StopAsyncIteration): + await anext(iterator) + + assert finished + + @pytest.mark.asyncio + async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + id + name + } + } + """ + ) + + async def get_friend_name(i): + return friends[i].name + + async def get_friend(i): + if i < 2: + return friends[i] + return {"id": friends[2].id, "name": get_friend_name(i)} + + async def get_friends(_info): + for i in range(3): + yield await get_friend(i) + + result = await complete( + document, + { + "friendList": get_friends, + }, + ) + assert result == [ + { + "data": {"friendList": [{"id": "1", "name": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"id": "2", "name": "Han"}], "id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"id": "3", "name": "Leia"}], "id": "0"}], + "hasNext": True, + }, + {"completed": [{"id": "0"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def handles_overlapping_deferred_and_non_deferred_streams(): + document = parse( + """ + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + id + } + } + nestedObject { + ... @defer { + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + } + } + """ + ) + + async def get_nested_friend_list(_info): + for i in range(2): + yield friends[i] + + result = await complete( + document, + { + "nestedObject": { + "nestedFriendList": get_nested_friend_list, + } + }, + ) + + assert result == [ + { + "data": {"nestedObject": {"nestedFriendList": []}}, + "pending": [ + {"id": "0", "path": ["nestedObject"]}, + {"id": "1", "path": ["nestedObject", "nestedFriendList"]}, + ], + "hasNext": True, + }, + { + "incremental": [{"items": [{"id": "1", "name": "Luke"}], "id": "1"}], + "completed": [{"id": "0"}], + "hasNext": True, + }, + { + "incremental": [{"items": [{"id": "2", "name": "Han"}], "id": "1"}], + "hasNext": True, + }, + {"completed": [{"id": "1"}], "hasNext": False}, + ] + + @pytest.mark.asyncio + async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): + resolve_slow_field = Event() + + async def slow_field(_info): + await resolve_slow_field.wait() + return "slow" + + document = parse( + """ + query { + nestedObject { + ... 
DeferFragment @defer + } + } + fragment DeferFragment on NestedObject { + scalarField + nestedFriendList @stream(initialCount: 0) { + name + } + } + """ + ) + + async def get_friends(_info): + for i in range(2): + yield friends[i] + + execute_result = experimental_execute_incrementally( + schema, + document, + { + "nestedObject": { + "scalarField": slow_field, + "nestedFriendList": get_friends, + } + }, + ) + + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == { + "data": {"nestedObject": {}}, + "pending": [{"id": "0", "path": ["nestedObject"]}], + "hasNext": True, + } + + resolve_slow_field.set() + result2 = await anext(iterator) + assert result2.formatted == { + "pending": [{"id": "1", "path": ["nestedObject", "nestedFriendList"]}], + "incremental": [ + {"data": {"scalarField": "slow", "nestedFriendList": []}, "id": "0"}, + ], + "completed": [{"id": "0"}], + "hasNext": True, + } + result3 = await anext(iterator) + assert result3.formatted == { + "incremental": [{"items": [{"name": "Luke"}], "id": "1"}], + "hasNext": True, + } + result4 = await anext(iterator) + assert result4.formatted == { + "incremental": [{"items": [{"name": "Han"}], "id": "1"}], + "hasNext": True, + } + result5 = await anext(iterator) + assert result5.formatted == {"completed": [{"id": "1"}], "hasNext": False} + + with pytest.raises(StopAsyncIteration): + await anext(iterator) + + @pytest.mark.timeout(1) + @pytest.mark.asyncio + async def can_defer_fields_that_are_resolved_after_async_iterable_is_complete(): + resolve_slow_field = Event() + resolve_iterable = Event() + + async def slow_field(_info): + await resolve_slow_field.wait() + return "Han" + + document = parse( + """ + query { + friendList @stream(initialCount: 1, label:"stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + """ + ) + + async def get_friends(_info): + yield friends[0] + yield {"id": friends[1].id, "name": slow_field} + await resolve_iterable.wait() + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, + document, + { + "friendList": get_friends, + }, + ) + + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"id": "0", "path": ["friendList", 0], "label": "DeferName"}, + {"id": "1", "path": ["friendList"], "label": "stream-label"}, + ], + "hasNext": True, + } + + resolve_iterable.set() + result2 = await anext(iterator) + assert result2.formatted == { + "pending": [{"id": "2", "path": ["friendList", 1], "label": "DeferName"}], + "incremental": [ + {"data": {"name": "Luke"}, "id": "0"}, + {"items": [{"id": "2"}], "id": "1"}, + ], + "completed": [{"id": "0"}], + "hasNext": True, + } + + resolve_slow_field.set() + result3 = await anext(iterator) + assert result3.formatted == { + "completed": [{"id": "1"}], + "hasNext": True, + } + result4 = await anext(iterator) + assert result4.formatted == { + "incremental": [{"data": {"name": "Han"}, "id": "2"}], + "completed": [{"id": "2"}], + "hasNext": False, + } + + with pytest.raises(StopAsyncIteration): + await anext(iterator) + + @pytest.mark.asyncio + async def can_defer_fields_that_are_resolved_before_async_iterable_is_complete(): + 
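+        # Same scenario as the previous test, except that the deferred
+        # field resolves *before* the async iterable finishes, so its
+        # payload is delivered while the stream itself is still pending.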
resolve_slow_field = Event() + resolve_iterable = Event() + + async def slow_field(_info): + await resolve_slow_field.wait() + return "Han" + + document = parse( + """ + query { + friendList @stream(initialCount: 1, label:"stream-label") { + ...NameFragment @defer(label: "DeferName") @defer(label: "DeferName") + id + } + } + fragment NameFragment on Friend { + name + } + """ + ) + + async def get_friends(_info): + yield friends[0] + yield {"id": friends[1].id, "name": slow_field} + await resolve_iterable.wait() + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, + document, + { + "friendList": get_friends, + }, + ) + + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"id": "0", "path": ["friendList", 0], "label": "DeferName"}, + {"id": "1", "path": ["friendList"], "label": "stream-label"}, + ], + "hasNext": True, + } + + resolve_slow_field.set() + result2 = await anext(iterator) + assert result2.formatted == { + "pending": [{"id": "2", "path": ["friendList", 1], "label": "DeferName"}], + "incremental": [ + {"data": {"name": "Luke"}, "id": "0"}, + {"items": [{"id": "2"}], "id": "1"}, + ], + "completed": [{"id": "0"}], + "hasNext": True, + } + + result3 = await anext(iterator) + assert result3.formatted == { + "incremental": [ + {"data": {"name": "Han"}, "id": "2"}, + ], + "completed": [{"id": "2"}], + "hasNext": True, + } + + resolve_iterable.set() + result4 = await anext(iterator) + assert result4.formatted == { + "completed": [{"id": "1"}], + "hasNext": False, + } + + with pytest.raises(StopAsyncIteration): + await anext(iterator) + + @pytest.mark.asyncio + async def finishes_async_iterable_when_finished_generator_is_closed(): + finished = False + + async def iterable(_info): + nonlocal finished + for i in range(3): + yield friends[i] + finished = True + + document = parse( + """ + query { + friendList @stream(initialCount: 1) { + id + ... 
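+                # deferring "name" keeps payloads pending after the initial
+                # result; closing the returned iterator must still leave the
+                # generator finished (checked via the flag above)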
@defer {
+                  name
+                }
+              }
+            }
+            """
+        )
+
+        execute_result = await experimental_execute_incrementally(  # type: ignore
+            schema, document, {"friendList": iterable}
+        )
+        assert isinstance(execute_result, ExperimentalIncrementalExecutionResults)
+        iterator = execute_result.subsequent_results
+
+        result1 = execute_result.initial_result
+        assert result1 == {
+            "data": {"friendList": [{"id": "1"}]},
+            "pending": [
+                {"id": "0", "path": ["friendList", 0]},
+                {"id": "1", "path": ["friendList"]},
+            ],
+            "hasNext": True,
+        }
+
+        await iterator.aclose()
+        with pytest.raises(StopAsyncIteration):
+            await anext(iterator)
+
+        assert finished
+
+    @pytest.mark.asyncio
+    async def finishes_async_iterable_when_underlying_iterator_has_no_close_method():
+        class Iterable:
+            def __init__(self):
+                self.index = 0
+
+            def __aiter__(self):
+                return self
+
+            async def __anext__(self):
+                index = self.index
+                self.index = index + 1
+                try:
+                    return friends[index]
+                except IndexError:
+                    # suppressing the exception context is intentional here
+                    raise StopAsyncIteration from None
+
+        iterable = Iterable()
+
+        document = parse(
+            """
+            query {
+              friendList @stream(initialCount: 1) {
+                name
+                id
+              }
+            }
+            """
+        )
+
+        execute_result = await experimental_execute_incrementally(  # type: ignore
+            schema, document, {"friendList": iterable}
+        )
+        assert isinstance(execute_result, ExperimentalIncrementalExecutionResults)
+        iterator = execute_result.subsequent_results
+
+        result1 = execute_result.initial_result
+        assert result1 == {
+            "data": {"friendList": [{"id": "1", "name": "Luke"}]},
+            "pending": [{"id": "0", "path": ["friendList"]}],
+            "hasNext": True,
+        }
+
+        await iterator.aclose()
+        with pytest.raises(StopAsyncIteration):
+            await anext(iterator)
+
+        assert iterable.index == 4
+
+    @pytest.mark.asyncio
+    async def finishes_async_iterable_when_error_is_raised_in_finished_generator():
+        finished = False
+
+        async def iterable(_info):
+            nonlocal finished
+            for i in range(3):
+                yield friends[i]
+            finished = True
+
+        document = parse(
+            """
+            query {
+              friendList @stream(initialCount: 1) {
+                ... 
@defer { + name + } + id + } + } + """ + ) + + execute_result = await experimental_execute_incrementally( # type: ignore + schema, document, {"friendList": iterable} + ) + assert isinstance(execute_result, ExperimentalIncrementalExecutionResults) + iterator = execute_result.subsequent_results + + result1 = execute_result.initial_result + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"id": "0", "path": ["friendList", 0]}, + {"id": "1", "path": ["friendList"]}, + ], + "hasNext": True, + } + + with pytest.raises(RuntimeError, match="bad"): + await iterator.athrow(RuntimeError("bad")) + + with pytest.raises(StopAsyncIteration): + await anext(iterator) + + assert finished diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index 8b1fe639..8a6b4c38 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -1,11 +1,26 @@ import asyncio +from contextlib import suppress +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Callable, + Dict, + List, + Optional, + TypeVar, + Union, +) -from typing import Any, Dict, List, Callable - -from pytest import mark, raises +import pytest -from graphql.language import parse -from graphql.pyutils import SimplePubSub +from graphql.execution import ( + ExecutionResult, + create_source_event_stream, + subscribe, +) +from graphql.language import DocumentNode, parse +from graphql.pyutils import AwaitableOrValue, SimplePubSub, is_awaitable from graphql.type import ( GraphQLArgument, GraphQLBoolean, @@ -13,13 +28,21 @@ GraphQLInt, GraphQLList, GraphQLObjectType, + GraphQLResolveInfo, GraphQLSchema, GraphQLString, ) -from graphql.execution import create_source_event_stream, subscribe, MapAsyncIterator + +from ..fixtures import cleanup +from ..utils.assert_equal_awaitables_or_values import assert_equal_awaitables_or_values try: - anext + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict + +try: + anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins async def anext(iterator): @@ -27,13 +50,29 @@ async def anext(iterator): return await iterator.__anext__() -Email = Dict # should become a TypedDict once we require Python 3.8 +T = TypeVar("T") + +Email = TypedDict( + "Email", + { + "from": str, + "subject": str, + "message": str, + "unread": bool, + }, +) + + +async def async_subject(email: Email, _info: GraphQLResolveInfo) -> str: + return email["subject"] + EmailType = GraphQLObjectType( "Email", { "from": GraphQLField(GraphQLString), "subject": GraphQLField(GraphQLString), + "asyncSubject": GraphQLField(GraphQLString, resolve=async_subject), "message": GraphQLField(GraphQLString), "unread": GraphQLField(GraphQLBoolean), }, @@ -76,18 +115,31 @@ async def anext(iterator): ) -def create_subscription(pubsub: SimplePubSub): +def create_subscription( + pubsub: SimplePubSub, variable_values: Optional[Dict[str, Any]] = None +) -> AwaitableOrValue[Union[AsyncIterator[ExecutionResult], ExecutionResult]]: document = parse( """ - subscription ($priority: Int = 0) { + subscription ( + $priority: Int = 0 + $shouldDefer: Boolean = false + $shouldStream: Boolean = false + $asyncResolver: Boolean = false + ) { importantEmail(priority: $priority) { email { from subject + ... @include(if: $asyncResolver) { + asyncSubject + } } - inbox { - unread - total + ... 
@defer(if: $shouldDefer) { + inbox { + emails @include(if: $shouldStream) @stream(if: $shouldStream) + unread + total + } } } } @@ -113,15 +165,40 @@ def transform(new_email): "importantEmail": pubsub.get_subscriber(transform), } - return subscribe(email_schema, document, data) + return subscribe(email_schema, document, data, variable_values=variable_values) DummyQueryType = GraphQLObjectType("Query", {"dummy": GraphQLField(GraphQLString)}) +def subscribe_with_bad_fn( + subscribe_fn: Callable, +) -> AwaitableOrValue[Union[ExecutionResult, AsyncIterable[Any]]]: + schema = GraphQLSchema( + query=DummyQueryType, + subscription=GraphQLObjectType( + "Subscription", + {"foo": GraphQLField(GraphQLString, subscribe=subscribe_fn)}, + ), + ) + document = parse("subscription { foo }") + return subscribe_with_bad_args(schema, document) + + +def subscribe_with_bad_args( + schema: GraphQLSchema, + document: DocumentNode, + variable_values: Optional[Dict[str, Any]] = None, +): + return assert_equal_awaitables_or_values( + subscribe(schema, document, variable_values=variable_values), + create_source_event_stream(schema, document, variable_values=variable_values), + ) + + # Check all error cases when initializing the subscription. def describe_subscription_initialization_phase(): - @mark.asyncio + @pytest.mark.asyncio async def accepts_positional_arguments(): document = parse( """ @@ -131,19 +208,17 @@ async def accepts_positional_arguments(): """ ) - async def empty_async_iterator(_info): + async def empty_async_iterable(_info): for value in (): # type: ignore yield value # pragma: no cover - ai = await subscribe( - email_schema, document, {"importantEmail": empty_async_iterator} - ) + ai = subscribe(email_schema, document, {"importantEmail": empty_async_iterable}) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(ai) await ai.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio async def accepts_multiple_subscription_fields_defined_in_schema(): schema = GraphQLSchema( query=DummyQueryType, @@ -159,16 +234,16 @@ async def accepts_multiple_subscription_fields_defined_in_schema(): async def foo_generator(_info): yield {"foo": "FooValue"} - subscription = await subscribe( + subscription = subscribe( schema, parse("subscription { foo }"), {"foo": foo_generator} ) - assert isinstance(subscription, MapAsyncIterator) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) - await subscription.aclose() + await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio async def accepts_type_definition_with_sync_subscribe_function(): async def foo_generator(_obj, _info): yield {"foo": "FooValue"} @@ -181,64 +256,42 @@ async def foo_generator(_obj, _info): ), ) - subscription = await subscribe(schema, parse("subscription { foo }")) - assert isinstance(subscription, MapAsyncIterator) + subscription = subscribe(schema, parse("subscription { foo }")) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"foo": "FooValue"}, None) - await subscription.aclose() + await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio async def accepts_type_definition_with_async_subscribe_function(): async def foo_generator(_obj, _info): await asyncio.sleep(0) yield {"foo": "FooValue"} - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", - {"foo": GraphQLField(GraphQLString, subscribe=foo_generator)}, - ), - ) 
- - subscription = await subscribe(schema, parse("subscription { foo }")) - assert isinstance(subscription, MapAsyncIterator) - - assert await anext(subscription) == ({"foo": "FooValue"}, None) - - await subscription.aclose() + async def subscribe_fn(obj, info): + await asyncio.sleep(0) + return foo_generator(obj, info) - @mark.asyncio - async def uses_a_custom_default_subscribe_field_resolver(): schema = GraphQLSchema( query=DummyQueryType, subscription=GraphQLObjectType( - "Subscription", {"foo": GraphQLField(GraphQLString)} + "Subscription", + {"foo": GraphQLField(GraphQLString, subscribe=subscribe_fn)}, ), ) - class Root: - @staticmethod - async def custom_foo(): - yield {"foo": "FooValue"} + awaitable = subscribe(schema, parse("subscription { foo }")) + assert is_awaitable(awaitable) - subscription = await subscribe( - schema, - document=parse("subscription { foo }"), - root_value=Root(), - subscribe_field_resolver=lambda root, _info: root.custom_foo(), - ) - assert isinstance(subscription, MapAsyncIterator) + subscription = await awaitable + assert isinstance(subscription, AsyncIterator) - assert await anext(subscription) == ( - {"foo": "FooValue"}, - None, - ) + assert await anext(subscription) == ({"foo": "FooValue"}, None) - await subscription.aclose() + await subscription.aclose() # type: ignore - @mark.asyncio + @pytest.mark.asyncio async def should_only_resolve_the_first_field_of_invalid_multi_field(): did_resolve = {"foo": False, "bar": False} @@ -261,8 +314,8 @@ async def subscribe_bar(_obj, _info): # pragma: no cover ), ) - subscription = await subscribe(schema, parse("subscription { foo bar }")) - assert isinstance(subscription, MapAsyncIterator) + subscription = subscribe(schema, parse("subscription { foo bar }")) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ( {"foo": "FooValue", "bar": None}, @@ -271,37 +324,14 @@ async def subscribe_bar(_obj, _info): # pragma: no cover assert did_resolve == {"foo": True, "bar": False} - await subscription.aclose() - - @mark.asyncio - async def throws_an_error_if_some_of_required_arguments_are_missing(): - document = parse("subscription { foo }") - - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", {"foo": GraphQLField(GraphQLString)} - ), - ) - - with raises(TypeError, match="^Expected None to be a GraphQL schema\\.$"): - await subscribe(None, document) # type: ignore - - with raises(TypeError, match="missing .* positional argument: 'schema'"): - await subscribe(document=document) # type: ignore + await subscription.aclose() # type: ignore - with raises(TypeError, match="^Must provide document\\.$"): - await subscribe(schema, None) # type: ignore - - with raises(TypeError, match="missing .* positional argument: 'document'"): - await subscribe(schema=schema) # type: ignore - - @mark.asyncio + @pytest.mark.asyncio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") - result = await subscribe(schema, document) + result = subscribe_with_bad_args(schema, document) assert result == ( None, @@ -314,7 +344,7 @@ async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def resolves_to_an_error_for_unknown_subscription_field(): schema = GraphQLSchema( query=DummyQueryType, @@ -324,7 +354,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ) document = 
parse("subscription { unknownField }") - result = await subscribe(schema, document) + result = subscribe_with_bad_args(schema, document) assert result == ( None, [ @@ -335,7 +365,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def should_pass_through_unexpected_errors_thrown_in_subscribe(): schema = GraphQLSchema( query=DummyQueryType, @@ -343,49 +373,41 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): "Subscription", {"foo": GraphQLField(GraphQLString)} ), ) - with raises(TypeError, match="^Must provide document\\.$"): - await subscribe(schema=schema, document={}) # type: ignore + with pytest.raises(AttributeError): + subscribe_with_bad_args(schema=schema, document={}) # type: ignore - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", + expected_result = ( + None, + [ { - "foo": GraphQLField( - GraphQLString, subscribe=lambda _obj, _info: "test" - ) - }, - ), + "message": "Subscription field must return AsyncIterable." + " Received: 'test'.", + "locations": [(1, 16)], + "path": ["foo"], + } + ], ) - document = parse("subscription { foo }") + def sync_fn(_obj, _info): + return "test" - with raises(TypeError) as exc_info: - await subscribe(schema, document) + assert subscribe_with_bad_fn(sync_fn) == expected_result - assert str(exc_info.value) == ( - "Subscription field must return AsyncIterable. Received: 'test'." - ) + async def async_fn(obj, info): + return sync_fn(obj, info) - @mark.asyncio - async def resolves_to_an_error_for_subscription_resolver_errors(): - async def subscribe_with_fn(subscribe_fn: Callable): - schema = GraphQLSchema( - query=DummyQueryType, - subscription=GraphQLObjectType( - "Subscription", - {"foo": GraphQLField(GraphQLString, subscribe=subscribe_fn)}, - ), - ) - document = parse("subscription { foo }") - result = await subscribe(schema, document) + result = subscribe_with_bad_fn(async_fn) + assert is_awaitable(result) + assert await result == expected_result - assert await create_source_event_stream(schema, document) == result - return result + del result + cleanup() + @pytest.mark.asyncio + async def resolves_to_an_error_for_subscription_resolver_errors(): expected_result = ( None, [ @@ -398,30 +420,35 @@ async def subscribe_with_fn(subscribe_fn: Callable): ) # Returning an error - def return_error(_obj, _info): + def return_error(*_args): return TypeError("test error") - assert await subscribe_with_fn(return_error) == expected_result + assert subscribe_with_bad_fn(return_error) == expected_result # Throwing an error def throw_error(*_args): raise TypeError("test error") - assert await subscribe_with_fn(throw_error) == expected_result + assert subscribe_with_bad_fn(throw_error) == expected_result # Resolving to an error - async def resolve_error(*_args): - return TypeError("test error") + async def resolve_to_error(*args): + return return_error(*args) - assert await subscribe_with_fn(resolve_error) == expected_result + result = subscribe_with_bad_fn(resolve_to_error) + assert is_awaitable(result) + assert await result == expected_result # Rejecting with an error - async def reject_error(*_args): - return TypeError("test error") - assert await 
subscribe_with_fn(reject_error) == expected_result + async def reject_with_error(*args): + return throw_error(*args) + + result = subscribe_with_bad_fn(reject_with_error) + assert is_awaitable(result) + assert await result == expected_result - @mark.asyncio + @pytest.mark.asyncio async def resolves_to_an_error_if_variables_were_wrong_type(): schema = GraphQLSchema( query=DummyQueryType, @@ -446,7 +473,9 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # If we receive variables that cannot be coerced correctly, subscribe() will # resolve to an ExecutionResult that contains an informative error description. - result = await subscribe(schema, document, variable_values=variable_values) + result = subscribe_with_bad_args( + schema, document, variable_values=variable_values + ) assert result == ( None, @@ -459,20 +488,20 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): ], ) - assert result.errors[0].original_error is None # type: ignore + assert result.errors[0].original_error # Once a subscription returns a valid AsyncIterator, it can still yield errors. def describe_subscription_publish_phase(): - @mark.asyncio + @pytest.mark.asyncio async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) - second_subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + second_subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) payload1 = anext(subscription) payload2 = anext(second_subscription) @@ -499,11 +528,48 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): assert await payload1 == (expected_payload, None) assert await payload2 == (expected_payload, None) - @mark.asyncio + @pytest.mark.asyncio + async def produces_a_payload_when_queried_fields_are_async(): + pubsub = SimplePubSub() + subscription = create_subscription(pubsub, {"asyncResolver": True}) + assert isinstance(subscription, AsyncIterator) + + assert ( + pubsub.emit( + { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "message": "Tests are good", + "unread": True, + } + ) + is True + ) + + assert await anext(subscription) == ( + { + "importantEmail": { + "email": { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "asyncSubject": "Alright", + }, + "inbox": {"unread": 1, "total": 2}, + } + }, + None, + ) + + with suppress(RuntimeError): # suppress error for Python < 3.8 + await subscription.aclose() # type: ignore + with pytest.raises(StopAsyncIteration): + await anext(subscription) + + @pytest.mark.asyncio async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) # Wait for the next subscription payload. payload = anext(subscription) @@ -558,7 +624,8 @@ async def produces_a_payload_per_subscription_event(): # The client decides to disconnect. # noinspection PyUnresolvedReferences - await subscription.aclose() + with suppress(RuntimeError): # suppress error for Python < 3.8 + await subscription.aclose() # type: ignore # Which may result in disconnecting upstream services as well. 
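+        # SimplePubSub.emit returns False once no subscribers are listening.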
assert ( @@ -574,14 +641,159 @@ async def produces_a_payload_per_subscription_event(): ) # No more listeners. # Awaiting subscription after closing it results in completed results. - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @mark.asyncio + @pytest.mark.asyncio + async def subscribe_function_returns_errors_with_defer(): + pubsub = SimplePubSub() + subscription = create_subscription(pubsub, {"shouldDefer": True}) + assert isinstance(subscription, AsyncIterator) + + # Wait for the next subscription payload. + payload = anext(subscription) + + # A new email arrives! + assert ( + pubsub.emit( + { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "message": "Tests are good", + "unread": True, + } + ) + is True + ) + + error_result = ( + {"importantEmail": None}, + [ + { + "message": "`@defer` directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(8, 11)], + "path": ["importantEmail"], + } + ], + ) + + # The previously waited on payload now has a value. + result = await payload + assert result == error_result + + # Another new email arrives, + # after all incrementally delivered payloads are received. + assert ( + pubsub.emit( + { + "from": "hyo@graphql.org", + "subject": "Tools", + "message": "I <3 making things", + "unread": True, + } + ) + is True + ) + + # The next waited on payload will have a value. + result = await anext(subscription) + assert result == error_result + + with suppress(RuntimeError): # suppress error for Python < 3.8 + await subscription.aclose() # type: ignore + + # Awaiting a subscription after closing it results in completed results. + with pytest.raises(StopAsyncIteration): + assert await anext(subscription) + + @pytest.mark.asyncio + async def subscribe_function_returns_errors_with_stream(): + pubsub = SimplePubSub() + subscription = create_subscription(pubsub, {"shouldStream": True}) + assert isinstance(subscription, AsyncIterator) + + # Wait for the next subscription payload. + payload = anext(subscription) + + # A new email arrives! + assert ( + pubsub.emit( + { + "from": "yuzhi@graphql.org", + "subject": "Alright", + "message": "Tests are good", + "unread": True, + } + ) + is True + ) + + # The previously waited on payload now has a value. + assert await payload == ( + { + "importantEmail": { + "email": {"from": "yuzhi@graphql.org", "subject": "Alright"}, + "inbox": {"emails": None, "unread": 1, "total": 2}, + } + }, + [ + { + "message": "`@stream` directive not supported" + " on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(18, 17)], + "path": ["importantEmail", "inbox", "emails"], + } + ], + ) + + # Another new email arrives, + # after all incrementally delivered payloads are received. + assert ( + pubsub.emit( + { + "from": "hyo@graphql.org", + "subject": "Tools", + "message": "I <3 making things", + "unread": True, + } + ) + is True + ) + + # The next waited on payload will have a value. + assert await anext(subscription) == ( + { + "importantEmail": { + "email": {"from": "hyo@graphql.org", "subject": "Tools"}, + "inbox": {"emails": None, "unread": 2, "total": 3}, + } + }, + [ + { + "message": "`@stream` directive not supported" + " on subscription operations." 
+ " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(18, 17)], + "path": ["importantEmail", "inbox", "emails"], + } + ], + ) + + # The client disconnects before the deferred payload is consumed. + await subscription.aclose() # type: ignore + + # Awaiting a subscription after closing it results in completed results. + with pytest.raises(StopAsyncIteration): + assert await anext(subscription) + + @pytest.mark.asyncio async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -633,11 +845,11 @@ async def produces_a_payload_when_there_are_multiple_events(): None, ) - @mark.asyncio + @pytest.mark.asyncio async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -665,7 +877,8 @@ async def should_not_trigger_when_subscription_is_already_done(): ) payload = anext(subscription) - await subscription.aclose() + with suppress(RuntimeError): # suppress error for Python < 3.8 + await subscription.aclose() # type: ignore # A new email arrives! assert ( @@ -680,14 +893,14 @@ async def should_not_trigger_when_subscription_is_already_done(): is False ) - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await payload - @mark.asyncio + @pytest.mark.asyncio async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -717,18 +930,18 @@ async def should_not_trigger_when_subscription_is_thrown(): payload = anext(subscription) # Throw error - with raises(RuntimeError) as exc_info: - await subscription.athrow(RuntimeError("ouch")) + with pytest.raises(RuntimeError) as exc_info: + await subscription.athrow(RuntimeError("ouch")) # type: ignore assert str(exc_info.value) == "ouch" - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await payload - @mark.asyncio + @pytest.mark.asyncio async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() - subscription = await create_subscription(pubsub) - assert isinstance(subscription, MapAsyncIterator) + subscription = create_subscription(pubsub) + assert isinstance(subscription, AsyncIterator) payload = anext(subscription) @@ -780,7 +993,7 @@ async def event_order_is_correct_for_multiple_publishes(): None, ) - @mark.asyncio + @pytest.mark.asyncio async def should_handle_error_during_execution_of_source_event(): async def generate_messages(_obj, _info): yield "Hello" @@ -807,8 +1020,8 @@ def resolve_message(message, _info): ) document = parse("subscription { newMessage }") - subscription = await subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + subscription = subscribe(schema, document) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) @@ -828,7 +1041,7 @@ def resolve_message(message, _info): # Subsequent events are 
still executed. assert await anext(subscription) == ({"newMessage": "Bonjour"}, None) - @mark.asyncio + @pytest.mark.asyncio async def should_pass_through_error_thrown_in_source_event_stream(): async def generate_messages(_obj, _info): yield "Hello" @@ -852,21 +1065,21 @@ def resolve_message(message, _info): ) document = parse("subscription { newMessage }") - subscription = await subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + subscription = subscribe(schema, document) + assert isinstance(subscription, AsyncIterator) - assert await (anext(subscription)) == ({"newMessage": "Hello"}, None) + assert await anext(subscription) == ({"newMessage": "Hello"}, None) - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: await anext(subscription) assert str(exc_info.value) == "test error" - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await anext(subscription) - @mark.asyncio - async def should_work_with_async_resolve_function(): + @pytest.mark.asyncio + async def should_work_with_sync_resolve_function(): async def generate_messages(_obj, _info): yield "Hello" @@ -888,7 +1101,145 @@ def resolve_message(message, _info): ) document = parse("subscription { newMessage }") - subscription = await subscribe(schema, document) - assert isinstance(subscription, MapAsyncIterator) + subscription = subscribe(schema, document) + assert isinstance(subscription, AsyncIterator) assert await anext(subscription) == ({"newMessage": "Hello"}, None) + + @pytest.mark.asyncio + async def should_work_with_async_resolve_function(): + async def generate_messages(_obj, _info): + await asyncio.sleep(0) + yield "Hello" + + async def resolve_message(message, _info): + await asyncio.sleep(0) + return message + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=resolve_message, + subscribe=generate_messages, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = subscribe(schema, document) + assert isinstance(subscription, AsyncIterator) + + assert await anext(subscription) == ({"newMessage": "Hello"}, None) + + @pytest.mark.asyncio + async def should_work_with_custom_async_iterator(): + class MessageGenerator: + resolved: List[str] = [] + + def __init__(self, values, _info): + self.values = values + + def __aiter__(self): + return self + + async def __anext__(self): + if not self.values: + raise StopAsyncIteration + await asyncio.sleep(0) + return self.values.pop(0) + + @classmethod + async def resolve(cls, message, _info) -> str: + await asyncio.sleep(0) + cls.resolved.append(message) + return message + "!" 
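+        # The class itself serves as the subscribe function: it is called with
+        # (root_value, info) and the instance it returns is an async iterator.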
+ + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=MessageGenerator.resolve, + subscribe=MessageGenerator, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = subscribe(schema, document, ["Hello", "Dolly"]) + assert isinstance(subscription, AsyncIterator) + + assert [result async for result in subscription] == [ + ({"newMessage": "Hello!"}, None), + ({"newMessage": "Dolly!"}, None), + ] + + assert MessageGenerator.resolved == ["Hello", "Dolly"] + + await subscription.aclose() # type: ignore + + @pytest.mark.asyncio + async def should_close_custom_async_iterator(): + class MessageGenerator: + closed: bool = False + resolved: List[str] = [] + + def __init__(self, values, _info): + self.values = values + + def __aiter__(self): + return self + + async def __anext__(self): + if not self.values: + raise StopAsyncIteration + await asyncio.sleep(0) + return self.values.pop(0) + + @classmethod + async def resolve(cls, message, _info) -> str: + await asyncio.sleep(0) + cls.resolved.append(message) + return message + "!" + + @classmethod + async def aclose(cls) -> None: + cls.closed = True + + schema = GraphQLSchema( + query=QueryType, + subscription=GraphQLObjectType( + "Subscription", + { + "newMessage": GraphQLField( + GraphQLString, + resolve=MessageGenerator.resolve, + subscribe=MessageGenerator, + ) + }, + ), + ) + + document = parse("subscription { newMessage }") + subscription = subscribe(schema, document, ["Hello", "Dolly"]) + assert isinstance(subscription, AsyncIterator) + + assert not MessageGenerator.closed + + assert [result async for result in subscription] == [ + ({"newMessage": "Hello!"}, None), + ({"newMessage": "Dolly!"}, None), + ] + + assert MessageGenerator.closed + assert MessageGenerator.resolved == ["Hello", "Dolly"] + + await subscription.aclose() diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index d5604310..d5e9504f 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -1,14 +1,14 @@ -from gc import collect -from inspect import isawaitable - -from pytest import mark, raises +import pytest from graphql import graphql_sync from graphql.execution import execute, execute_sync from graphql.language import parse +from graphql.pyutils import is_awaitable from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.validation import validate +from ..fixtures import cleanup + def describe_execute_synchronously_when_possible(): def _resolve_sync(root_value, _info): @@ -52,11 +52,11 @@ def does_not_return_an_awaitable_if_mutation_fields_are_all_synchronous(): None, ) - @mark.asyncio + @pytest.mark.asyncio async def returns_an_awaitable_if_any_field_is_asynchronous(): doc = "query Example { syncField, asyncField }" result = execute(schema, parse(doc), "rootValue") - assert isawaitable(result) + assert is_awaitable(result) assert await result == ( {"syncField": "rootValue", "asyncField": "rootValue"}, None, @@ -81,19 +81,21 @@ def does_not_throw_if_not_encountering_async_execution_with_check_sync(): None, ) - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_execution_with_check_sync(): doc = "query Example { syncField, asyncField }" - with raises(RuntimeError) as exc_info: + with 
pytest.raises(RuntimeError) as exc_info: execute_sync( schema, document=parse(doc), root_value="rootValue", check_sync=True ) msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." + del exc_info + cleanup() - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" result = execute_sync(schema, document=parse(doc), root_value="rootValue") @@ -108,9 +110,46 @@ async def throws_if_encountering_async_operation_without_check_sync(): } ], ) - # garbage collect coroutine in order to not postpone the warning del result - collect() + cleanup() + + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def throws_if_encountering_async_iterable_execution_with_check_sync(): + doc = """ + query Example { + ...deferFrag @defer(label: "deferLabel") + } + fragment deferFrag on Query { + syncField + } + """ + with pytest.raises(RuntimeError) as exc_info: + execute_sync( + schema, document=parse(doc), root_value="rootValue", check_sync=True + ) + msg = str(exc_info.value) + assert msg == "GraphQL execution failed to complete synchronously." + del exc_info + cleanup() + + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + async def throws_if_encountering_async_iterable_execution_without_check_sync(): + doc = """ + query Example { + ...deferFrag @defer(label: "deferLabel") + } + fragment deferFrag on Query { + syncField + } + """ + with pytest.raises(RuntimeError) as exc_info: + execute_sync(schema, document=parse(doc), root_value="rootValue") + msg = str(exc_info.value) + assert msg == "GraphQL execution failed to complete synchronously." + del exc_info + cleanup() def describe_graphql_sync(): def reports_errors_raised_during_schema_validation(): @@ -150,17 +189,19 @@ def does_not_throw_if_not_encountering_async_operation_with_check_sync(): None, ) - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_with_check_sync(): doc = "query Example { syncField, asyncField }" - with raises(RuntimeError) as exc_info: + with pytest.raises(RuntimeError) as exc_info: graphql_sync(schema, doc, "rootValue", check_sync=True) msg = str(exc_info.value) assert msg == "GraphQL execution failed to complete synchronously." 
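+        # drop the reference and collect garbage so the "never awaited" warning is not postponed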
+ del exc_info + cleanup() - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" result = graphql_sync(schema, doc, "rootValue") @@ -175,6 +216,5 @@ async def throws_if_encountering_async_operation_without_check_sync(): } ], ) - # garbage collect coroutine in order to not postpone the warning del result - collect() + cleanup() diff --git a/tests/execution/test_union_interface.py b/tests/execution/test_union_interface.py index e6858e43..e772db5d 100644 --- a/tests/execution/test_union_interface.py +++ b/tests/execution/test_union_interface.py @@ -1,4 +1,4 @@ -from typing import Optional, Union, List +from __future__ import annotations from graphql.execution import execute_sync from graphql.language import parse @@ -15,12 +15,11 @@ class Dog: - name: str barks: bool - mother: Optional["Dog"] - father: Optional["Dog"] - progeny: List["Dog"] + mother: Dog | None + father: Dog | None + progeny: list[Dog] def __init__(self, name: str, barks: bool): self.name = name @@ -31,12 +30,11 @@ def __init__(self, name: str, barks: bool): class Cat: - name: str meows: bool - mother: Optional["Cat"] - father: Optional["Cat"] - progeny: List["Cat"] + mother: Cat | None + father: Cat | None + progeny: list[Cat] def __init__(self, name: str, meows: bool): self.name = name @@ -47,16 +45,15 @@ def __init__(self, name: str, meows: bool): class Person: - name: str - pets: Optional[List[Union[Dog, Cat]]] - friends: Optional[List[Union[Dog, Cat, "Person"]]] + pets: list[Dog | Cat] | None + friends: list[Dog | Cat | Person] | None def __init__( self, name: str, - pets: Optional[List[Union[Dog, Cat]]] = None, - friends: Optional[List[Union[Dog, Cat, "Person"]]] = None, + pets: list[Dog | Cat] | None = None, + friends: list[Dog | Cat | Person] | None = None, ): self.name = name self.pets = pets @@ -66,7 +63,8 @@ def __init__( NamedType = GraphQLInterfaceType("Named", {"name": GraphQLField(GraphQLString)}) LifeType = GraphQLInterfaceType( - "Life", lambda: {"progeny": GraphQLField(GraphQLList(LifeType))} # type: ignore + "Life", + lambda: {"progeny": GraphQLField(GraphQLList(LifeType))}, # type: ignore ) MammalType = GraphQLInterfaceType( @@ -89,7 +87,7 @@ def __init__( "father": GraphQLField(DogType), # type: ignore }, interfaces=[MammalType, LifeType, NamedType], - is_type_of=lambda value, info: isinstance(value, Dog), + is_type_of=lambda value, _info: isinstance(value, Dog), ) CatType = GraphQLObjectType( @@ -102,7 +100,7 @@ def __init__( "father": GraphQLField(CatType), # type: ignore }, interfaces=[MammalType, LifeType, NamedType], - is_type_of=lambda value, info: isinstance(value, Cat), + is_type_of=lambda value, _info: isinstance(value, Cat), ) @@ -113,7 +111,7 @@ def resolve_pet_type(value, _info, _type): return CatType.name # Not reachable. All possible types have been considered. 
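+    # the "no cover" pragma excludes this unreachable branch from coverage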
- assert False, "Unexpected pet type" + assert False, "Unexpected pet type" # pragma: no cover PetType = GraphQLUnionType("Pet", [DogType, CatType], resolve_type=resolve_pet_type) diff --git a/tests/execution/test_variables.py b/tests/execution/test_variables.py index dde674c7..3dfdb3ed 100644 --- a/tests/execution/test_variables.py +++ b/tests/execution/test_variables.py @@ -1,9 +1,12 @@ +from __future__ import annotations + from math import nan -from typing import Any, Dict, Optional +from typing import Any -from graphql.execution import execute_sync, ExecutionResult +from graphql.error import GraphQLError +from graphql.execution import ExecutionResult, execute_sync from graphql.execution.values import get_variable_values -from graphql.language import parse, OperationDefinitionNode, StringValueNode, ValueNode +from graphql.language import OperationDefinitionNode, StringValueNode, ValueNode, parse from graphql.pyutils import Undefined from graphql.type import ( GraphQLArgument, @@ -21,6 +24,25 @@ GraphQLString, ) +TestFaultyScalarGraphQLError = GraphQLError( + "FaultyScalarErrorMessage", extensions={"code": "FaultyScalarExtensionCode"} +) + + +def faulty_parse_value(_value: str) -> str: + raise TestFaultyScalarGraphQLError + + +def faulty_parse_literal(_ast: ValueNode, _variables=None) -> str: + raise TestFaultyScalarGraphQLError + + +TestFaultyScalar = GraphQLScalarType( + name="FaultyScalar", + parse_value=faulty_parse_value, + parse_literal=faulty_parse_literal, +) + def parse_serialized_value(value: str) -> str: assert value == "SerializedValue" @@ -47,6 +69,7 @@ def parse_literal_value(ast: ValueNode, _variables=None) -> str: "b": GraphQLInputField(GraphQLList(GraphQLString)), "c": GraphQLInputField(GraphQLNonNull(GraphQLString)), "d": GraphQLInputField(TestComplexScalar), + "e": GraphQLInputField(TestFaultyScalar), }, ) @@ -132,7 +155,7 @@ def field_with_input_arg(input_arg: GraphQLArgument): def execute_query( - query: str, variable_values: Optional[Dict[str, Any]] = None + query: str, variable_values: dict[str, Any] | None = None ) -> ExecutionResult: document = parse(query) return execute_sync(schema, document, variable_values=variable_values) @@ -253,6 +276,27 @@ def properly_runs_parse_literal_on_complex_scalar_types(): None, ) + def errors_on_faulty_scalar_type_input(): + result = execute_query( + """ + { + fieldWithObjectInput(input: {c: "foo", e: "bar"}) + } + """ + ) + + assert result == ( + {"fieldWithObjectInput": None}, + [ + { + "message": "Argument 'input' has invalid value" + ' { c: "foo", e: "bar" }.', + "path": ["fieldWithObjectInput"], + "locations": [(3, 51)], + } + ], + ) + def describe_using_variables(): doc = """ query ($input: TestInputObject) { @@ -365,6 +409,22 @@ def executes_with_complex_scalar_input(): None, ) + def errors_on_faulty_scalar_type_input(): + params = {"input": {"c": "foo", "e": "SerializedValue"}} + result = execute_query(doc, params) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " 'SerializedValue' at 'input.e'; FaultyScalarErrorMessage", + "locations": [(2, 24)], + "extensions": {"code": "FaultyScalarExtensionCode"}, + } + ], + ) + def errors_on_null_for_nested_non_null(): params = {"input": {"a": "foo", "b": "bar", "c": None}} result = execute_query(doc, params) @@ -676,8 +736,8 @@ def reports_error_for_array_passed_into_string_input(): ) errors = result.errors - assert errors is not None - assert errors[0].original_error is None + assert errors + assert errors[0].original_error def 
reports_error_for_non_provided_variables_for_non_nullable_inputs(): # Note: this test would typically fail validation before @@ -981,7 +1041,7 @@ def describe_get_variable_values_limit_maximum_number_of_coercion_errors(): input_value = {"input": [0, 1, 2]} - def _invalid_value_error(value: int, index: int) -> Dict[str, Any]: + def _invalid_value_error(value: int, index: int) -> dict[str, Any]: return { "message": "Variable '$input' got invalid value" f" {value} at 'input[{index}]';" diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index fe63e16b..5e4058f9 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -1,42 +1,56 @@ """Fixtures for graphql tests""" + import json -from os.path import dirname, join +from gc import collect +from pathlib import Path -from pytest import fixture +import pytest __all__ = [ + "big_schema_introspection_result", + "big_schema_sdl", + "cleanup", "kitchen_sink_query", "kitchen_sink_sdl", - "big_schema_sdl", - "big_schema_introspection_result", ] +def cleanup(rounds=5): + """Run garbage collector. + + This can be used to remove coroutines that were not awaited after running tests. + """ + for _generation in range(rounds): + collect() + + def read_graphql(name): - path = join(dirname(__file__), name + ".graphql") - return open(path, encoding="utf-8").read() + path = (Path(__file__).parent / name).with_suffix(".graphql") + with path.open(encoding="utf-8") as file: + return file.read() def read_json(name): - path = join(dirname(__file__), name + ".json") - return json.load(open(path, encoding="utf-8")) + path = (Path(__file__).parent / name).with_suffix(".json") + with path.open(encoding="utf-8") as file: + return json.load(file) -@fixture(scope="module") +@pytest.fixture(scope="module") def kitchen_sink_query(): return read_graphql("kitchen_sink") -@fixture(scope="module") +@pytest.fixture(scope="module") def kitchen_sink_sdl(): return read_graphql("schema_kitchen_sink") -@fixture(scope="module") +@pytest.fixture(scope="module") def big_schema_sdl(): return read_graphql("github_schema") -@fixture(scope="module") +@pytest.fixture(scope="module") def big_schema_introspection_result(): return read_json("github_schema") diff --git a/tests/fixtures/kitchen_sink.graphql b/tests/fixtures/kitchen_sink.graphql index a2d9f671..66a71fb3 100644 --- a/tests/fixtures/kitchen_sink.graphql +++ b/tests/fixtures/kitchen_sink.graphql @@ -9,6 +9,21 @@ query queryName($foo: ComplexType, $site: Site = MOBILE) @onQuery { ...frag @onFragmentSpread } } + + field3! + field4? + requiredField5: field5! + requiredSelectionSet(first: 10)! @directive { + field + } + + unsetListItemsRequiredList: listField[]! + requiredListItemsUnsetList: listField[!] + requiredListItemsRequiredList: listField[!]! + unsetListItemsOptionalList: listField[]? + optionalListItemsUnsetList: listField[?] + optionalListItemsOptionalList: listField[?]? + multidimensionalList: listField[[[!]!]!]! } ... 
@skip(unless: $foo) { id diff --git a/tests/fixtures/schema_kitchen_sink.graphql b/tests/fixtures/schema_kitchen_sink.graphql index 8ec1f2d8..c1d9d06e 100644 --- a/tests/fixtures/schema_kitchen_sink.graphql +++ b/tests/fixtures/schema_kitchen_sink.graphql @@ -26,6 +26,7 @@ type Foo implements Bar & Baz & Two { five(argument: [String] = ["string", "string"]): String six(argument: InputType = {key: "value"}): Type seven(argument: Int = null): Type + eight(argument: OneOfInputType): Type } type AnnotatedObject @onObject(arg: "value") { @@ -115,6 +116,11 @@ input InputType { answer: Int = 42 } +input OneOfInputType @oneOf { + string: String + int: Int +} + input AnnotatedInput @onInputObject { annotatedField: Type @onInputFieldDefinition } diff --git a/tests/language/test_ast.py b/tests/language/test_ast.py index bbf4440c..e9cb80c8 100644 --- a/tests/language/test_ast.py +++ b/tests/language/test_ast.py @@ -1,7 +1,9 @@ -from copy import copy, deepcopy +from __future__ import annotations + import weakref +from copy import copy, deepcopy -from graphql.language import Location, Node, Source, Token, TokenKind +from graphql.language import Location, NameNode, Node, Source, Token, TokenKind from graphql.pyutils import inspect @@ -12,6 +14,13 @@ class SampleTestNode(Node): beta: int +class SampleNamedNode(Node): + __slots__ = "foo", "name" + + foo: str + name: str | None + + def describe_token_class(): def initializes(): token = Token( @@ -44,7 +53,7 @@ def can_check_equality(): token1 = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") token2 = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") assert token2 == token1 - assert not token2 != token1 + assert not token2 != token1  # noqa: SIM202 token3 = Token(TokenKind.NAME, 1, 2, 1, 2, value="text") assert token3 != token1 token4 = Token(TokenKind.NAME, 1, 4, 1, 2, value="test") @@ -54,8 +63,8 @@ def can_check_equality(): def can_compare_with_string(): token = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") - assert token == "Name 'test'" - assert token != "Name 'foo'" + assert token == "Name 'test'" # noqa: S105 + assert token != "Name 'foo'" # noqa: S105 def does_not_equal_incompatible_object(): token = Token(TokenKind.NAME, 1, 2, 1, 2, value="test") @@ -113,16 +122,16 @@ def can_check_equality_with_tuple_or_list(): loc = Location(token1, token2, source) assert loc == (1, 3) assert loc == [1, 3] - assert not loc != (1, 3) - assert not loc != [1, 3] + assert not loc != (1, 3)  # noqa: SIM202 + assert not loc != [1, 3]  # noqa: SIM202 assert loc != (1, 2) assert loc != [2, 3] def does_not_equal_incompatible_object(): loc = Location(token1, token2, source) - assert not loc == (1, 2, 3) + assert not loc == (1, 2, 3)  # noqa: SIM201 assert loc != (1, 2, 3) - assert not loc == {1: 2} + assert not loc == {1: 2}  # noqa: SIM201 assert loc != {1: 2} def can_hash(): @@ -160,11 +169,30 @@ def has_representation_with_loc(): node = SampleTestNode(alpha=1, beta=2, loc=3) assert repr(node) == "SampleTestNode at 3" + def has_representation_when_named(): + name_node = NameNode(value="baz") + node = SampleNamedNode(foo="bar", name=name_node) + assert repr(node) == "SampleNamedNode(name='baz')" + node = SampleNamedNode(alpha=1, beta=2, name=name_node, loc=3) + assert repr(node) == "SampleNamedNode(name='baz') at 3" + + def has_representation_when_named_but_name_is_none(): + node = SampleNamedNode(alpha=1, beta=2, name=None) + assert repr(node) == "SampleNamedNode" + node = SampleNamedNode(alpha=1, beta=2, name=None, loc=3) + assert repr(node) == "SampleNamedNode at 3" + + def has_special_representation_when_it_is_a_name_node(): + node = NameNode(value="foo") + assert 
repr(node) == "NameNode('foo')" + node = NameNode(value="foo", loc=3) + assert repr(node) == "NameNode('foo') at 3" + def can_check_equality(): node = SampleTestNode(alpha=1, beta=2) node2 = SampleTestNode(alpha=1, beta=2) assert node2 == node - assert not node2 != node + assert not node2 != node  # noqa: SIM202 node2 = SampleTestNode(alpha=1, beta=1) assert node2 != node node3 = Node(alpha=1, beta=2) @@ -180,18 +208,19 @@ def can_hash(): assert node3 != node assert hash(node3) != hash(node) + # noinspection PyProtectedMember def caches_are_hashed(): node = SampleTestNode(alpha=1) assert not hasattr(node, "_hash") hash1 = hash(node) assert hasattr(node, "_hash") - assert hash1 == getattr(node, "_hash") + assert hash1 == node._hash # noqa: SLF001 node.alpha = 2 assert not hasattr(node, "_hash") hash2 = hash(node) assert hash2 != hash1 assert hasattr(node, "_hash") - assert hash2 == getattr(node, "_hash") + assert hash2 == node._hash # noqa: SLF001 def can_create_weak_reference(): node = SampleTestNode(alpha=1, beta=2) diff --git a/tests/language/test_block_string.py b/tests/language/test_block_string.py index d617de27..d135dde9 100644 --- a/tests/language/test_block_string.py +++ b/tests/language/test_block_string.py @@ -1,8 +1,10 @@ -from typing import cast, Collection, Optional +from __future__ import annotations + +from typing import Collection, cast from graphql.language.block_string import ( - is_printable_as_block_string, dedent_block_string_lines, + is_printable_as_block_string, print_block_string, ) @@ -146,13 +148,13 @@ def __init__(self, string: str) -> None: def __str__(self) -> str: return self.string - _assert_printable(cast(str, LazyString(""))) - _assert_non_printable(cast(str, LazyString(" "))) + _assert_printable(cast("str", LazyString(""))) + _assert_non_printable(cast("str", LazyString(" "))) def describe_print_block_string(): def _assert_block_string( - s: str, readable: str, minimize: Optional[str] = None + s: str, readable: str, minimize: str | None = None ) -> None: assert print_block_string(s) == readable assert print_block_string(s, minimize=True) == minimize or readable @@ -210,4 +212,4 @@ class LazyString: def __str__(self) -> str: return "lazy" - _assert_block_string(cast(str, LazyString()), '"""lazy"""') + _assert_block_string(cast("str", LazyString()), '"""lazy"""') diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index b780ce9f..0e17b4d4 100644 --- a/tests/language/test_block_string_fuzz.py +++ b/tests/language/test_block_string_fuzz.py @@ -1,9 +1,9 @@ -from pytest import mark +import pytest -from graphql.language import Source, Lexer, TokenKind +from graphql.language import Lexer, Source, TokenKind from graphql.language.block_string import ( - print_block_string, is_printable_as_block_string, + print_block_string, ) from ..utils import dedent, gen_fuzz_strings @@ -41,8 +41,8 @@ def assert_non_printable_block_string(test_value: str) -> None: def describe_print_block_string(): - @mark.slow - @mark.timeout(20) + @pytest.mark.slow + @pytest.mark.timeout(80) def correctly_print_random_strings(): # Testing with length >7 is taking exponentially more time. However, it is # highly recommended testing with increased limit if you make any change.
diff --git a/tests/language/test_character_classes.py b/tests/language/test_character_classes.py index 37cddf5d..8fef324c 100644 --- a/tests/language/test_character_classes.py +++ b/tests/language/test_character_classes.py @@ -1,13 +1,14 @@ -from string import ascii_letters as letters, digits, punctuation +from string import ascii_letters as letters +from string import digits, punctuation from graphql.language.character_classes import ( is_digit, is_letter, - is_name_start, is_name_continue, + is_name_start, ) -non_ascii = "¯＿±¹²³½£ºµÄäÖöØø×〇ᧇ〸αΑωΩ" +non_ascii = "¯＿±¹²³½£ºµÄäÖöØø×〇ᧇ〸αΑωΩ" # noqa: RUF001 def describe_digit(): diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index a1ac5abf..a44e859d 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -1,6 +1,8 @@ -from typing import List, Optional, Tuple +from __future__ import annotations -from pytest import raises +from typing import Optional, Tuple + +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind @@ -9,7 +11,13 @@ from ..utils import dedent -Location = Optional[Tuple[int, int]] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +Location: TypeAlias = Optional[Tuple[int, int]] def lex_one(s: str) -> Token: @@ -24,7 +32,7 @@ def lex_second(s: str) -> Token: def assert_syntax_error(text: str, message: str, location: Location) -> None: - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: lex_second(text) error = exc_info.value assert error.message == f"Syntax Error: {message}" @@ -34,7 +42,7 @@ def assert_syntax_error(text: str, message: str, location: Location) -> None: def describe_lexer(): def ignores_bom_header(): - token = lex_one("\uFEFF foo") + token = lex_one("\ufeff foo") assert token == Token(TokenKind.NAME, 2, 5, 1, 3, "foo") def tracks_line_breaks(): @@ -72,7 +80,7 @@ def skips_whitespace_and_comments(): assert token == Token(TokenKind.NAME, 3, 6, 1, 4, "foo") def errors_respect_whitespace(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: lex_one("\n\n ~\n") assert str(exc_info.value) == dedent( """ @@ -90,7 +98,7 @@ def errors_respect_whitespace(): def updates_line_numbers_in_error_for_file_context(): s = "\n\n ~\n\n" source = Source(s, "foo.js", SourceLocation(11, 12)) - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent( """ @@ -106,7 +114,7 @@ def updates_line_numbers_in_error_for_file_context(): def updates_column_numbers_in_error_for_file_context(): source = Source("~", "foo.js", SourceLocation(1, 5)) - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent( """ @@ -138,8 +146,8 @@ def lexes_strings(): assert lex_one('"slashes \\\\ \\/"') == Token( TokenKind.STRING, 0, 15, 1, 1, "slashes \\ /" ) - assert lex_one('"unescaped surrogate pair \uD83D\uDE00"') == Token( - TokenKind.STRING, 0, 29, 1, 1, "unescaped surrogate pair \uD83D\uDE00" + assert lex_one('"unescaped surrogate pair \ud83d\ude00"') == Token( + TokenKind.STRING, 0, 29, 1, 1, "unescaped surrogate pair \ud83d\ude00" ) assert lex_one('"unescaped unicode outside BMP \U0001f600"') == Token( 
TokenKind.STRING, 0, 33, 1, 1, "unescaped unicode outside BMP \U0001f600" @@ -153,10 +161,10 @@ def lexes_strings(): "unescaped maximal unicode outside BMP \U0010ffff", ) assert lex_one('"unicode \\u1234\\u5678\\u90AB\\uCDEF"') == Token( - TokenKind.STRING, 0, 34, 1, 1, "unicode \u1234\u5678\u90AB\uCDEF" + TokenKind.STRING, 0, 34, 1, 1, "unicode \u1234\u5678\u90ab\ucdef" ) assert lex_one('"unicode \\u{1234}\\u{5678}\\u{90AB}\\u{CDEF}"') == Token( - TokenKind.STRING, 0, 42, 1, 1, "unicode \u1234\u5678\u90AB\uCDEF" + TokenKind.STRING, 0, 42, 1, 1, "unicode \u1234\u5678\u90ab\ucdef" ) assert lex_one('"string with unicode escape outside BMP \\u{1F600}"') == Token( TokenKind.STRING, @@ -164,7 +172,7 @@ def lexes_strings(): 50, 1, 1, - "string with unicode escape outside BMP \U0001F600", + "string with unicode escape outside BMP \U0001f600", ) assert lex_one('"string with minimal unicode escape \\u{0}"') == Token( TokenKind.STRING, 0, 42, 1, 1, "string with minimal unicode escape \u0000" @@ -175,7 +183,7 @@ def lexes_strings(): 47, 1, 1, - "string with maximal unicode escape \U0010FFFF", + "string with maximal unicode escape \U0010ffff", ) assert lex_one( '"string with maximal minimal unicode escape \\u{00000000}"' @@ -215,7 +223,7 @@ def lexes_strings(): 56, 1, 1, - "string with unicode surrogate pair escape \U0010FFFF", + "string with unicode surrogate pair escape \U0010ffff", ) def lex_reports_useful_string_errors(): @@ -230,17 +238,17 @@ def lex_reports_useful_string_errors(): (1, 1), ) assert_syntax_error( - '"bad surrogate \uDEAD"', + '"bad surrogate \udead"', "Invalid character within String: U+DEAD.", (1, 16), ) assert_syntax_error( - '"bad high surrogate pair \uDEAD\uDEAD"', + '"bad high surrogate pair \udead\udead"', "Invalid character within String: U+DEAD.", (1, 26), ) assert_syntax_error( - '"bad low surrogate pair \uD800\uD800"', + '"bad low surrogate pair \ud800\ud800"', "Invalid character within String: U+D800.", (1, 25), ) @@ -322,12 +330,12 @@ def lex_reports_useful_string_errors(): (1, 25), ) assert_syntax_error( - '"cannot escape half a pair \uD83D\\uDE00 esc"', + '"cannot escape half a pair \ud83d\\uDE00 esc"', "Invalid character within String: U+D83D.", (1, 28), ) assert_syntax_error( - '"cannot escape half a pair \\uD83D\uDE00 esc"', + '"cannot escape half a pair \\uD83D\ude00 esc"', "Invalid Unicode escape sequence: '\\uD83D'.", (1, 28), ) @@ -366,13 +374,13 @@ def lexes_block_strings(): 1, "unescaped \\n\\r\\b\\t\\f\\u1234", ) - assert lex_one('"""unescaped surrogate pair \uD83D\uDE00"""') == Token( + assert lex_one('"""unescaped surrogate pair \ud83d\ude00"""') == Token( TokenKind.BLOCK_STRING, 0, 33, 1, 1, - "unescaped surrogate pair \uD83D\uDE00", + "unescaped surrogate pair \ud83d\ude00", ) assert lex_one('"""unescaped unicode outside BMP \U0001f600"""') == Token( TokenKind.BLOCK_STRING, @@ -386,29 +394,25 @@ def lexes_block_strings(): TokenKind.BLOCK_STRING, 0, 19, 1, 1, "slashes \\\\ \\/" ) assert lex_one( - '"""\n\n spans\n multiple\n' - ' lines\n\n """' + '"""\n\n spans\n multiple\n lines\n\n """' ) == Token(TokenKind.BLOCK_STRING, 0, 68, 1, 1, "spans\n multiple\n lines") def advance_line_after_lexing_multiline_block_string(): - assert ( - lex_second( - '''""" + assert lex_second( + '''""" spans multiple lines \n """ second_token''' - ) - == Token(TokenKind.NAME, 71, 83, 8, 6, "second_token") - ) + ) == Token(TokenKind.NAME, 71, 83, 8, 6, "second_token") def lex_reports_useful_block_string_errors(): assert_syntax_error('"""', "Unterminated string.", (1, 4)) 
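+        # the reported column points just past the end of the unterminated input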
assert_syntax_error('"""no end quote', "Unterminated string.", (1, 16)) assert_syntax_error( - '"""contains invalid surrogate \uDEAD"""', + '"""contains invalid surrogate \udead"""', "Invalid character within String: U+DEAD.", (1, 31), ) @@ -512,6 +516,7 @@ def lex_does_not_allow_name_start_after_a_number(): # noinspection PyArgumentEqualDefault def lexes_punctuation(): assert lex_one("!") == Token(TokenKind.BANG, 0, 1, 1, 1, None) + assert lex_one("?") == Token(TokenKind.QUESTION_MARK, 0, 1, 1, 1, None) assert lex_one("$") == Token(TokenKind.DOLLAR, 0, 1, 1, 1, None) assert lex_one("(") == Token(TokenKind.PAREN_L, 0, 1, 1, 1, None) assert lex_one(")") == Token(TokenKind.PAREN_R, 0, 1, 1, 1, None) @@ -530,16 +535,16 @@ def lex_reports_useful_unknown_character_error(): assert_syntax_error("~", "Unexpected character: '~'.", (1, 1)) assert_syntax_error("\x00", "Unexpected character: U+0000.", (1, 1)) assert_syntax_error("\b", "Unexpected character: U+0008.", (1, 1)) - assert_syntax_error("\xAA", "Unexpected character: U+00AA.", (1, 1)) - assert_syntax_error("\u0AAA", "Unexpected character: U+0AAA.", (1, 1)) - assert_syntax_error("\u203B", "Unexpected character: U+203B.", (1, 1)) + assert_syntax_error("\xaa", "Unexpected character: U+00AA.", (1, 1)) + assert_syntax_error("\u0aaa", "Unexpected character: U+0AAA.", (1, 1)) + assert_syntax_error("\u203b", "Unexpected character: U+203B.", (1, 1)) assert_syntax_error("\U0001f600", "Unexpected character: U+1F600.", (1, 1)) - assert_syntax_error("\uD83D\uDE00", "Unexpected character: U+1F600.", (1, 1)) - assert_syntax_error("\uD800\uDC00", "Unexpected character: U+10000.", (1, 1)) - assert_syntax_error("\uDBFF\uDFFF", "Unexpected character: U+10FFFF.", (1, 1)) - assert_syntax_error("\uD800", "Invalid character: U+D800.", (1, 1)) - assert_syntax_error("\uDBFF", "Invalid character: U+DBFF.", (1, 1)) - assert_syntax_error("\uDEAD", "Invalid character: U+DEAD.", (1, 1)) + assert_syntax_error("\ud83d\ude00", "Unexpected character: U+1F600.", (1, 1)) + assert_syntax_error("\ud800\udc00", "Unexpected character: U+10000.", (1, 1)) + assert_syntax_error("\udbff\udfff", "Unexpected character: U+10FFFF.", (1, 1)) + assert_syntax_error("\ud800", "Invalid character: U+D800.", (1, 1)) + assert_syntax_error("\udbff", "Invalid character: U+DBFF.", (1, 1)) + assert_syntax_error("\udead", "Invalid character: U+DEAD.", (1, 1)) # noinspection PyArgumentEqualDefault def lex_reports_useful_information_for_dashes_in_names(): @@ -547,7 +552,7 @@ def lex_reports_useful_information_for_dashes_in_names(): lexer = Lexer(source) first_token = lexer.advance() assert first_token == Token(TokenKind.NAME, 0, 1, 1, 1, "a") - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: lexer.advance() error = exc_info.value assert error.message == ( @@ -573,8 +578,8 @@ def produces_double_linked_list_of_tokens_including_comments(): assert end_token.kind != TokenKind.COMMENT assert start_token.prev is None assert end_token.next is None - tokens: List[Token] = [] - tok: Optional[Token] = start_token + tokens: list[Token] = [] + tok: Token | None = start_token while tok: assert not tokens or tok.prev == tokens[-1] tokens.append(tok) @@ -601,11 +606,11 @@ def lexes_comments(): assert lex_one("# Comment \U0001f600").prev == Token( TokenKind.COMMENT, 0, 11, 1, 1, " Comment \U0001f600" ) - assert lex_one("# Comment \uD83D\uDE00").prev == Token( - TokenKind.COMMENT, 0, 12, 1, 1, " Comment \uD83D\uDE00" + assert lex_one("# Comment \ud83d\ude00").prev == 
Token( + TokenKind.COMMENT, 0, 12, 1, 1, " Comment \ud83d\ude00" ) assert_syntax_error( - "# Invalid surrogate \uDEAD", "Invalid character: U+DEAD.", (1, 21) + "# Invalid surrogate \udead", "Invalid character: U+DEAD.", (1, 21) ) @@ -615,6 +620,7 @@ def _is_punctuator_token(text: str) -> bool: def returns_true_for_punctuator_tokens(): assert _is_punctuator_token("!") is True + assert _is_punctuator_token("?") is True assert _is_punctuator_token("$") is True assert _is_punctuator_token("&") is True assert _is_punctuator_token("(") is True diff --git a/tests/language/test_location.py b/tests/language/test_location.py index 62096c19..c9ae2c14 100644 --- a/tests/language/test_location.py +++ b/tests/language/test_location.py @@ -10,34 +10,34 @@ def can_compare_with_other_source_location(): location = SourceLocation(1, 2) same_location = SourceLocation(1, 2) assert location == same_location - assert not location != same_location + assert not location != same_location # noqa: SIM202 different_location = SourceLocation(1, 1) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location different_location = SourceLocation(2, 2) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location def can_compare_with_location_tuple(): location = SourceLocation(1, 2) same_location = (1, 2) assert location == same_location - assert not location != same_location + assert not location != same_location # noqa: SIM202 different_location = (1, 1) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location different_location = (2, 2) - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location def can_compare_with_formatted_location(): location = SourceLocation(1, 2) same_location = location.formatted assert location == same_location - assert not location != same_location + assert not location != same_location # noqa: SIM202 different_location = SourceLocation(1, 1).formatted - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location different_location = SourceLocation(2, 2).formatted - assert not location == different_location + assert not location == different_location # noqa: SIM201 assert location != different_location diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index 027a605b..0121db23 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -1,46 +1,62 @@ -from typing import cast, Optional, Tuple +from __future__ import annotations -from pytest import raises +from typing import Optional, Tuple, cast + +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, DefinitionNode, DocumentNode, + ErrorBoundaryNode, FieldNode, IntValueNode, + ListNullabilityOperatorNode, ListTypeNode, ListValueNode, - NameNode, NamedTypeNode, + NameNode, + NonNullAssertionNode, NonNullTypeNode, + NullabilityAssertionNode, NullValueNode, ObjectFieldNode, ObjectValueNode, OperationDefinitionNode, OperationType, SelectionSetNode, + Source, StringValueNode, - ValueNode, - VariableNode, Token, TokenKind, + ValueNode, + VariableNode, parse, + parse_const_value, parse_type, parse_value, - parse_const_value, - Source, ) from graphql.pyutils 
import inspect

 from ..fixtures import kitchen_sink_query  # noqa: F401
 from ..utils import dedent

-Location = Optional[Tuple[int, int]]
+try:
+    from typing import TypeAlias
+except ImportError:  # Python < 3.10
+    from typing_extensions import TypeAlias
+
+
+Location: TypeAlias = Optional[Tuple[int, int]]
+
+
+def parse_ccn(source: str) -> DocumentNode:
+    return parse(source, experimental_client_controlled_nullability=True)


 def assert_syntax_error(text: str, message: str, location: Location) -> None:
-    with raises(GraphQLSyntaxError) as exc_info:
+    with pytest.raises(GraphQLSyntaxError) as exc_info:
         parse(text)
     error = exc_info.value
     assert error.message == f"Syntax Error: {message}"
@@ -48,9 +64,18 @@ def assert_syntax_error(text: str, message: str, location: Location) -> None:
     assert error.locations == [location]


+def assert_syntax_error_ccn(text: str, message: str, location: Location) -> None:
+    with pytest.raises(GraphQLSyntaxError) as exc_info:
+        parse_ccn(text)
+    error = exc_info.value
+    assert error.message == f"Syntax Error: {message}"
+    assert error.description == message
+    assert error.locations == [location]
+
+
 def describe_parser():
     def parse_provides_useful_errors():
-        with raises(GraphQLSyntaxError) as exc_info:
+        with pytest.raises(GraphQLSyntaxError) as exc_info:
             parse("{")
         error = exc_info.value
         assert error.message == "Syntax Error: Expected Name, found <EOF>."
@@ -78,7 +103,7 @@ def parse_provides_useful_errors():
         assert_syntax_error('{ ""', "Expected Name, found String ''.", (1, 3))

     def parse_provides_useful_error_when_using_source():
-        with raises(GraphQLSyntaxError) as exc_info:
+        with pytest.raises(GraphQLSyntaxError) as exc_info:
             parse(Source("query", "MyQuery.graphql"))
         error = exc_info.value
         assert str(error) == dedent(
@@ -91,6 +116,22 @@
             """
         )

+    def limits_by_a_maximum_number_of_tokens():
+        parse("{ foo }", max_tokens=3)
+        with pytest.raises(
+            GraphQLSyntaxError,
+            match="Syntax Error:"
+            r" Document contains more than 2 tokens\. Parsing aborted\.",
+        ):
+            parse("{ foo }", max_tokens=2)
+        parse('{ foo(bar: "baz") }', max_tokens=8)
+        with pytest.raises(
+            GraphQLSyntaxError,
+            match="Syntax Error:"
+            r" Document contains more than 7 tokens\. Parsing aborted\.",
+        ):
+            parse('{ foo(bar: "baz") }', max_tokens=7)
+
     def parses_variable_inline_values():
         parse("{ field(complex: { a: { b: [ $var ] } }) }")

@@ -133,27 +174,27 @@ def parses_multi_byte_characters():
         # Note: \u0A0A could be naively interpreted as two line-feed chars.
         doc = parse(
             """
-            # This comment has a \u0A0A multi-byte character.
-            { field(arg: "Has a \u0A0A multi-byte character.") }
+            # This comment has a \u0a0a multi-byte character.
+            { field(arg: "Has a \u0a0a multi-byte character.") }
             """
         )
         definitions = doc.definitions
         assert isinstance(definitions, tuple)
         assert len(definitions) == 1
-        selection_set = cast(OperationDefinitionNode, definitions[0]).selection_set
+        selection_set = cast("OperationDefinitionNode", definitions[0]).selection_set
         selections = selection_set.selections
         assert isinstance(selections, tuple)
         assert len(selections) == 1
-        arguments = cast(FieldNode, selections[0]).arguments
+        arguments = cast("FieldNode", selections[0]).arguments
         assert isinstance(arguments, tuple)
         assert len(arguments) == 1
         value = arguments[0].value
         assert isinstance(value, StringValueNode)
-        assert value.value == "Has a \u0A0A multi-byte character."
+        assert value.value == "Has a \u0a0a multi-byte character."
# noinspection PyShadowingNames def parses_kitchen_sink(kitchen_sink_query): # noqa: F811 - parse(kitchen_sink_query) + parse_ccn(kitchen_sink_query) def allows_non_keywords_anywhere_a_name_is_allowed(): non_keywords = ( @@ -216,6 +257,214 @@ def parses_named_subscription_operations(): """ ) + def parses_required_field(): + doc = parse_ccn("{ requiredField! }") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast("OperationDefinitionNode", definitions[0]) + selection_set: SelectionSetNode | None = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion = field.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (15, 16) + assert nullability_assertion.nullability_assertion is None + + def parses_optional_field(): + parse_ccn("{ optionalField? }") + + def does_not_parse_field_with_multiple_designators(): + assert_syntax_error_ccn( + "{ optionalField?! }", "Expected Name, found '!'.", (1, 17) + ) + assert_syntax_error_ccn( + "{ optionalField!? }", "Expected Name, found '?'.", (1, 17) + ) + + def parses_required_with_alias(): + parse_ccn("{ requiredField: field! }") + + def parses_optional_with_alias(): + parse_ccn("{ requiredField: field? }") + + def does_not_parse_aliased_field_with_bang_on_left_of_colon(): + assert_syntax_error_ccn( + "{ requiredField!: field }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_aliased_field_with_question_mark_on_left_of_colon(): + assert_syntax_error_ccn( + "{ requiredField?: field }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_aliased_field_with_bang_on_left_and_right_of_colon(): + assert_syntax_error_ccn( + "{ requiredField!: field! }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_aliased_field_with_question_mark_on_left_and_right_of_colon(): + assert_syntax_error_ccn( + "{ requiredField?: field? }", "Expected Name, found ':'.", (1, 17) + ) + + def does_not_parse_designator_on_query(): + assert_syntax_error_ccn("query? { field }", "Expected '{', found '?'.", (1, 6)) + + def parses_required_within_fragment(): + parse_ccn("fragment MyFragment on Query { field! }") + + def parses_optional_within_fragment(): + parse_ccn("fragment MyFragment on Query { field? }") + + def parses_field_with_required_list_elements(): + doc = parse_ccn("{ field[!] 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast("OperationDefinitionNode", definitions[0]) + selection_set: SelectionSetNode | None = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (8, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_optional_list_elements(): + doc = parse_ccn("{ field[?] }") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast("OperationDefinitionNode", definitions[0]) + selection_set: SelectionSetNode | None = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ErrorBoundaryNode) + assert nullability_assertion.loc == (8, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_required_list(): + doc = parse_ccn("{ field[]! }") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast("OperationDefinitionNode", definitions[0]) + selection_set: SelectionSetNode | None = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_optional_list(): + doc = parse_ccn("{ field[]? 
}") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast("OperationDefinitionNode", definitions[0]) + selection_set: SelectionSetNode | None = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) + assert isinstance(nullability_assertion, ErrorBoundaryNode) + assert nullability_assertion.loc == (7, 10) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 9) + assert nullability_assertion.nullability_assertion is None + + def parses_field_with_mixed_list_elements(): + doc = parse_ccn("{ field[[[?]!]]! }") + assert isinstance(doc, DocumentNode) + definitions = doc.definitions + assert isinstance(definitions, tuple) + assert len(definitions) == 1 + definition = cast("OperationDefinitionNode", definitions[0]) + selection_set: SelectionSetNode | None = definition.selection_set + assert isinstance(selection_set, SelectionSetNode) + selections = selection_set.selections + assert isinstance(selections, tuple) + assert len(selections) == 1 + field = selections[0] + assert isinstance(field, FieldNode) + nullability_assertion: NullabilityAssertionNode | None = ( + field.nullability_assertion + ) + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (7, 16) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (7, 15) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (8, 14) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, NonNullAssertionNode) + assert nullability_assertion.loc == (9, 13) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ListNullabilityOperatorNode) + assert nullability_assertion.loc == (9, 12) + nullability_assertion = nullability_assertion.nullability_assertion + assert isinstance(nullability_assertion, ErrorBoundaryNode) + assert nullability_assertion.loc == (10, 11) + assert nullability_assertion.nullability_assertion is None + + def does_not_parse_field_with_unbalanced_brackets(): + assert_syntax_error_ccn("{ field[[] }", "Expected ']', found '}'.", (1, 12)) + assert_syntax_error_ccn("{ field[]] }", "Expected Name, found ']'.", (1, 10)) + assert_syntax_error_ccn("{ field] }", "Expected Name, found ']'.", (1, 8)) + assert_syntax_error_ccn("{ field[ }", "Expected ']', found '}'.", (1, 10)) + + def does_not_parse_field_with_assorted_invalid_nullability_designators(): + assert_syntax_error_ccn("{ field[][] }", "Expected Name, found '['.", (1, 10)) + assert_syntax_error_ccn("{ field[!!] }", "Expected ']', found '!'.", (1, 10)) + assert_syntax_error_ccn("{ field[]?! 
}", "Expected Name, found '!'.", (1, 11)) + def creates_ast(): doc = parse( dedent( @@ -234,14 +483,14 @@ def creates_ast(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) assert isinstance(definition, DefinitionNode) assert definition.loc == (0, 40) assert definition.operation == OperationType.QUERY assert definition.name is None assert definition.variable_definitions == () assert definition.directives == () - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) assert selection_set.loc == (0, 40) selections = selection_set.selections @@ -270,6 +519,7 @@ def creates_ast(): assert value.loc == (13, 14) assert value.value == "4" assert argument.loc == (9, 14) + assert field.nullability_assertion is None assert field.directives == () selection_set = field.selection_set assert isinstance(selection_set, SelectionSetNode) @@ -285,17 +535,7 @@ def creates_ast(): assert name.loc == (22, 24) assert name.value == "id" assert field.arguments == () - assert field.directives == () - assert field.selection_set is None - field = selections[0] - assert isinstance(field, FieldNode) - assert field.loc == (22, 24) - assert field.alias is None - name = field.name - assert isinstance(name, NameNode) - assert name.loc == (22, 24) - assert name.value == "id" - assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () assert field.selection_set is None field = selections[1] @@ -307,6 +547,7 @@ def creates_ast(): assert name.loc == (30, 34) assert name.value == "name" assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () assert field.selection_set is None @@ -334,7 +575,7 @@ def creates_ast_from_nameless_query_without_variables(): assert definition.name is None assert definition.variable_definitions == () assert definition.directives == () - selection_set: Optional[SelectionSetNode] = definition.selection_set + selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) assert selection_set.loc == (6, 29) selections = selection_set.selections @@ -349,6 +590,7 @@ def creates_ast_from_nameless_query_without_variables(): assert name.loc == (10, 14) assert name.value == "node" assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () selection_set = field.selection_set assert isinstance(selection_set, SelectionSetNode) @@ -365,6 +607,7 @@ def creates_ast_from_nameless_query_without_variables(): assert name.loc == (21, 23) assert name.value == "id" assert field.arguments == () + assert field.nullability_assertion is None assert field.directives == () assert field.selection_set is None @@ -375,7 +618,7 @@ def allows_parsing_without_source_location_information(): def legacy_allows_parsing_fragment_defined_variables(): document = "fragment a($v: Boolean = false) on t { f(v: $v) }" parse(document, allow_legacy_fragment_variables=True) - with raises(GraphQLSyntaxError): + with pytest.raises(GraphQLSyntaxError): parse(document) def contains_location_information_that_only_stringifies_start_end(): @@ -387,7 +630,8 @@ def contains_location_information_that_only_stringifies_start_end(): def contains_references_to_source(): source 
= Source("{ id }")
         result = parse(source)
-        assert result.loc and result.loc.source is source
+        assert result.loc
+        assert result.loc.source is source

     def contains_references_to_start_and_end_tokens():
         result = parse("{ id }")
@@ -408,13 +652,16 @@ def allows_comments_everywhere_in_the_source():
             # bottom comment"""
         )
         top_comment = result.loc and result.loc.start_token.next
-        assert top_comment and top_comment.kind is TokenKind.COMMENT
+        assert top_comment
+        assert top_comment.kind is TokenKind.COMMENT
         assert top_comment.value == " top comment"
         field_comment = top_comment.next.next.next  # type: ignore
-        assert field_comment and field_comment.kind is TokenKind.COMMENT
+        assert field_comment
+        assert field_comment.kind is TokenKind.COMMENT
         assert field_comment.value == " field comment"
         bottom_comment = field_comment.next.next  # type: ignore
-        assert bottom_comment and bottom_comment.kind is TokenKind.COMMENT
+        assert bottom_comment
+        assert bottom_comment.kind is TokenKind.COMMENT
         assert bottom_comment.value == " bottom comment"


@@ -486,7 +733,7 @@ def allows_variables():
         assert name.value == "var"

     def correct_message_for_incomplete_variable():
-        with raises(GraphQLSyntaxError) as exc_info:
+        with pytest.raises(GraphQLSyntaxError) as exc_info:
             parse_value("$")
         assert exc_info.value == {
             "message": "Syntax Error: Expected Name, found <EOF>.",
@@ -494,7 +741,7 @@
         }

     def correct_message_for_unexpected_token():
-        with raises(GraphQLSyntaxError) as exc_info:
+        with pytest.raises(GraphQLSyntaxError) as exc_info:
             parse_value(":")
         assert exc_info.value == {
             "message": "Syntax Error: Unexpected ':'.",
@@ -520,7 +767,7 @@ def parses_values():
         assert value.block is False

     def does_not_allow_variables():
-        with raises(GraphQLSyntaxError) as exc_info:
+        with pytest.raises(GraphQLSyntaxError) as exc_info:
             parse_const_value("{ field: $var }")
         assert exc_info.value == {
             "message": "Syntax Error: Unexpected variable '$var' in constant value.",
@@ -528,7 +775,7 @@
         }

     def correct_message_for_unexpected_token():
-        with raises(GraphQLSyntaxError) as exc_info:
+        with pytest.raises(GraphQLSyntaxError) as exc_info:
             parse_const_value("$$")
         assert exc_info.value == {
             "message": "Syntax Error: Unexpected '$'.",
diff --git a/tests/language/test_predicates.py b/tests/language/test_predicates.py
index 06369b0a..f87148e4 100644
--- a/tests/language/test_predicates.py
+++ b/tests/language/test_predicates.py
@@ -2,19 +2,20 @@
 from typing import Callable

 from graphql.language import (
-    ast,
     Node,
-    parse_value,
+    ast,
+    is_const_value_node,
     is_definition_node,
     is_executable_definition_node,
+    is_nullability_assertion_node,
     is_selection_node,
-    is_value_node,
-    is_const_value_node,
+    is_type_definition_node,
+    is_type_extension_node,
     is_type_node,
     is_type_system_definition_node,
-    is_type_definition_node,
     is_type_system_extension_node,
-    is_type_extension_node,
+    is_value_node,
+    parse_value,
 )

 all_ast_nodes = sorted(
@@ -77,6 +78,14 @@ def check_selection_node():
             "selection",
         ]

+    def check_nullability_assertion_node():
+        assert filter_nodes(is_nullability_assertion_node) == [
+            "error_boundary",
+            "list_nullability_operator",
+            "non_null_assertion",
+            "nullability_assertion",
+        ]
+
     def check_value_node():
         assert filter_nodes(is_value_node) == [
             "boolean_value",
diff --git a/tests/language/test_print_string.py b/tests/language/test_print_string.py
index 644c6669..8daa2e27 100644
--- a/tests/language/test_print_string.py
+++
b/tests/language/test_print_string.py @@ -21,23 +21,23 @@ def does_not_escape_space(): assert print_string(" ") == '" "' def does_not_escape_non_ascii_character(): - assert print_string("\u21BB") == '"\u21BB"' + assert print_string("\u21bb") == '"\u21bb"' def does_not_escape_supplementary_character(): assert print_string("\U0001f600") == '"\U0001f600"' def escapes_all_control_chars(): assert print_string( - "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F" - "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1A\x1B\x1C\x1D\x1E\x1F" - "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2A\x2B\x2C\x2D\x2E\x2F" - "\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3A\x3B\x3C\x3D\x3E\x3F" - "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F" - "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x5B\x5C\x5D\x5E\x5F" - "\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6A\x6B\x6C\x6D\x6E\x6F" - "\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7A\x7B\x7C\x7D\x7E\x7F" - "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8A\x8B\x8C\x8D\x8E\x8F" - "\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9A\x9B\x9C\x9D\x9E\x9F" + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f" + "\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f" + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f" + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f" + "\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f" + "\x70\x71\x72\x73\x74\x75\x76\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f" + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f" + "\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f" ) == ( '"\\u0000\\u0001\\u0002\\u0003\\u0004\\u0005\\u0006\\u0007' "\\b\\t\\n\\u000B\\f\\r\\u000E\\u000F" diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index 4468478b..42531096 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -1,6 +1,6 @@ from copy import deepcopy -from pytest import raises +import pytest from graphql.language import FieldNode, NameNode, parse, print_ast @@ -15,12 +15,12 @@ def prints_minimal_ast(): def produces_helpful_error_messages(): bad_ast = {"random": "Data"} - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker print_ast(bad_ast) # type: ignore assert str(exc_info.value) == "Not an AST Node: {'random': 'Data'}." corrupt_ast = FieldNode(name="random data") - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: print_ast(corrupt_ast) assert str(exc_info.value) == "Invalid AST Node: 'random data'." 
@@ -60,11 +60,11 @@ def correctly_prints_mutation_operation_with_artifacts(): def prints_query_with_variable_directives(): query_ast_with_variable_directive = parse( - "query ($foo: TestType = {a: 123}" " @testDirective(if: true) @test) { id }" + "query ($foo: TestType = { a: 123 } @testDirective(if: true) @test) { id }" ) assert print_ast(query_ast_with_variable_directive) == dedent( """ - query ($foo: TestType = {a: 123} @testDirective(if: true) @test) { + query ($foo: TestType = { a: 123 } @testDirective(if: true) @test) { id } """ @@ -106,6 +106,75 @@ def puts_arguments_on_multiple_lines_if_line_has_more_than_80_chars(): """ ) + def puts_large_object_values_on_multiple_lines_if_line_has_more_than_80_chars(): + printed = print_ast( + parse( + "{trip(obj:{wheelchair:false,smallObj:{a: 1},largeObj:" + "{wheelchair:false,smallObj:{a: 1},arriveBy:false," + "includePlannedCancellations:true,transitDistanceReluctance:2000," + 'anotherLongFieldName:"Lots and lots and lots and lots of text"},' + "arriveBy:false,includePlannedCancellations:true," + "transitDistanceReluctance:2000,anotherLongFieldName:" + '"Lots and lots and lots and lots of text"}){dateTime}}' + ) + ) + + assert printed == dedent( + """ + { + trip( + obj: { + wheelchair: false + smallObj: { a: 1 } + largeObj: { + wheelchair: false + smallObj: { a: 1 } + arriveBy: false + includePlannedCancellations: true + transitDistanceReluctance: 2000 + anotherLongFieldName: "Lots and lots and lots and lots of text" + } + arriveBy: false + includePlannedCancellations: true + transitDistanceReluctance: 2000 + anotherLongFieldName: "Lots and lots and lots and lots of text" + } + ) { + dateTime + } + } + """ + ) + + def puts_large_list_values_on_multiple_lines_if_line_has_more_than_80_chars(): + printed = print_ast( + parse( + '{trip(list:[["small array", "small", "small"],' + ' ["Lots and lots and lots and lots of text",' + ' "Lots and lots and lots and lots of text",' + ' "Lots and lots and lots and lots of text"]]){dateTime}}' + ) + ) + + assert printed == dedent( + """ + { + trip( + list: [ + ["small array", "small", "small"] + [ + "Lots and lots and lots and lots of text" + "Lots and lots and lots and lots of text" + "Lots and lots and lots and lots of text" + ] + ] + ) { + dateTime + } + } + """ + ) + def legacy_prints_fragment_with_variable_directives(): query_ast_with_variable_directive = parse( "fragment Foo($foo: TestType @test) on TestType @testDirective { id }", @@ -129,11 +198,17 @@ def legacy_correctly_prints_fragment_defined_variables(): assert print_ast(fragment_with_variable) == dedent(source) def prints_kitchen_sink_without_altering_ast(kitchen_sink_query): # noqa: F811 - ast = parse(kitchen_sink_query, no_location=True) + ast = parse( + kitchen_sink_query, + no_location=True, + experimental_client_controlled_nullability=True, + ) ast_before_print_call = deepcopy(ast) printed = print_ast(ast) - printed_ast = parse(printed, no_location=True) + printed_ast = parse( + printed, no_location=True, experimental_client_controlled_nullability=True + ) assert printed_ast == ast assert deepcopy(ast) == ast_before_print_call @@ -150,6 +225,19 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_query): # noqa: F811 ...frag @onFragmentSpread } } + field3! + field4? + requiredField5: field5! + requiredSelectionSet(first: 10)! @directive { + field + } + unsetListItemsRequiredList: listField[]! + requiredListItemsUnsetList: listField[!] + requiredListItemsRequiredList: listField[!]! + unsetListItemsOptionalList: listField[]? 
+ optionalListItemsUnsetList: listField[?] + optionalListItemsOptionalList: listField[?]? + multidimensionalList: listField[[[!]!]!]! } ... @skip(unless: $foo) { id @@ -185,9 +273,9 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_query): # noqa: F811 foo( size: $size bar: $b - obj: {key: "value", block: """ + obj: { key: "value", block: """ block string uses \""" - """} + """ } ) } diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index 7eef1978..df64381a 100644 --- a/tests/language/test_schema_parser.py +++ b/tests/language/test_schema_parser.py @@ -1,7 +1,11 @@ +from __future__ import annotations + +import pickle +from copy import deepcopy from textwrap import dedent -from typing import List, Optional, Tuple +from typing import Optional, Tuple -from pytest import raises +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import ( @@ -18,8 +22,8 @@ InterfaceTypeDefinitionNode, InterfaceTypeExtensionNode, ListTypeNode, - NameNode, NamedTypeNode, + NameNode, NonNullTypeNode, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, @@ -37,11 +41,17 @@ from ..fixtures import kitchen_sink_sdl # noqa: F401 -Location = Optional[Tuple[int, int]] +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +Location: TypeAlias = Optional[Tuple[int, int]] def assert_syntax_error(text: str, message: str, location: Location) -> None: - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: parse(text) error = exc_info.value assert error.message == f"Syntax Error: {message}" @@ -71,7 +81,7 @@ def field_node(name: NameNode, type_: TypeNode, loc: Location): return field_node_with_args(name, type_, [], loc) -def field_node_with_args(name: NameNode, type_: TypeNode, args: List, loc: Location): +def field_node_with_args(name: NameNode, type_: TypeNode, args: list, loc: Location): return FieldDefinitionNode( name=name, arguments=args, type=type_, directives=[], loc=loc, description=None ) @@ -88,7 +98,7 @@ def enum_value_node(name: str, loc: Location): def input_value_node( - name: NameNode, type_: TypeNode, default_value: Optional[ValueNode], loc: Location + name: NameNode, type_: TypeNode, default_value: ValueNode | None, loc: Location ): return InputValueDefinitionNode( name=name, @@ -104,7 +114,7 @@ def boolean_value_node(value: bool, loc: Location): return BooleanValueNode(value=value, loc=loc) -def string_value_node(value: str, block: Optional[bool], loc: Location): +def string_value_node(value: str, block: bool | None, loc: Location): return StringValueNode(value=value, block=block, loc=loc) @@ -113,8 +123,8 @@ def list_type_node(type_: TypeNode, loc: Location): def schema_extension_node( - directives: List[DirectiveNode], - operation_types: List[OperationTypeDefinitionNode], + directives: list[DirectiveNode], + operation_types: list[OperationTypeDefinitionNode], loc: Location, ): return SchemaExtensionNode( @@ -126,7 +136,7 @@ def operation_type_definition(operation: OperationType, type_: TypeNode, loc: Lo return OperationTypeDefinitionNode(operation=operation, type=type_, loc=loc) -def directive_node(name: NameNode, arguments: List[ArgumentNode], loc: Location): +def directive_node(name: NameNode, arguments: list[ArgumentNode], loc: Location): return DirectiveNode(name=name, arguments=arguments, loc=loc) @@ -796,19 +806,36 @@ def directive_with_incorrect_locations(): def parses_kitchen_sink_schema(kitchen_sink_sdl): # 
noqa: F811 assert parse(kitchen_sink_sdl) - def can_pickle_and_unpickle_kitchen_sink_schema_ast(kitchen_sink_sdl): # noqa: F811 - import pickle - - # create a schema AST from the kitchen sink SDL - doc = parse(kitchen_sink_sdl) - # check that the schema AST can be pickled - # (particularly, there should be no recursion error) - dumped = pickle.dumps(doc) - # check that the pickle size is reasonable - assert len(dumped) < 50 * len(kitchen_sink_sdl) - loaded = pickle.loads(dumped) - # check that the un-pickled schema AST is still the same - assert loaded == doc - # check that pickling again creates the same result - dumped_again = pickle.dumps(doc) - assert dumped_again == dumped + def describe_deepcopy_and_pickle(): + def can_deep_copy_ast(kitchen_sink_sdl): # noqa: F811 + # create a schema AST from the kitchen sink SDL + doc = parse(kitchen_sink_sdl) + # make a deepcopy of the schema AST + copied_doc = deepcopy(doc) + # check that the copied AST is equal to the original one + assert copied_doc == doc + + def can_pickle_and_unpickle_ast(kitchen_sink_sdl): # noqa: F811 + # create a schema AST from the kitchen sink SDL + doc = parse(kitchen_sink_sdl) + # check that the schema AST can be pickled + # (particularly, there should be no recursion error) + dumped = pickle.dumps(doc) + # check that the pickle size is reasonable + assert len(dumped) < 50 * len(kitchen_sink_sdl) + loaded = pickle.loads(dumped) + # check that the un-pickled schema AST is still the same + assert loaded == doc + # check that pickling again creates the same result + dumped_again = pickle.dumps(doc) + assert dumped_again == dumped + + def can_deep_copy_pickled_ast(kitchen_sink_sdl): # noqa: F811 + # create a schema AST from the kitchen sink SDL + doc = parse(kitchen_sink_sdl) + # pickle and unpickle the schema AST + loaded_doc = pickle.loads(pickle.dumps(doc)) + # make a deepcopy of this + copied_doc = deepcopy(loaded_doc) + # check that the result is still equal to the original schema AST + assert copied_doc == doc diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index cd097a80..083dcd0f 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -1,8 +1,8 @@ from copy import deepcopy -from pytest import raises +import pytest -from graphql.language import ScalarTypeDefinitionNode, NameNode, print_ast, parse +from graphql.language import NameNode, ScalarTypeDefinitionNode, parse, print_ast from ..fixtures import kitchen_sink_sdl # noqa: F401 from ..utils import dedent @@ -15,7 +15,7 @@ def prints_minimal_ast(): def produces_helpful_error_messages(): bad_ast = {"random": "Data"} - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker print_ast(bad_ast) # type: ignore msg = str(exc_info.value) @@ -56,8 +56,9 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 three(argument: InputType, other: String): Int four(argument: String = "string"): String five(argument: [String] = ["string", "string"]): String - six(argument: InputType = {key: "value"}): Type + six(argument: InputType = { key: "value" }): Type seven(argument: Int = null): Type + eight(argument: OneOfInputType): Type } type AnnotatedObject @onObject(arg: "value") { @@ -140,6 +141,11 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 answer: Int = 42 } + input OneOfInputType @oneOf { + string: String + int: Int + } + input AnnotatedInput @onInputObject { annotatedField: Type 
@onInputFieldDefinition } diff --git a/tests/language/test_source.py b/tests/language/test_source.py index 1b74aa1b..b973410d 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -1,8 +1,9 @@ -import weakref +from __future__ import annotations -from typing import cast, Tuple +import weakref +from typing import cast -from pytest import raises +import pytest from graphql.language import Source, SourceLocation @@ -55,17 +56,17 @@ def can_be_stringified(): def can_be_compared(): source = Source("foo") - assert source == source - assert not source != source + assert source == source # noqa: PLR0124 + assert not source != source # noqa: PLR0124, SIM202 assert source == "foo" - assert not source != "foo" + assert not source != "foo" # noqa: SIM202 same_source = Source("foo") assert source == same_source - assert not source != same_source + assert not source != same_source # noqa: SIM202 different_source = Source("bar") - assert not source == different_source + assert not source == different_source # noqa: SIM201 assert source != different_source - assert not source == "bar" + assert not source == "bar" # noqa: SIM201 assert source != "bar" def can_create_weak_reference(): @@ -79,35 +80,35 @@ def can_create_custom_attribute(): assert node.custom == "bar" # type: ignore def rejects_invalid_location_offset(): - def create_source(location_offset: Tuple[int, int]) -> Source: - return Source("", "", cast(SourceLocation, location_offset)) + def create_source(location_offset: tuple[int, int]) -> Source: + return Source("", "", cast("SourceLocation", location_offset)) - with raises(TypeError): + with pytest.raises(TypeError): create_source(None) # type: ignore - with raises(TypeError): + with pytest.raises(TypeError): create_source(1) # type: ignore - with raises(TypeError): + with pytest.raises(TypeError): create_source((1,)) # type: ignore - with raises(TypeError): + with pytest.raises(TypeError): create_source((1, 2, 3)) # type: ignore - with raises( + with pytest.raises( ValueError, match="line in location_offset is 1-indexed and must be positive\\.", ): create_source((0, 1)) - with raises( + with pytest.raises( ValueError, match="line in location_offset is 1-indexed and must be positive\\.", ): create_source((-1, 1)) - with raises( + with pytest.raises( ValueError, match="column in location_offset is 1-indexed and must be positive\\.", ): create_source((1, 0)) - with raises( + with pytest.raises( ValueError, match="column in location_offset is 1-indexed and must be positive\\.", ): diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index d16fe88d..f3fdb370 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -1,23 +1,25 @@ +from __future__ import annotations + from copy import copy from functools import partial -from typing import cast, List, Optional +from typing import Any, cast -from pytest import mark, raises +import pytest from graphql.language import ( - Node, - FieldNode, - NameNode, - SelectionNode, - SelectionSetNode, - parse, - visit, BREAK, REMOVE, SKIP, + FieldNode, + NameNode, + Node, ParallelVisitor, + SelectionNode, + SelectionSetNode, Visitor, VisitorKeyMap, + parse, + visit, ) from ..fixtures import kitchen_sink_query # noqa: F401 @@ -83,7 +85,7 @@ def get_value(node): def describe_visitor(): def visit_with_invalid_node(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker visit("invalid", Visitor()) # type: ignore assert 
str(exc_info.value) == "Not an AST Node: 'invalid'."
@@ -95,7 +97,7 @@ class TestVisitor:
             def enter(self, *_args):
                 pass

-        with raises(TypeError) as exc_info:
+        with pytest.raises(TypeError) as exc_info:
             # noinspection PyTypeChecker
             visit(ast, TestVisitor())  # type: ignore
         assert str(exc_info.value) == "Not an AST Visitor: <TestVisitor instance>."
@@ -107,61 +109,53 @@ def enter(self, node, *args):
                 assert isinstance(self, TestVisitorWithInstanceMethods)
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"enter:{node.kind}")
-                pass

             def leave(self, node, *args):
                 assert isinstance(self, TestVisitorWithInstanceMethods)
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"leave:{node.kind}")
-                pass

             def enter_field(self, node, *args):
                 assert isinstance(self, TestVisitorWithInstanceMethods)
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"enter_field:{node.kind}")
-                pass

             def leave_field(self, node, *args):
                 assert isinstance(self, TestVisitorWithInstanceMethods)
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"leave_field:{node.kind}")
-                pass

         class TestVisitorWithClassMethods(Visitor):
             @classmethod
-            def enter(cls, node, *args):
+            def enter(cls, node, *args) -> None:
                 assert cls is TestVisitorWithClassMethods
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"enter:{node.kind}")
-                pass

             @classmethod
-            def leave(cls, node, *args):
+            def leave(cls, node, *args) -> None:
                 assert cls is TestVisitorWithClassMethods
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"leave:{node.kind}")
-                pass

             @classmethod
-            def enter_field(cls, node, *args):
+            def enter_field(cls, node, *args) -> None:
                 assert cls is TestVisitorWithClassMethods
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"enter_field:{node.kind}")
-                pass

             @classmethod
-            def leave_field(cls, node, *args):
+            def leave_field(cls, node, *args) -> None:
                 assert cls is TestVisitorWithClassMethods
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"leave_field:{node.kind}")
-                pass

         class TestVisitorWithStaticMethods(Visitor):
             @staticmethod
@@ -169,28 +163,24 @@ def enter(node, *args):
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"enter:{node.kind}")
-                pass

             @staticmethod
             def leave(node, *args):
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"leave:{node.kind}")
-                pass

             @staticmethod
             def enter_field(node, *args):
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"enter_field:{node.kind}")
-                pass

             @staticmethod
             def leave_field(node, *args):
                 assert isinstance(node, Node)
                 assert len(args) == 4
                 visited.append(f"leave_field:{node.kind}")
-                pass

         for visitor_class in (
             TestVisitorWithInstanceMethods,
@@ -198,7 +188,7 @@
             TestVisitorWithStaticMethods,
         ):
             ast = parse("{ a }")
-            visited: List[str] = []
+            visited: list[str] = []
             visit(ast, visitor_class())
             assert visited == [
                 "enter:document",
@@ -242,12 +232,6 @@ def leave_document(*args):
             visitor.leave,
         )

-        # also test deprecated method
-        assert visitor.get_visit_fn("document") == visitor.enter_document
-        assert visitor.get_visit_fn("field") == visitor.enter
-        assert visitor.get_visit_fn("document", True) == visitor.leave_document
-        assert visitor.get_visit_fn("field", True) == visitor.leave
-
     def validates_path_argument():
         ast = parse("{ a }", no_location=True)
         visited = []
@@ -348,7 +332,9 @@ def leave_operation_definition(self, *args):
         assert edited_ast == ast
         assert visited == ["enter", "leave"]

-    @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE",
"Ellipsis")) + @pytest.mark.parametrize( + "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"] + ) def allows_for_editing_on_enter(remove_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) @@ -359,12 +345,15 @@ def enter(*args): node = args[0] if isinstance(node, FieldNode) and node.name.value == "b": return remove_action + return None edited_ast = visit(ast, TestVisitor()) assert ast == parse("{ a, b, c { a, b, c } }", no_location=True) assert edited_ast == parse("{ a, c { a, c } }", no_location=True) - @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis")) + @pytest.mark.parametrize( + "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"] + ) def allows_for_editing_on_leave(remove_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) @@ -375,18 +364,19 @@ def leave(*args): node = args[0] if isinstance(node, FieldNode) and node.name.value == "b": return remove_action + return None edited_ast = visit(ast, TestVisitor()) assert ast == parse("{ a, b, c { a, b, c } }", no_location=True) assert edited_ast == parse("{ a, c { a, c } }", no_location=True) - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def ignores_false_returned_on_leave(skip_action): ast = parse("{ a, b, c { a, b, c } }", no_location=True) class TestVisitor(Visitor): @staticmethod - def leave(*args): + def leave(*_args): return skip_action returned_ast = visit(ast, TestVisitor()) @@ -407,16 +397,18 @@ def enter(self, *args): assert node.selection_set node.selection_set.selections = ( added_field, - ) + node.selection_set.selections + *node.selection_set.selections, + ) return node if node == added_field: self.did_visit_added_field = True + return None visitor = TestVisitor() visit(ast, visitor) assert visitor.did_visit_added_field - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def allows_skipping_a_sub_tree(skip_action): ast = parse("{ a, b { x }, c }", no_location=True) visited = [] @@ -430,6 +422,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "field" and node.name.value == "b": return skip_action + return None @staticmethod def leave(*args): @@ -457,7 +450,7 @@ def leave(*args): ["leave", "document", None], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_visiting(break_action): ast = parse("{ a, b { x }, c }", no_location=True) visited = [] @@ -471,6 +464,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "name" and node.value == "x": return break_action + return None @staticmethod def leave(*args): @@ -496,7 +490,7 @@ def leave(*args): ["enter", "name", "x"], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_leaving(break_action): ast = parse("{ a, b { x }, c }", no_location=True) visited = [] @@ -517,6 +511,7 @@ def leave(*args): visited.append(["leave", kind, value]) if kind == "name" and node.value == "x": return break_action + return None visit(ast, TestVisitor()) assert visited == [ @@ -584,11 +579,14 @@ class CustomFieldNode(SelectionNode): __slots__ = "name", "selection_set" name: NameNode - selection_set: 
Optional[SelectionSetNode] + selection_set: SelectionSetNode | None - custom_selection_set = cast(FieldNode, custom_ast.definitions[0]).selection_set + custom_selection_set = cast( + "FieldNode", custom_ast.definitions[0] + ).selection_set assert custom_selection_set is not None - custom_selection_set.selections = custom_selection_set.selections + ( + custom_selection_set.selections = ( + *custom_selection_set.selections, CustomFieldNode( name=NameNode(value="NameNodeToBeSkipped"), selection_set=SelectionSetNode( @@ -662,7 +660,7 @@ def leave(node, *_args): ] def cannot_define_visitor_with_unknown_ast_nodes(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: class VisitorWithNonExistingNode(Visitor): def enter_field(self, *_args): @@ -673,7 +671,7 @@ def leave_garfield(self, *_args): assert str(exc_info.value) == "Invalid AST node kind: garfield." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: class VisitorWithUnspecificNode(Visitor): def enter_type_system_extension(self, *_args): @@ -738,10 +736,10 @@ def leave(*args): # noinspection PyShadowingNames def visits_kitchen_sink(kitchen_sink_query): # noqa: F811 - ast = parse(kitchen_sink_query) - visited: List = [] + ast = parse(kitchen_sink_query, experimental_client_controlled_nullability=True) + visited: list[Any] = [] record = visited.append - arg_stack: List = [] + arg_stack: list[Any] = [] push = arg_stack.append pop = arg_stack.pop @@ -884,6 +882,272 @@ def leave(*args): ["leave", "field", 1, None], ["leave", "selection_set", "selection_set", "field"], ["leave", "field", 0, None], + ["enter", "field", 1, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 1, None], + ["enter", "field", 2, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "error_boundary", "nullability_assertion", "field"], + ["leave", "error_boundary", "nullability_assertion", "field"], + ["leave", "field", 2, None], + ["enter", "field", 3, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 3, None], + ["enter", "field", 4, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "argument", 0, None], + ["enter", "name", "name", "argument"], + ["leave", "name", "name", "argument"], + ["enter", "int_value", "value", "argument"], + ["leave", "int_value", "value", "argument"], + ["leave", "argument", 0, None], + ["enter", "directive", 0, None], + ["enter", "name", "name", "directive"], + ["leave", "name", "name", "directive"], + ["leave", "directive", 0, None], + ["enter", "selection_set", "selection_set", "field"], + ["enter", "field", 0, None], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["leave", "field", 0, None], + ["leave", "selection_set", "selection_set", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 4, None], + ["enter", "field", 5, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", 
"field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 5, None], + ["enter", "field", 6, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "list_nullability_operator", "nullability_assertion", "field"], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + ["leave", "list_nullability_operator", "nullability_assertion", "field"], + ["leave", "field", 6, None], + ["enter", "field", 7, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 7, None], + ["enter", "field", 8, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "error_boundary", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + ["leave", "error_boundary", "nullability_assertion", "field"], + ["leave", "field", 8, None], + ["enter", "field", 9, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "list_nullability_operator", "nullability_assertion", "field"], + [ + "enter", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + ["leave", "list_nullability_operator", "nullability_assertion", "field"], + ["leave", "field", 9, None], + ["enter", "field", 10, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "error_boundary", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + [ + "enter", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "error_boundary", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "error_boundary", + ], + ["leave", 
"error_boundary", "nullability_assertion", "field"], + ["leave", "field", 10, None], + ["enter", "field", 11, None], + ["enter", "name", "alias", "field"], + ["leave", "name", "alias", "field"], + ["enter", "name", "name", "field"], + ["leave", "name", "name", "field"], + ["enter", "non_null_assertion", "nullability_assertion", "field"], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "enter", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "enter", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + [ + "leave", + "non_null_assertion", + "nullability_assertion", + "list_nullability_operator", + ], + [ + "leave", + "list_nullability_operator", + "nullability_assertion", + "non_null_assertion", + ], + ["leave", "non_null_assertion", "nullability_assertion", "field"], + ["leave", "field", 11, None], ["leave", "selection_set", "selection_set", "inline_fragment"], ["leave", "inline_fragment", 1, None], ["enter", "inline_fragment", 2, None], @@ -1117,7 +1381,7 @@ def leave(*args): def describe_visit_in_parallel(): - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def allows_skipping_a_sub_tree(skip_action): # Note: nearly identical to the above test but using ParallelVisitor ast = parse("{ a, b { x }, c }") @@ -1132,6 +1396,7 @@ def enter(*args): visited.append(["enter", kind, value]) if kind == "field" and node.name.value == "b": return skip_action + return None @staticmethod def leave(*args): @@ -1159,7 +1424,7 @@ def leave(*args): ["leave", "document", None], ] - @mark.parametrize("skip_action", (SKIP, False), ids=("SKIP", "False")) + @pytest.mark.parametrize("skip_action", [SKIP, False], ids=["SKIP", "False"]) def allows_skipping_different_sub_trees(skip_action): ast = parse("{ a { x }, b { y} }") visited = [] @@ -1177,6 +1442,7 @@ def enter(self, *args): visited.append([f"no-{name}", "enter", kind, value]) if kind == "field" and node.name.value == name: return skip_action + return None def leave(self, *args): check_visitor_fn_args(ast, *args) @@ -1223,7 +1489,7 @@ def leave(self, *args): ["no-b", "leave", "document", None], ] - @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True")) + @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"]) def allows_early_exit_while_visiting(break_action): # Note: nearly identical to the above test but using ParallelVisitor. 
-    @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True"))
+    @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"])
     def allows_early_exit_while_visiting(break_action):
         # Note: nearly identical to the above test but using ParallelVisitor.
         ast = parse("{ a, b { x }, c }")
@@ -1238,6 +1504,7 @@ def enter(*args):
                 visited.append(["enter", kind, value])
                 if kind == "name" and node.value == "x":
                     return break_action
+                return None
 
             @staticmethod
             def leave(*args):
@@ -1263,7 +1530,7 @@ def leave(*args):
             ["enter", "name", "x"],
         ]
 
-    @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True"))
+    @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"])
     def allows_early_exit_from_different_points(break_action):
         ast = parse("{ a { y }, b { x } }")
         visited = []
@@ -1281,6 +1548,7 @@ def enter(self, *args):
                 visited.append([f"break-{name}", "enter", kind, value])
                 if kind == "name" and node.value == name:
                     return break_action
+                return None
 
             def leave(self, *args):
                 assert self.name == "b"
@@ -1314,7 +1582,7 @@ def leave(self, *args):
             ["break-b", "enter", "name", "b"],
         ]
 
-    @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True"))
+    @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"])
     def allows_early_exit_while_leaving(break_action):
         # Note: nearly identical to the above test but using ParallelVisitor.
         ast = parse("{ a, b { x }, c }")
@@ -1336,6 +1604,7 @@ def leave(*args):
                 visited.append(["leave", kind, value])
                 if kind == "name" and node.value == "x":
                     return break_action
+                return None
 
         visit(ast, ParallelVisitor([TestVisitor()]))
         assert visited == [
@@ -1355,7 +1624,7 @@ def leave(*args):
             ["leave", "name", "x"],
         ]
 
-    @mark.parametrize("break_action", (BREAK, True), ids=("BREAK", "True"))
+    @pytest.mark.parametrize("break_action", [BREAK, True], ids=["BREAK", "True"])
     def allows_early_exit_from_leaving_different_points(break_action):
         ast = parse("{ a { y }, b { x } }")
         visited = []
@@ -1380,6 +1649,7 @@ def leave(self, *args):
                 visited.append([f"break-{name}", "leave", kind, value])
                 if kind == "field" and node.name.value == name:
                     return break_action
+                return None
 
         visit(ast, ParallelVisitor([TestVisitor("a"), TestVisitor("b")]))
         assert visited == [
@@ -1421,7 +1691,9 @@ def leave(self, *args):
             ["break-b", "leave", "field", None],
         ]
 
-    @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis"))
+    @pytest.mark.parametrize(
+        "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"]
+    )
     def allows_for_editing_on_enter(remove_action):
         ast = parse("{ a, b, c { a, b, c } }", no_location=True)
         visited = []
@@ -1433,6 +1705,7 @@ def enter(*args):
                 node = args[0]
                 if node.kind == "field" and node.name.value == "b":
                     return remove_action
+                return None
 
         class TestVisitor2(Visitor):
             @staticmethod
@@ -1479,7 +1752,9 @@ def leave(*args):
             ["leave", "document", None],
         ]
 
-    @mark.parametrize("remove_action", (REMOVE, Ellipsis), ids=("REMOVE", "Ellipsis"))
+    @pytest.mark.parametrize(
+        "remove_action", [REMOVE, Ellipsis], ids=["REMOVE", "Ellipsis"]
+    )
     def allows_for_editing_on_leave(remove_action):
         ast = parse("{ a, b, c { a, b, c } }", no_location=True)
         visited = []
@@ -1491,6 +1766,7 @@ def leave(*args):
                 node = args[0]
                 if node.kind == "field" and node.name.value == "b":
                     return remove_action
+                return None
 
         class TestVisitor2(Visitor):
             @staticmethod
diff --git a/tests/pyutils/test_async_reduce.py b/tests/pyutils/test_async_reduce.py
new file mode 100644
index 00000000..0ac606c8
--- /dev/null
+++ b/tests/pyutils/test_async_reduce.py
@@ -0,0 +1,63 @@
+from functools import reduce
+
+import pytest
+
+from graphql.pyutils import async_reduce, is_awaitable
+
+
+def describe_async_reduce():
+    def works_like_reduce_for_lists_of_ints():
+        initial_value = -15
+
+        def callback(accumulator, current_value):
+            return accumulator + current_value
+
+        values = range(7, 13)
+        result = async_reduce(callback, values, initial_value)
+        assert result == 42
+        assert result == reduce(callback, values, initial_value)
+
+    @pytest.mark.asyncio
+    async def works_with_sync_values_and_sync_initial_value():
+        def callback(accumulator, current_value):
+            return accumulator + "-" + current_value
+
+        values = ["bar", "baz"]
+        result = async_reduce(callback, values, "foo")
+        assert not is_awaitable(result)
+        assert result == "foo-bar-baz"
+
+    @pytest.mark.asyncio
+    async def works_with_async_initial_value():
+        async def async_initial_value():
+            return "foo"
+
+        def callback(accumulator, current_value):
+            return accumulator + "-" + current_value
+
+        values = ["bar", "baz"]
+        result = async_reduce(callback, values, async_initial_value())
+        assert is_awaitable(result)
+        assert await result == "foo-bar-baz"
+
+    @pytest.mark.asyncio
+    async def works_with_async_callback():
+        async def async_callback(accumulator, current_value):
+            return accumulator + "-" + current_value
+
+        values = ["bar", "baz"]
+        result = async_reduce(async_callback, values, "foo")
+        assert is_awaitable(result)
+        assert await result == "foo-bar-baz"
+
+    @pytest.mark.asyncio
+    async def works_with_async_callback_and_async_initial_value():
+        async def async_initial_value():
+            return 1 / 8
+
+        async def async_callback(accumulator, current_value):
+            return accumulator * current_value
+
+        result = async_reduce(async_callback, range(6, 9), async_initial_value())
+        assert is_awaitable(result)
+        assert await result == 42
diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py
index af87ccb6..781ab14e 100644
--- a/tests/pyutils/test_description.py
+++ b/tests/pyutils/test_description.py
@@ -1,26 +1,26 @@
 from contextlib import contextmanager
 from typing import cast
 
-from pytest import raises
+import pytest
 
 from graphql import graphql_sync
+from graphql.pyutils import (
+    Description,
+    is_description,
+    register_description,
+    unregister_description,
+)
 from graphql.type import (
     GraphQLArgument,
     GraphQLDirective,
     GraphQLEnumValue,
     GraphQLField,
     GraphQLInputField,
-    GraphQLObjectType,
     GraphQLNamedType,
+    GraphQLObjectType,
     GraphQLSchema,
     GraphQLString,
 )
-from graphql.pyutils import (
-    Description,
-    is_description,
-    register_description,
-    unregister_description,
-)
 from graphql.utilities import get_introspection_query, print_schema
 
 from ..utils import dedent
@@ -34,7 +34,7 @@ def __str__(self) -> str:
         return str(self.text)
 
-lazy_string = cast(str, LazyString("Why am I so lazy?"))
+lazy_string = cast("str", LazyString("Why am I so lazy?"))
 
 
 @contextmanager
@@ -43,7 +43,7 @@ def registered(base: type):
     try:
         yield None
     finally:
-        unregister_description(LazyString)
+        unregister_description(base)
 
 
 def describe_description():
@@ -95,12 +95,12 @@ def can_register_and_unregister():
         Description.bases = str
 
     def can_only_register_types():
-        with raises(TypeError, match="Only types can be registered\\."):
+        with pytest.raises(TypeError, match="Only types can be registered\\."):
             # noinspection PyTypeChecker
             register_description("foo")  # type: ignore
 
     def can_only_unregister_types():
-        with raises(TypeError, match="Only types can be unregistered\\."):
+        with pytest.raises(TypeError, match="Only types can be unregistered\\."):
             # noinspection PyTypeChecker
             unregister_description("foo")  # type: ignore
@@ -109,26 +109,17 @@ def graphql_named_type():
         named_type = GraphQLNamedType(name="Foo", description="not lazy")
         assert named_type.name == "Foo"
         assert named_type.description == "not lazy"
-        with raises(TypeError, match="Expected name to be a string\\."):
-            GraphQLNamedType(name=lazy_string)
-        with raises(TypeError, match="The description must be a string\\."):
-            GraphQLNamedType(name="Foo", description=lazy_string)
         with registered(LazyString):
             named_type = GraphQLNamedType(name="Foo", description=lazy_string)
             assert named_type.description is lazy_string
             assert str(named_type.description).endswith("lazy?")
-        with raises(TypeError, match="Expected name to be a string\\."):
-            GraphQLNamedType(name=lazy_string)
 
     def graphql_field():
         field = GraphQLField(GraphQLString, description="not lazy")
         assert field.description == "not lazy"
         field = GraphQLField(GraphQLString, deprecation_reason="not lazy")
         assert field.deprecation_reason == "not lazy"
-        with raises(TypeError, match="The description must be a string\\."):
-            GraphQLField(GraphQLString, description=lazy_string)
-        with raises(TypeError, match="The deprecation reason must be a string\\."):
-            GraphQLField(GraphQLString, deprecation_reason=lazy_string)
+        GraphQLField(GraphQLString, description=lazy_string)
         with registered(LazyString):
             field = GraphQLField(
                 GraphQLString,
@@ -143,8 +134,6 @@ def graphql_field():
     def graphql_argument():
         arg = GraphQLArgument(GraphQLString, description="not lazy")
         assert arg.description == "not lazy"
-        with raises(TypeError, match="Argument description must be a string\\."):
-            GraphQLArgument(GraphQLString, description=lazy_string)
         with registered(LazyString):
             arg = GraphQLArgument(GraphQLString, description=lazy_string)
             assert arg.description is lazy_string
@@ -155,15 +144,6 @@ def graphql_enum_value():
         assert value.description == "not lazy"
         value = GraphQLEnumValue(deprecation_reason="not lazy")
         assert value.deprecation_reason == "not lazy"
-        with raises(
-            TypeError, match="The description of the enum value must be a string\\."
- ): - GraphQLEnumValue(description=lazy_string) - with raises( - TypeError, - match="The deprecation reason for the enum value must be a string\\.", - ): - GraphQLEnumValue(deprecation_reason=lazy_string) with registered(LazyString): value = GraphQLEnumValue( description=lazy_string, deprecation_reason=lazy_string @@ -176,8 +156,6 @@ def graphql_enum_value(): def graphql_input_field(): field = GraphQLInputField(GraphQLString, description="not lazy") assert field.description == "not lazy" - with raises(TypeError, match="Input field description must be a string\\."): - GraphQLInputField(GraphQLString, description=lazy_string) with registered(LazyString): field = GraphQLInputField(GraphQLString, description=lazy_string) assert field.description is lazy_string @@ -187,16 +165,10 @@ def graphql_directive(): directive = GraphQLDirective("Foo", [], description="not lazy") assert directive.name == "Foo" assert directive.description == "not lazy" - with raises(TypeError, match="Expected name to be a string\\."): - GraphQLDirective(lazy_string, []) - with raises(TypeError, match="Foo description must be a string\\."): - GraphQLDirective("Foo", [], description=lazy_string) with registered(LazyString): directive = GraphQLDirective("Foo", [], description=lazy_string) assert directive.description is lazy_string assert str(directive.description).endswith("lazy?") - with raises(TypeError, match="Expected name to be a string\\."): - GraphQLDirective(lazy_string, []) def handels_introspection(): class Lazy: @@ -214,8 +186,8 @@ def __str__(self) -> str: with registered(Lazy): field = GraphQLField( GraphQLString, - description=cast(str, description), - deprecation_reason=cast(str, deprecation_reason), + description=cast("str", description), + deprecation_reason=cast("str", deprecation_reason), ) schema = GraphQLSchema(GraphQLObjectType("Query", {"lazyField": field})) @@ -250,8 +222,8 @@ def __str__(self) -> str: with registered(Lazy): field = GraphQLField( GraphQLString, - description=cast(str, description), - deprecation_reason=cast(str, deprecation_reason), + description=cast("str", description), + deprecation_reason=cast("str", deprecation_reason), ) schema = GraphQLSchema(GraphQLObjectType("Query", {"lazyField": field})) diff --git a/tests/pyutils/test_format_list.py b/tests/pyutils/test_format_list.py new file mode 100644 index 00000000..09567645 --- /dev/null +++ b/tests/pyutils/test_format_list.py @@ -0,0 +1,39 @@ +import pytest + +from graphql.pyutils import and_list, or_list + + +def describe_and_list(): + def does_not_accept_an_empty_list(): + with pytest.raises(ValueError, match="Missing list items to be formatted"): + and_list([]) + + def handles_single_item(): + assert and_list(["A"]) == "A" + + def handles_two_items(): + assert and_list(["A", "B"]) == "A and B" + + def handles_three_items(): + assert and_list(["A", "B", "C"]) == "A, B, and C" + + def handles_more_than_five_items(): + assert and_list(["A", "B", "C", "D", "E", "F"]) == "A, B, C, D, E, and F" + + +def describe_or_list(): + def does_not_accept_an_empty_list(): + with pytest.raises(ValueError, match="Missing list items to be formatted"): + or_list([]) + + def handles_single_item(): + assert or_list(["A"]) == "A" + + def handles_two_items(): + assert or_list(["A", "B"]) == "A or B" + + def handles_three_items(): + assert or_list(["A", "B", "C"]) == "A, B, or C" + + def handles_more_than_five_items(): + assert or_list(["A", "B", "C", "D", "E", "F"]) == "A, B, C, D, E, or F" diff --git a/tests/pyutils/test_frozen_dict.py 
b/tests/pyutils/test_frozen_dict.py deleted file mode 100644 index 594f4826..00000000 --- a/tests/pyutils/test_frozen_dict.py +++ /dev/null @@ -1,97 +0,0 @@ -from copy import copy, deepcopy - -from pytest import raises - -from graphql.pyutils import FrozenError, FrozenDict - - -def describe_frozen_list(): - def can_read(): - fd = FrozenDict({1: 2, 3: 4}) - assert fd == {1: 2, 3: 4} - assert list(i for i in fd) == [1, 3] - assert fd.copy() == fd - assert 3 in fd - assert 2 not in fd - assert fd[1] == 2 - with raises(KeyError): - # noinspection PyStatementEffect - fd[2] - assert len(fd) == 2 - assert fd.get(1) == 2 - assert fd.get(2, 5) == 5 - assert list(fd.items()) == [(1, 2), (3, 4)] - assert list(fd.keys()) == [1, 3] - assert list(fd.values()) == [2, 4] - - def cannot_write(): - fd = FrozenDict({1: 2, 3: 4}) - with raises(FrozenError): - fd[1] = 2 - with raises(FrozenError): - fd[4] = 5 - with raises(FrozenError): - del fd[1] - with raises(FrozenError): - del fd[3] - with raises(FrozenError): - fd.clear() - with raises(FrozenError): - fd.pop(1) - with raises(FrozenError): - fd.pop(4, 5) - with raises(FrozenError): - fd.popitem() - with raises(FrozenError): - fd.setdefault(1, 2) - with raises(FrozenError): - fd.setdefault(4, 5) - with raises(FrozenError): - fd.update({1: 2}) - with raises(FrozenError): - fd.update({4: 5}) - with raises(FrozenError): - fd += {4: 5} - assert fd == {1: 2, 3: 4} - - def can_hash(): - fd1 = FrozenDict({1: 2, 3: 4}) - fd2 = FrozenDict({1: 2, 3: 4}) - assert fd2 == fd1 - assert fd2 is not fd1 - assert hash(fd2) is not hash(fd1) - fd3 = FrozenDict({1: 2, 3: 5}) - assert fd3 != fd1 - assert hash(fd3) != hash(fd1) - - def can_copy(): - fd1 = FrozenDict({1: 2, 3: 4}) - fd2 = fd1.copy() - assert isinstance(fd2, FrozenDict) - assert fd2 == fd1 - assert hash(fd2) == hash(fd1) - assert fd2 is not fd1 - fd3 = copy(fd1) - assert isinstance(fd3, FrozenDict) - assert fd3 == fd1 - assert hash(fd3) == hash(fd1) - assert fd3 is not fd1 - - def can_deep_copy(): - fd11 = FrozenDict({1: 2, 3: 4}) - fd12 = FrozenDict({2: 1, 4: 3}) - fd1 = FrozenDict({1: fd11, 2: fd12}) - assert fd1[1] is fd11 - assert fd1[2] is fd12 - fd2 = deepcopy(fd1) - assert isinstance(fd2, FrozenDict) - assert fd2 == fd1 - assert hash(fd2) == hash(fd1) - fd21 = fd2[1] - fd22 = fd2[2] - assert isinstance(fd21, FrozenDict) - assert isinstance(fd22, FrozenDict) - assert fd21 == fd11 - assert fd21 is not fd11 - assert fd22 == fd12 - assert fd22 is not fd12 diff --git a/tests/pyutils/test_frozen_list.py b/tests/pyutils/test_frozen_list.py deleted file mode 100644 index fea86bff..00000000 --- a/tests/pyutils/test_frozen_list.py +++ /dev/null @@ -1,113 +0,0 @@ -from copy import copy, deepcopy - -from pytest import raises - -from graphql.pyutils import FrozenError, FrozenList - - -def describe_frozen_list(): - def can_read(): - fl = FrozenList([1, 2, 3]) - assert fl == [1, 2, 3] - assert list(i for i in fl) == fl - assert fl.copy() == fl - assert 2 in fl - assert 4 not in fl - assert fl + [4, 5] == [1, 2, 3, 4, 5] - assert [4, 5] + fl == [4, 5, 1, 2, 3] - assert fl * 2 == [1, 2, 3, 1, 2, 3] - assert 2 * fl == [1, 2, 3, 1, 2, 3] - assert fl[1] == 2 - with raises(IndexError): - fl[3] - assert fl[1:4] == [2, 3] - assert fl[::2] == [1, 3] - assert len(fl) == 3 - assert min(fl) == 1 - assert max(fl) == 3 - assert sum(fl) == 6 - assert fl.index(2) == 1 - with raises(ValueError): - fl.index(4) - assert fl.count(2) == 1 - assert fl.count(4) == 0 - assert list(reversed(fl)) == [3, 2, 1] - assert sorted(fl) == [1, 2, 3] - - 
def cannot_write(): - fl = FrozenList([1, 2, 3]) - with raises(FrozenError): - fl[1] = 4 - with raises(FrozenError): - fl[1:4] = [4] - with raises(FrozenError): - del fl[1] - with raises(FrozenError): - del fl[1:4] - with raises(FrozenError): - fl[1::2] = [4] - with raises(FrozenError): - del fl[::2] - with raises(FrozenError): - fl.append(4) - with raises(FrozenError): - fl.clear() - with raises(FrozenError): - fl.extend([4]) - with raises(FrozenError): - fl += [4] - with raises(FrozenError): - fl *= 2 - with raises(FrozenError): - fl.insert(1, 4) - with raises(FrozenError): - fl.pop() - with raises(FrozenError): - fl.remove(2) - with raises(FrozenError): - fl.sort() - with raises(FrozenError): - fl.reverse() - assert fl == [1, 2, 3] - - def can_add_rol(): - fl1 = FrozenList([1, 2]) - rol2 = FrozenList([3, 4]) - assert fl1 + rol2 == [1, 2, 3, 4] - - def can_add_tuple(): - fl = FrozenList([1, 2]) - assert fl + (3, 4) == [1, 2, 3, 4] - - def can_hash(): - fl1 = FrozenList([1, 2]) - fl2 = FrozenList([1, 2]) - assert fl2 == fl1 - assert fl2 is not fl1 - assert hash(fl2) == hash(fl1) - fl3 = FrozenList([1, 3]) - assert fl3 != fl1 - assert hash(fl3) != hash(fl1) - - def can_copy(): - fl1 = FrozenList([1, 2]) - fl2 = copy(fl1) - assert isinstance(fl2, FrozenList) - assert fl2 == fl1 - assert hash(fl2) == hash(fl1) - assert fl2 is not fl1 - - def can_deep_copy(): - fl11 = FrozenList([1, 2]) - fl12 = FrozenList([2, 1]) - fl1 = FrozenList([fl11, fl12]) - fl2 = deepcopy(fl1) - assert isinstance(fl2, FrozenList) - assert fl2 == fl1 - assert hash(fl2) == hash(fl1) - assert isinstance(fl2[0], FrozenList) - assert isinstance(fl2[1], FrozenList) - assert fl2[0] == fl1[0] - assert fl2[0] is not fl1[0] - assert fl2[1] == fl1[1] - assert fl2[1] is not fl1[1] diff --git a/tests/pyutils/test_group_by.py b/tests/pyutils/test_group_by.py index 1309fb6f..cafe4742 100644 --- a/tests/pyutils/test_group_by.py +++ b/tests/pyutils/test_group_by.py @@ -4,7 +4,7 @@ def describe_group_by(): def does_accept_an_empty_list(): def key_fn(_x: str) -> str: - raise TypeError("Unexpected call of key function.") + raise TypeError("Unexpected call of key function.") # pragma: no cover assert group_by([], key_fn) == {} diff --git a/tests/pyutils/test_identity_func.py b/tests/pyutils/test_identity_func.py index b3ca5143..6c4c33db 100644 --- a/tests/pyutils/test_identity_func.py +++ b/tests/pyutils/test_identity_func.py @@ -1,4 +1,4 @@ -from graphql.pyutils import identity_func, Undefined +from graphql.pyutils import Undefined, identity_func def describe_identity_func(): diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index 6ace1fcd..94c62b48 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -1,18 +1,20 @@ -from math import nan, inf +from __future__ import annotations + from contextlib import contextmanager from importlib import import_module -from typing import Any, Dict, FrozenSet, List, Set, Tuple +from math import inf, nan +from typing import Any -from pytest import mark +import pytest -from graphql.pyutils import inspect, Undefined +from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLDirective, GraphQLField, GraphQLInt, GraphQLList, - GraphQLObjectType, GraphQLNonNull, + GraphQLObjectType, GraphQLString, ) @@ -137,7 +139,7 @@ def test_generator(): assert inspect(test_generator) == "" assert inspect(test_generator()) == "" - @mark.asyncio + @pytest.mark.asyncio async def inspect_coroutine(): async def test_coroutine(): pass @@ -166,13 +168,13 
@@ def inspect_lists(): assert inspect([["a", "b"], "c"]) == "[['a', 'b'], 'c']" def inspect_overly_large_list(): - s: List[int] = list(range(20)) + s: list[int] = list(range(20)) assert inspect(s) == "[0, 1, 2, 3, 4, ..., 16, 17, 18, 19]" with increased_list_size(): assert inspect(s) == repr(s) def inspect_overly_nested_list(): - s: List[List[List]] = [[[]]] + s: list[list[list]] = [[[]]] assert inspect(s) == "[[[]]]" s = [[[1, 2, 3]]] assert inspect(s) == "[[[...]]]" @@ -180,7 +182,7 @@ def inspect_overly_nested_list(): assert inspect(s) == repr(s) def inspect_recursive_list(): - s: List[Any] = [1, 2, 3] + s: list[Any] = [1, 2, 3] s[1] = s assert inspect(s) == "[1, [...], 3]" @@ -198,7 +200,7 @@ def inspect_overly_large_tuple(): assert inspect(s) == repr(s) def inspect_overly_nested_tuple(): - s: Tuple[Tuple[Tuple]] = (((),),) + s: tuple[tuple[tuple]] = (((),),) assert inspect(s) == "(((),),)" s = (((1, 2, 3),),) assert inspect(s) == "(((...),),)" @@ -206,7 +208,7 @@ def inspect_overly_nested_tuple(): assert inspect(s) == repr(s) def inspect_recursive_tuple(): - s: List[Any] = [1, 2, 3] + s: list[Any] = [1, 2, 3] s[1] = s t = tuple(s) assert inspect(t) == "(1, [1, [...], 3], 3)" @@ -239,7 +241,7 @@ def inspect_overly_large_dict(): assert inspect(s) == repr(s) def inspect_overly_nested_dict(): - s: Dict[str, Dict[str, Dict]] = {"a": {"b": {}}} + s: dict[str, dict[str, dict]] = {"a": {"b": {}}} assert inspect(s) == "{'a': {'b': {}}}" s = {"a": {"b": {"c": 3}}} assert inspect(s) == "{'a': {'b': {...}}}" @@ -247,7 +249,7 @@ def inspect_overly_nested_dict(): assert inspect(s) == repr(s) def inspect_recursive_dict(): - s: Dict[int, Any] = {} + s: dict[int, Any] = {} s[1] = s assert inspect(s) == "{1: {...}}" @@ -259,14 +261,16 @@ def inspect_sets(): def inspect_overly_large_set(): s = set(range(20)) r = inspect(s) - assert r.startswith("{") and r.endswith("}") - assert "..., " in r and "5" not in s # sets are unordered + assert r.startswith("{") + assert r.endswith("}") + assert "..., " in r + assert "5" not in s # sets are unordered assert len(r) == 36 with increased_list_size(): assert inspect(s) == repr(s) def inspect_overly_nested_set(): - s: List[List[Set]] = [[set()]] + s: list[list[set]] = [[set()]] assert inspect(s) == "[[set()]]" s = [[{1, 2, 3}]] assert inspect(s) == "[[set(...)]]" @@ -284,14 +288,16 @@ def inspect_frozensets(): def inspect_overly_large_frozenset(): s = frozenset(range(20)) r = inspect(s) - assert r.startswith("frozenset({") and r.endswith("})") - assert "..., " in r and "5" not in s # frozensets are unordered + assert r.startswith("frozenset({") + assert r.endswith("})") + assert "..., " in r + assert "5" not in s # frozensets are unordered assert len(r) == 47 with increased_list_size(): assert inspect(s) == repr(s) def inspect_overly_nested_frozenset(): - s: FrozenSet[FrozenSet[FrozenSet]] = frozenset([frozenset([frozenset()])]) + s: frozenset[frozenset[frozenset]] = frozenset([frozenset([frozenset()])]) assert inspect(s) == "frozenset({frozenset({frozenset()})})" s = frozenset([frozenset([frozenset([1, 2, 3])])]) assert inspect(s) == "frozenset({frozenset({frozenset(...)})})" diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index 896697d5..b05f01af 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -1,7 +1,8 @@ import asyncio from inspect import isawaitable +from sys import version_info as python_version -from pytest import mark +import pytest from graphql.pyutils import is_awaitable @@ 
-60,45 +61,59 @@ def some_generator(): assert not is_awaitable(some_generator()) def declines_a_coroutine_function(): - async def some_coroutine(): + async def some_async_function(): return True # pragma: no cover - assert not isawaitable(some_coroutine) - assert not is_awaitable(some_coroutine) + assert not isawaitable(some_async_function) + assert not is_awaitable(some_async_function) - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + @pytest.mark.asyncio async def recognizes_a_coroutine_object(): - async def some_coroutine(): - return False # pragma: no cover + async def some_async_function(): + return True + + some_coroutine = some_async_function() - assert isawaitable(some_coroutine()) - assert is_awaitable(some_coroutine()) + assert isawaitable(some_coroutine) + assert is_awaitable(some_coroutine) - @mark.filterwarnings("ignore::Warning") # Deprecation and Runtime - def recognizes_an_old_style_coroutine(): - @asyncio.coroutine - def some_old_style_coroutine(): - yield False # pragma: no cover + assert await some_coroutine is True - assert is_awaitable(some_old_style_coroutine()) - assert is_awaitable(some_old_style_coroutine()) + @pytest.mark.filterwarnings("ignore::Warning") # Deprecation and Runtime warnings + @pytest.mark.skipif( + python_version >= (3, 11), + reason="Generator-based coroutines not supported any more since Python 3.11", + ) + async def recognizes_an_old_style_coroutine(): # pragma: no cover + @asyncio.coroutine # type: ignore + def some_function(): + yield True - @mark.asyncio - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") + some_old_style_coroutine = some_function() + assert is_awaitable(some_old_style_coroutine) + assert is_awaitable(some_old_style_coroutine) + + @pytest.mark.asyncio async def recognizes_a_future_object(): - async def some_coroutine(): - return False # pragma: no cover + async def some_async_function(): + return True - some_future = asyncio.ensure_future(some_coroutine()) + some_coroutine = some_async_function() + some_future = asyncio.ensure_future(some_coroutine) assert is_awaitable(some_future) assert is_awaitable(some_future) - @mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") - def declines_an_async_generator(): - async def some_async_generator(): - yield True # pragma: no cover + assert await some_future is True + + @pytest.mark.asyncio + async def declines_an_async_generator(): + async def some_async_generator_function(): + yield True + + some_async_generator = some_async_generator_function() + + assert not isawaitable(some_async_generator) + assert not is_awaitable(some_async_generator) - assert not isawaitable(some_async_generator()) - assert not is_awaitable(some_async_generator()) + assert await some_async_generator.__anext__() is True diff --git a/tests/pyutils/test_is_iterable.py b/tests/pyutils/test_is_iterable.py index 5dbf210a..00883604 100644 --- a/tests/pyutils/test_is_iterable.py +++ b/tests/pyutils/test_is_iterable.py @@ -3,7 +3,7 @@ from decimal import Decimal from itertools import count -from graphql.pyutils import FrozenDict, FrozenList, is_collection, is_iterable +from graphql.pyutils import is_collection, is_iterable def describe_is_collection(): @@ -12,18 +12,13 @@ def should_return_true_for_lists(): assert is_collection([0, 1, 2]) is True assert is_collection(["A", "B", "C"]) is True - def should_return_true_for_frozen_lists(): - assert is_collection(FrozenList()) is True - assert is_collection(FrozenList([0, 1, 2])) is True - assert 
is_collection(FrozenList(["A", "B", "C"])) is True - def should_return_true_for_tuples(): assert is_collection(()) is True assert is_collection((0, 1, 1)) is True assert is_collection(("A", "B", "C")) is True def should_return_true_for_named_tuples(): - named = namedtuple("named", "A B C") + named = namedtuple("named", "A B C") # noqa: PYI024 assert is_collection(named(0, 1, 2)) is True def should_return_true_for_arrays(): @@ -100,11 +95,6 @@ def should_return_false_for_dicts(): assert is_collection({"__iter__": True}) is False assert is_collection({0: "A", 1: "B", 2: "C"}) is False - def should_return_false_for_frozen_dicts(): - assert is_collection(FrozenDict()) is False - assert is_collection(FrozenDict({"__iter__": True})) is False - assert is_collection(FrozenDict({0: "A", 1: "B", 2: "C"})) is False - def should_return_false_for_default_dicts(): assert is_collection(defaultdict(list)) is False @@ -126,18 +116,13 @@ def should_return_true_for_lists(): assert is_iterable([0, 1, 2]) is True assert is_iterable(["A", "B", "C"]) is True - def should_return_true_for_frozen_lists(): - assert is_iterable(FrozenList()) is True - assert is_iterable(FrozenList([0, 1, 2])) is True - assert is_iterable(FrozenList(["A", "B", "C"])) is True - def should_return_true_for_tuples(): assert is_iterable(()) is True assert is_iterable((0, 1, 1)) is True assert is_iterable(("A", "B", "C")) is True def should_return_true_for_named_tuples(): - named = namedtuple("named", "a b c") + named = namedtuple("named", "a b c") # noqa: PYI024 assert is_iterable(named(0, 1, 2)) is True def should_return_true_for_arrays(): @@ -214,11 +199,6 @@ def should_return_false_for_dicts(): assert is_iterable({"__iter__": True}) is False assert is_iterable({0: "A", 1: "B", 2: "C"}) is False - def should_return_false_for_frozen_dicts(): - assert is_iterable(FrozenDict()) is False - assert is_iterable(FrozenDict({"__iter__": True})) is False - assert is_iterable(FrozenDict({0: "A", 1: "B", 2: "C"})) is False - def should_return_false_for_default_dicts(): assert is_iterable(defaultdict(list)) is False diff --git a/tests/pyutils/test_ref_map.py b/tests/pyutils/test_ref_map.py new file mode 100644 index 00000000..96e15c58 --- /dev/null +++ b/tests/pyutils/test_ref_map.py @@ -0,0 +1,124 @@ +import pytest + +from graphql.pyutils import RefMap + +obj1 = {"a": 1, "b": 2, "c": 3} +obj2 = obj1.copy() +obj3 = obj1.copy() +obj4 = obj1.copy() + + +def describe_object_map(): + def can_create_an_empty_map(): + m = RefMap[str, int]() + assert not m + assert len(m) == 0 + assert list(m) == [] + assert list(m.keys()) == [] + assert list(m.values()) == [] + assert list(m.items()) == [] + + def can_create_a_map_with_scalar_keys_and_values(): + m = RefMap[str, int](list(obj1.items())) + assert m + assert len(m) == 3 + assert list(m) == ["a", "b", "c"] + assert list(m.keys()) == ["a", "b", "c"] + assert list(m.values()) == [1, 2, 3] + assert list(m.items()) == [("a", 1), ("b", 2), ("c", 3)] + for k, v in m.items(): + assert k in m + assert m[k] == v + assert m.get(k) == v + assert v not in m + with pytest.raises(KeyError): + m[v] # type: ignore + assert m.get(v) is None + + def can_create_a_map_with_one_object_as_key(): + m = RefMap[dict, int]([(obj1, 1)]) + assert m + assert len(m) == 1 + assert list(m) == [obj1] + assert list(m.keys()) == [obj1] + assert list(m.values()) == [1] + assert list(m.items()) == [(obj1, 1)] + assert obj1 in m + assert 1 not in m + assert obj2 not in m + assert m[obj1] == 1 + assert m.get(obj1) == 1 + with 
pytest.raises(KeyError): + m[1] # type: ignore + assert m.get(1) is None + with pytest.raises(KeyError): + m[obj2] + assert m.get(obj2) is None + + def can_create_a_map_with_three_objects_as_keys(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2), (obj3, 3)]) + assert m + assert len(m) == 3 + assert list(m) == [obj1, obj2, obj3] + assert list(m.keys()) == [obj1, obj2, obj3] + assert list(m.values()) == [1, 2, 3] + assert list(m.items()) == [(obj1, 1), (obj2, 2), (obj3, 3)] + for k, v in m.items(): + assert k in m + assert m[k] == v + assert m.get(k) == v + assert v not in m + with pytest.raises(KeyError): + m[v] # type: ignore + assert m.get(v) is None + assert obj4 not in m + with pytest.raises(KeyError): + m[obj4] + assert m.get(obj4) is None + + def can_set_a_key_that_is_an_object(): + m = RefMap[dict, int]() + m[obj1] = 1 + assert m[obj1] == 1 + assert list(m) == [obj1] + with pytest.raises(KeyError): + m[obj2] + m[obj2] = 2 + assert m[obj1] == 1 + assert m[obj2] == 2 + assert list(m) == [obj1, obj2] + m[obj2] = 3 + assert m[obj1] == 1 + assert m[obj2] == 3 + assert list(m) == [obj1, obj2] + assert len(m) == 2 + + def can_delete_a_key_that_is_an_object(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2), (obj3, 3)]) + del m[obj2] + assert obj2 not in m + assert list(m) == [obj1, obj3] + with pytest.raises(KeyError): + del m[obj2] + assert list(m) == [obj1, obj3] + assert len(m) == 2 + + def can_update_a_map(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2)]) + m.update([]) + assert list(m.keys()) == [obj1, obj2] + assert len(m) == 2 + m.update([(obj2, 3), (obj3, 4)]) + assert list(m.keys()) == [obj1, obj2, obj3] + assert list(m.values()) == [1, 3, 4] + assert list(m.items()) == [(obj1, 1), (obj2, 3), (obj3, 4)] + assert obj3 in m + assert m[obj2] == 3 + assert m[obj3] == 4 + assert len(m) == 3 + + def can_get_the_representation_of_a_ref_map(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2)]) + assert repr(m) == ( + "RefMap([({'a': 1, 'b': 2, 'c': 3}, 1), ({'a': 1, 'b': 2, 'c': 3}, 2)])" + ) diff --git a/tests/pyutils/test_ref_set.py b/tests/pyutils/test_ref_set.py new file mode 100644 index 00000000..fead877b --- /dev/null +++ b/tests/pyutils/test_ref_set.py @@ -0,0 +1,89 @@ +import pytest + +from graphql.pyutils import RefSet + +obj1 = ["a", "b", "c"] +obj2 = obj1.copy() +obj3 = obj1.copy() +obj4 = obj1.copy() + + +def describe_object_set(): + def can_create_an_empty_set(): + s = RefSet[int]() + assert not s + assert len(s) == 0 + assert list(s) == [] + + def can_create_a_set_with_scalar_values(): + s = RefSet[str](obj1) + assert s + assert len(s) == 3 + assert list(s) == ["a", "b", "c"] + for v in s: + assert v in s + + def can_create_a_set_with_one_object_as_value(): + s = RefSet[list]([obj1]) + assert s + assert len(s) == 1 + assert obj1 in s + assert obj2 not in s + + def can_create_a_set_with_three_objects_as_keys(): + s = RefSet[list]([obj1, obj2, obj3]) + assert s + assert len(s) == 3 + assert list(s) == [obj1, obj2, obj3] + for v in s: + assert v in s + assert obj4 not in s + + def can_add_a_value_that_is_an_object(): + s = RefSet[list]() + s.add(obj1) + assert obj1 in s + assert list(s) == [obj1] + assert obj2 not in s + s.add(obj2) + assert obj1 in s + assert obj2 in s + assert list(s) == [obj1, obj2] + s.add(obj2) + assert obj1 in s + assert obj2 in s + assert list(s) == [obj1, obj2] + assert len(s) == 2 + + def can_remove_a_value_that_is_an_object(): + s = RefSet[list]([obj1, obj2, obj3]) + s.remove(obj2) + assert obj2 not in s + assert list(s) == [obj1, obj3] + with 
pytest.raises(KeyError): + s.remove(obj2) + assert list(s) == [obj1, obj3] + assert len(s) == 2 + + def can_discard_a_value_that_is_an_object(): + s = RefSet[list]([obj1, obj2, obj3]) + s.discard(obj2) + assert obj2 not in s + assert list(s) == [obj1, obj3] + s.discard(obj2) + assert list(s) == [obj1, obj3] + assert len(s) == 2 + + def can_update_a_set(): + s = RefSet[list]([obj1, obj2]) + s.update([]) + assert list(s) == [obj1, obj2] + assert len(s) == 2 + s.update([obj2, obj3]) + assert list(s) == [obj1, obj2, obj3] + assert obj3 in s + assert len(s) == 3 + + def can_get_the_representation_of_a_ref_set(): + s = RefSet[list]([obj1, obj2]) + assert repr(s) == ("RefSet([['a', 'b', 'c'], ['a', 'b', 'c']])") diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 47060d3f..f0a88dcb 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -1,13 +1,12 @@ from asyncio import sleep -from inspect import isawaitable -from pytest import mark, raises +import pytest -from graphql.pyutils import SimplePubSub +from graphql.pyutils import SimplePubSub, is_awaitable def describe_simple_pub_sub(): - @mark.asyncio + @pytest.mark.asyncio async def subscribe_async_iterator_mock(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() @@ -22,9 +21,9 @@ async def subscribe_async_iterator_mock(): # Read ahead i3 = await iterator.__anext__() - assert isawaitable(i3) + assert is_awaitable(i3) i4 = await iterator.__anext__() - assert isawaitable(i4) + assert is_awaitable(i4) # Publish assert pubsub.emit("Coconut") is True @@ -44,14 +43,14 @@ async def subscribe_async_iterator_mock(): assert pubsub.emit("Fig") is False # Find that cancelled read-ahead got a "done" result - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await i5 # And next returns empty completion value - with raises(StopAsyncIteration): + with pytest.raises(StopAsyncIteration): await iterator.__anext__() - @mark.asyncio + @pytest.mark.asyncio async def iterator_aclose_empties_push_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -69,7 +68,7 @@ async def iterator_aclose_empties_push_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @mark.asyncio + @pytest.mark.asyncio async def iterator_aclose_empties_pull_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -86,11 +85,11 @@ async def iterator_aclose_empties_pull_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @mark.asyncio + @pytest.mark.asyncio async def iterator_aclose_is_idempotent(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() assert iterator.listening - for n in range(3): + for _n in range(3): await iterator.aclose() assert not iterator.listening diff --git a/tests/pyutils/test_suggestion_list.py b/tests/pyutils/test_suggestion_list.py index 57161386..216ba3c5 100644 --- a/tests/pyutils/test_suggestion_list.py +++ b/tests/pyutils/test_suggestion_list.py @@ -1,9 +1,9 @@ -from typing import List +from __future__ import annotations from graphql.pyutils import suggestion_list -def expect_suggestions(input_: str, options: List[str], expected: List[str]) -> None: +def expect_suggestions(input_: str, options: list[str], expected: list[str]) -> None: assert suggestion_list(input_, options) == expected diff --git a/tests/pyutils/test_undefined.py b/tests/pyutils/test_undefined.py index b7ad8cf6..b34611e3 100644 --- a/tests/pyutils/test_undefined.py +++ b/tests/pyutils/test_undefined.py 
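The reworked test_is_awaitable.py above spells out what graphql.pyutils.is_awaitable accepts: coroutine objects and futures qualify, while coroutine functions and async generators do not. A compact sketch of those distinctions (the helper names are mine):

import asyncio

from graphql.pyutils import is_awaitable


async def some_async_function():
    return True


# The coroutine *function* itself is not awaitable...
assert not is_awaitable(some_async_function)


async def main():
    some_coroutine = some_async_function()  # ...but the coroutine object is,
    assert is_awaitable(some_coroutine)
    some_future = asyncio.ensure_future(some_coroutine)  # and so is a future.
    assert is_awaitable(some_future)
    assert await some_future is True


asyncio.run(main())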
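Likewise, the new test_ref_map.py and test_ref_set.py files above describe RefMap and RefSet from graphql.pyutils, which track entries by object identity rather than equality, so equal-but-distinct objects (even unhashable ones such as dicts and lists) remain separate. A short sketch of the behavior those tests assert:

from graphql.pyutils import RefMap, RefSet

key1 = {"a": 1}
key2 = key1.copy()  # equal to key1, but a distinct object

m = RefMap([(key1, "first")])
m[key2] = "second"  # does not overwrite the entry for key1
assert m[key1] == "first"
assert m[key2] == "second"
assert len(m) == 2  # a plain dict could not even hash these keys

s = RefSet([key1])
assert key1 in s
assert key2 not in s  # membership is checked by identity, not equality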
@@ -1,7 +1,11 @@
-from graphql.pyutils import Undefined
+import pickle
+import pytest
-def describe_invalid():
+from graphql.pyutils import Undefined, UndefinedType
+
+
+def describe_Undefined():
     def has_repr():
         assert repr(Undefined) == "Undefined"
 
@@ -17,12 +21,28 @@ def is_hashable():
     def as_bool_is_false():
         assert bool(Undefined) is False
 
-    def only_equal_to_itself():
-        assert Undefined == Undefined
-        assert not Undefined != Undefined
+    def only_equal_to_itself_and_none():
+        # because we want it to behave similarly to JavaScript
+        assert Undefined == Undefined  # noqa: PLR0124
         none_object = None
-        assert Undefined != none_object
-        assert not Undefined == none_object
+        assert Undefined == none_object
+        assert none_object == Undefined
         false_object = False
         assert Undefined != false_object
-        assert not Undefined == false_object
+        assert false_object != Undefined
+
+    def should_not_be_an_exception():
+        # because we want to create similar code to JavaScript where
+        # undefined return values are different from exceptions
+        # (for instance, this is used in the completeValue function)
+        assert not isinstance(Undefined, Exception)
+
+    def cannot_be_redefined():
+        with pytest.warns(RuntimeWarning, match="Redefinition of 'Undefined'"):
+            redefined_undefined = UndefinedType()
+        assert redefined_undefined is Undefined
+
+    def can_be_pickled():
+        pickled_undefined = pickle.dumps(Undefined)
+        unpickled_undefined = pickle.loads(pickled_undefined)
+        assert unpickled_undefined is Undefined
diff --git a/tests/star_wars_data.py b/tests/star_wars_data.py
index 167d2a32..158bf937 100644
--- a/tests/star_wars_data.py
+++ b/tests/star_wars_data.py
@@ -1,11 +1,13 @@
-"""This defines a basic set of data for our Star Wars Schema.
+"""Define a basic set of data for our Star Wars Schema.
 
-This data is hard coded for the sake of the demo, but you could imagine fetching this
+The data is hard coded for the sake of the demo, but you could imagine fetching this
 data from a backend service rather than from hardcoded JSON objects in a more complex
 demo.
 """
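The rewritten test_undefined.py above documents the deliberately JavaScript-like semantics of the Undefined singleton: it is falsy, compares equal to None (but not to False), is not an exception, warns when re-instantiated, and survives pickling as the very same object. A condensed sketch of those invariants, taken directly from the asserts above:

import pickle

from graphql.pyutils import Undefined

assert bool(Undefined) is False
assert Undefined == None  # equal to None by design  # noqa: E711
assert Undefined != False  # but not equal to False  # noqa: E712
assert not isinstance(Undefined, Exception)
assert pickle.loads(pickle.dumps(Undefined)) is Undefined  # same singleton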
""" -from typing import Awaitable, Collection, Dict, Iterator, Optional +from __future__ import annotations + +from typing import Awaitable, Collection, Iterator __all__ = ["get_droid", "get_friends", "get_hero", "get_human", "get_secret_backstory"] @@ -26,7 +28,7 @@ class Human(Character): homePlanet: str # noinspection PyShadowingBuiltins - def __init__(self, id, name, friends, appearsIn, homePlanet): + def __init__(self, id, name, friends, appearsIn, homePlanet): # noqa: A002 self.id, self.name = id, name self.friends, self.appearsIn = friends, appearsIn self.homePlanet = homePlanet @@ -38,7 +40,7 @@ class Droid(Character): primaryFunction: str # noinspection PyShadowingBuiltins - def __init__(self, id, name, friends, appearsIn, primaryFunction): + def __init__(self, id, name, friends, appearsIn, primaryFunction): # noqa: A002 self.id, self.name = id, name self.friends, self.appearsIn = friends, appearsIn self.primaryFunction = primaryFunction @@ -80,7 +82,7 @@ def __init__(self, id, name, friends, appearsIn, primaryFunction): id="1004", name="Wilhuff Tarkin", friends=["1001"], appearsIn=[4], homePlanet=None ) -human_data: Dict[str, Human] = { +human_data: dict[str, Human] = { "1000": luke, "1001": vader, "1002": han, @@ -104,17 +106,17 @@ def __init__(self, id, name, friends, appearsIn, primaryFunction): primaryFunction="Astromech", ) -droid_data: Dict[str, Droid] = {"2000": threepio, "2001": artoo} +droid_data: dict[str, Droid] = {"2000": threepio, "2001": artoo} # noinspection PyShadowingBuiltins -async def get_character(id: str) -> Optional[Character]: +async def get_character(id: str) -> Character | None: # noqa: A002 """Helper function to get a character by ID.""" # We use an async function just to illustrate that GraphQL-core supports it. return human_data.get(id) or droid_data.get(id) -def get_friends(character: Character) -> Iterator[Awaitable[Optional[Character]]]: +def get_friends(character: Character) -> Iterator[Awaitable[Character | None]]: """Allows us to query for a character's friends.""" # Notice that GraphQL-core accepts iterators of awaitables. 
     return map(get_character, character.friends)
 
@@ -130,18 +132,18 @@ def get_hero(episode: int) -> Character:
 
 
 # noinspection PyShadowingBuiltins
-def get_human(id: str) -> Optional[Human]:
+def get_human(id: str) -> Human | None:  # noqa: A002
     """Allows us to query for the human with the given id."""
     return human_data.get(id)
 
 
 # noinspection PyShadowingBuiltins
-def get_droid(id: str) -> Optional[Droid]:
+def get_droid(id: str) -> Droid | None:  # noqa: A002
     """Allows us to query for the droid with the given id."""
     return droid_data.get(id)
 
 
 # noinspection PyUnusedLocal
-def get_secret_backstory(character: Character) -> str:
+def get_secret_backstory(character: Character) -> str:  # noqa: ARG001
     """Raise an error when attempting to get the secret backstory."""
     raise RuntimeError("secretBackstory is secret.")
diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py
index 575bf482..5f4c0809 100644
--- a/tests/star_wars_schema.py
+++ b/tests/star_wars_schema.py
@@ -140,8 +140,7 @@
         "name": GraphQLField(GraphQLString, description="The name of the human."),
         "friends": GraphQLField(
             GraphQLList(character_interface),
-            description="The friends of the human,"
-            " or an empty list if they have none.",
+            description="The friends of the human, or an empty list if they have none.",
             resolve=lambda human, _info: get_friends(human),
         ),
         "appearsIn": GraphQLField(
@@ -182,8 +181,7 @@
         "name": GraphQLField(GraphQLString, description="The name of the droid."),
         "friends": GraphQLField(
             GraphQLList(character_interface),
-            description="The friends of the droid,"
-            " or an empty list if they have none.",
+            description="The friends of the droid, or an empty list if they have none.",
             resolve=lambda droid, _info: get_friends(droid),
         ),
         "appearsIn": GraphQLField(
@@ -238,7 +236,7 @@
                 GraphQLNonNull(GraphQLString), description="id of the human"
             )
         },
-        resolve=lambda _source, _info, id: get_human(id),
+        resolve=lambda _source, _info, id: get_human(id),  # noqa: A006
     ),
     "droid": GraphQLField(
         droid_type,
@@ -247,7 +245,7 @@
                 GraphQLNonNull(GraphQLString), description="id of the droid"
             )
         },
-        resolve=lambda _source, _info, id: get_droid(id),
+        resolve=lambda _source, _info, id: get_droid(id),  # noqa: A006
     ),
     },
 )
diff --git a/tests/test_docs.py b/tests/test_docs.py
index 7796ec31..23c157e2 100644
--- a/tests/test_docs.py
+++ b/tests/test_docs.py
@@ -1,11 +1,19 @@
 """Test all code snippets in the documentation"""
 
+from __future__ import annotations
+
 from pathlib import Path
-from typing import Any, Dict, List
+from typing import Any, Dict
 
 from .utils import dedent
 
-Scope = Dict[str, Any]
+try:
+    from typing import TypeAlias
+except ImportError:  # Python < 3.10
+    from typing_extensions import TypeAlias
+
+
+Scope: TypeAlias = Dict[str, Any]
 
 
 def get_snippets(source, indent=4):
@@ -13,19 +21,19 @@
     if not source.endswith(".rst"):  # pragma: no cover
         source += ".rst"
     source_path = Path(__file__).parents[1] / "docs" / source
-    lines = open(source_path).readlines()
-    snippets: List[str] = []
-    snippet: List[str] = []
+    with source_path.open() as source_file:
+        lines = source_file.readlines()
+    snippets: list[str] = []
+    snippet: list[str] = []
     snippet_start = " " * indent
     for line in lines:
        if not line.rstrip() and snippet:
            snippet.append(line)
        elif line.startswith(snippet_start):
            snippet.append(line[indent:])
-        else:
-            if snippet:
-                snippets.append("".join(snippet).rstrip() + "\n")
-            snippet = []
+        elif snippet:
+            snippets.append("".join(snippet).rstrip() + "\n")
+            snippet = []
     if snippet:
snippets.append("".join(snippet).rstrip() + "\n") return snippets @@ -49,7 +57,8 @@ def describe_introduction(): def getting_started(capsys): intro = get_snippets("intro") pip_install = intro.pop(0) - assert "pip install" in pip_install and "graphql-core" in pip_install + assert "pip install" in pip_install + assert "graphql-core" in pip_install poetry_install = intro.pop(0) assert "poetry install" in poetry_install create_schema = intro.pop(0) @@ -58,7 +67,9 @@ def getting_started(capsys): exec(create_schema, scope) schema = scope.get("schema") schema_class = scope.get("GraphQLSchema") - assert schema and schema_class and isinstance(schema, schema_class) + assert schema + assert schema_class + assert isinstance(schema, schema_class) query = intro.pop(0) assert "graphql_sync" in query exec(query, scope) @@ -133,21 +144,20 @@ def executing_queries(capsys): queries = get_snippets("usage/queries") async_query = queries.pop(0) - assert "asyncio" in async_query and "graphql_sync" not in async_query + assert "asyncio" in async_query + assert "graphql_sync" not in async_query assert "asyncio.run" in async_query - try: # pragma: no cover - from asyncio import run # noqa: F401 - except ImportError: # Python < 3.7 - assert "ExecutionResult" in expected_result(queries) - else: # pragma: no cover - exec(async_query, scope) - out, err = capsys.readouterr() - assert not err - assert "R2-D2" in out - assert out == expected_result(queries) + from asyncio import run # noqa: F401 + + exec(async_query, scope) + out, err = capsys.readouterr() + assert not err + assert "R2-D2" in out + assert out == expected_result(queries) sync_query = queries.pop(0) - assert "graphql_sync" in sync_query and "asyncio" not in sync_query + assert "graphql_sync" in sync_query + assert "asyncio" not in sync_query exec(sync_query, scope) out, err = capsys.readouterr() assert not err @@ -167,7 +177,8 @@ def executing_queries(capsys): exec(typename_query, scope) out, err = capsys.readouterr() assert not err - assert "__typename" in out and "Human" in out + assert "__typename" in out + assert "Human" in out assert out == expected_result(queries) backstory_query = queries.pop(0) @@ -175,7 +186,8 @@ def executing_queries(capsys): exec(backstory_query, scope) out, err = capsys.readouterr() assert not err - assert "errors" in out and "secretBackstory" in out + assert "errors" in out + assert "secretBackstory" in out assert out == expected_result(queries) def using_the_sdl(capsys): @@ -204,11 +216,14 @@ def using_the_sdl(capsys): assert schema.get_type("Episode").values["EMPIRE"].value == 5 query = use_sdl.pop(0) - assert "graphql_sync" in query and "print(result)" in query + assert "graphql_sync" in query + assert "print(result)" in query exec(query, scope) out, err = capsys.readouterr() assert not err - assert "Luke" in out and "appearsIn" in out and "EMPIRE" in out + assert "Luke" in out + assert "appearsIn" in out + assert "EMPIRE" in out assert out == expected_result(use_sdl) def using_resolver_methods(capsys): @@ -225,11 +240,14 @@ def using_resolver_methods(capsys): assert "Root" in scope query = methods.pop(0) - assert "graphql_sync" in query and "Root()" in query + assert "graphql_sync" in query + assert "Root()" in query exec(query, scope) out, err = capsys.readouterr() assert not err - assert "R2-D2" in out and "primaryFunction" in out and "Astromech" in out + assert "R2-D2" in out + assert "primaryFunction" in out + assert "Astromech" in out assert out == expected_result(methods) def using_introspection(capsys): @@ -339,7 
+357,8 @@ def extending_a_schema(capsys): exec(query, scope) out, err = capsys.readouterr() assert not err - assert "lastName" in out and "Skywalker" in out + assert "lastName" in out + assert "Skywalker" in out assert out == expected_result(extension) def validating_queries(): diff --git a/tests/test_star_wars_query.py b/tests/test_star_wars_query.py index 5c6fa40e..bb1008b8 100644 --- a/tests/test_star_wars_query.py +++ b/tests/test_star_wars_query.py @@ -1,4 +1,4 @@ -from pytest import mark +import pytest from graphql import graphql, graphql_sync @@ -7,7 +7,7 @@ def describe_star_wars_query_tests(): def describe_basic_queries(): - @mark.asyncio + @pytest.mark.asyncio async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): source = """ query HeroNameQuery { @@ -19,7 +19,7 @@ async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"name": "R2-D2"}}, None) - @mark.asyncio + @pytest.mark.asyncio async def accepts_positional_arguments_to_graphql(): source = """ query HeroNameQuery { @@ -34,7 +34,7 @@ async def accepts_positional_arguments_to_graphql(): sync_result = graphql_sync(schema, source) assert sync_result == result - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): source = """ query HeroNameAndFriendsQuery { @@ -64,7 +64,7 @@ async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): ) def describe_nested_queries(): - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): source = """ query NestedQuery { @@ -122,7 +122,7 @@ async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): ) def describe_using_ids_and_query_parameters_to_refetch_objects(): - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_query_for_r2_d2_directly_using_his_id(): source = """ query { @@ -134,7 +134,7 @@ async def allows_us_to_query_for_r2_d2_directly_using_his_id(): result = await graphql(schema=schema, source=source) assert result == ({"droid": {"name": "R2-D2"}}, None) - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_query_characters_directly_using_their_id(): source = """ query FetchLukeAndC3POQuery { @@ -152,7 +152,7 @@ async def allows_us_to_query_characters_directly_using_their_id(): None, ) - @mark.asyncio + @pytest.mark.asyncio async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -167,7 +167,7 @@ async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): ) assert result == ({"human": {"name": "Luke Skywalker"}}, None) - @mark.asyncio + @pytest.mark.asyncio async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -182,7 +182,7 @@ async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): ) assert result == ({"human": {"name": "Han Solo"}}, None) - @mark.asyncio + @pytest.mark.asyncio async def generic_query_that_gets_null_back_when_passed_invalid_id(): source = """ query humanQuery($id: String!) 
{ @@ -198,7 +198,7 @@ async def generic_query_that_gets_null_back_when_passed_invalid_id(): assert result == ({"human": None}, None) def describe_using_aliases_to_change_the_key_in_the_response(): - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): source = """ query FetchLukeAliased { @@ -210,7 +210,7 @@ async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): result = await graphql(schema=schema, source=source) assert result == ({"luke": {"name": "Luke Skywalker"}}, None) - @mark.asyncio + @pytest.mark.asyncio async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): source = """ query FetchLukeAndLeiaAliased { @@ -229,7 +229,7 @@ async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): ) def describe_uses_fragments_to_express_more_complex_queries(): - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_query_using_duplicated_content(): source = """ query DuplicateFields { @@ -252,7 +252,7 @@ async def allows_us_to_query_using_duplicated_content(): None, ) - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): source = """ query UseFragment { @@ -278,7 +278,7 @@ async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): ) def describe_using_typename_to_find_the_type_of_an_object(): - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_verify_that_r2_d2_is_a_droid(): source = """ query CheckTypeOfR2 { @@ -291,7 +291,7 @@ async def allows_us_to_verify_that_r2_d2_is_a_droid(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"__typename": "Droid", "name": "R2-D2"}}, None) - @mark.asyncio + @pytest.mark.asyncio async def allows_us_to_verify_that_luke_is_a_human(): source = """ query CheckTypeOfLuke { @@ -308,7 +308,7 @@ async def allows_us_to_verify_that_luke_is_a_human(): ) def describe_reporting_errors_raised_in_resolvers(): - @mark.asyncio + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_secret_backstory(): source = """ query HeroNameQuery { @@ -330,7 +330,7 @@ async def correctly_reports_error_on_accessing_secret_backstory(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_backstory_in_a_list(): source = """ query HeroNameQuery { @@ -374,7 +374,7 @@ async def correctly_reports_error_on_accessing_backstory_in_a_list(): ], ) - @mark.asyncio + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_through_an_alias(): source = """ query HeroNameQuery { diff --git a/tests/test_star_wars_validation.py b/tests/test_star_wars_validation.py index 798a28ea..a40a5224 100644 --- a/tests/test_star_wars_validation.py +++ b/tests/test_star_wars_validation.py @@ -1,13 +1,17 @@ -from typing import List +from __future__ import annotations -from graphql.error import GraphQLError -from graphql.language import parse, Source +from typing import TYPE_CHECKING + +from graphql.language import Source, parse from graphql.validation import validate from .star_wars_schema import star_wars_schema +if TYPE_CHECKING: + from graphql.error import GraphQLError + -def validation_errors(query: str) -> List[GraphQLError]: +def validation_errors(query: str) -> list[GraphQLError]: """Helper function to test a query and the expected response.""" source = Source(query, "StarWars.graphql") ast = parse(source) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index d5f2ba95..0cb2b5b9 100644 --- 
a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -4,23 +4,16 @@ operations on a simulated user registry database backend. """ -from asyncio import sleep, wait +from __future__ import annotations + +from asyncio import create_task, sleep, wait from collections import defaultdict from enum import Enum -from inspect import isawaitable -from typing import Any, Dict, List, NamedTuple, Optional - -try: - from asyncio import create_task -except ImportError: # Python < 3.7 - create_task = None # type: ignore +from typing import Any, AsyncIterable, NamedTuple -from pytest import fixture, mark +import pytest from graphql import ( - graphql, - parse, - subscribe, GraphQLArgument, GraphQLBoolean, GraphQLEnumType, @@ -33,10 +26,11 @@ GraphQLObjectType, GraphQLSchema, GraphQLString, + graphql, + parse, + subscribe, ) - -from graphql.pyutils import SimplePubSub, SimplePubSubIterator -from graphql.execution.map_async_iterator import MapAsyncIterator +from graphql.pyutils import SimplePubSub, SimplePubSubIterator, is_awaitable class User(NamedTuple): @@ -44,8 +38,8 @@ class User(NamedTuple): firstName: str lastName: str - tweets: Optional[int] - id: Optional[str] = None + tweets: int | None + id: str | None = None verified: bool = False @@ -61,10 +55,10 @@ class UserRegistry: """Simulation of a user registry with asynchronous database backend access.""" def __init__(self, **users): - self._registry: Dict[str, User] = users + self._registry: dict[str, User] = users self._pubsub = defaultdict(SimplePubSub) - async def get(self, id_: str) -> Optional[User]: + async def get(self, id_: str) -> User | None: """Get a user object from the registry""" await sleep(0) return self._registry.get(id_) @@ -100,7 +94,7 @@ def emit_event(self, mutation: MutationEnum, user: User) -> None: self._pubsub[None].emit(payload) # notify all user subscriptions self._pubsub[user.id].emit(payload) # notify single user subscriptions - def event_iterator(self, id_: Optional[str]) -> SimplePubSubIterator: + def event_iterator(self, id_: str | None) -> SimplePubSubIterator: return self._pubsub[id_].get_subscriber() @@ -140,19 +134,17 @@ async def resolve_user(_root, info, **args): async def resolve_create_user(_root, info, data): """Resolver function for creating a user object""" - user = await info.context["registry"].create(**data) - return user + return await info.context["registry"].create(**data) # noinspection PyShadowingBuiltins -async def resolve_update_user(_root, info, id, data): +async def resolve_update_user(_root, info, id, data): # noqa: A002 """Resolver function for updating a user object""" - user = await info.context["registry"].update(id, **data) - return user + return await info.context["registry"].update(id, **data) # noinspection PyShadowingBuiltins -async def resolve_delete_user(_root, info, id): +async def resolve_delete_user(_root, info, id): # noqa: A002 """Resolver function for deleting a user object""" user = await info.context["registry"].get(id) await info.context["registry"].delete(user.id) @@ -160,15 +152,15 @@ async def resolve_delete_user(_root, info, id): # noinspection PyShadowingBuiltins -async def subscribe_user(_root, info, id=None): +async def subscribe_user(_root, info, id=None): # noqa: A002 """Subscribe to mutations of a specific user object or all user objects""" async_iterator = info.context["registry"].event_iterator(id) async for event in async_iterator: - yield await event if isawaitable(event) else event # pragma: no cover exit + yield await event if is_awaitable(event) 
else event # pragma: no cover exit # noinspection PyShadowingBuiltins,PyUnusedLocal -async def resolve_subscription_user(event, info, id): +async def resolve_subscription_user(event, info, id): # noqa: ARG001, A002 """Resolver function for user subscriptions""" user = event["user"] mutation = MutationEnum(event["mutation"]).value @@ -221,13 +213,13 @@ async def resolve_subscription_user(event, info, id): ) -@fixture +@pytest.fixture def context(): return {"registry": UserRegistry()} def describe_query(): - @mark.asyncio + @pytest.mark.asyncio async def query_user(context): user = await context["registry"].create( firstName="John", lastName="Doe", tweets=42, verified=True @@ -259,7 +251,7 @@ async def query_user(context): def describe_mutation(): - @mark.asyncio + @pytest.mark.asyncio async def create_user(context): received = {} @@ -270,7 +262,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) @@ -281,7 +273,12 @@ def receive(msg): } } """ - user_data = dict(firstName="John", lastName="Doe", tweets=42, verified=True) + user_data = { + "firstName": "John", + "lastName": "Doe", + "tweets": 42, + "verified": True, + } variables = {"userData": user_data} result = await graphql( schema, query, context_value=context, variable_values=variables @@ -306,7 +303,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.CREATED.value}, } - @mark.asyncio + @pytest.mark.asyncio async def update_user(context): received = {} @@ -317,7 +314,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) @@ -362,7 +359,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value}, } - @mark.asyncio + @pytest.mark.asyncio async def delete_user(context): received = {} @@ -373,7 +370,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) @@ -404,7 +401,7 @@ def receive(msg): def describe_subscription(): - @mark.asyncio + @pytest.mark.asyncio async def subscribe_to_user_mutations(context): query = """ subscription ($userId: ID!) 
{ @@ -416,10 +413,10 @@ async def subscribe_to_user_mutations(context): """ variables = {"userId": "0"} - subscription_one = await subscribe( + subscription_one = subscribe( schema, parse(query), context_value=context, variable_values=variables ) - assert isinstance(subscription_one, MapAsyncIterator) + assert isinstance(subscription_one, AsyncIterable) query = """ subscription { @@ -430,14 +427,14 @@ async def subscribe_to_user_mutations(context): } """ - subscription_all = await subscribe(schema, parse(query), context_value=context) - assert isinstance(subscription_all, MapAsyncIterator) + subscription_all = subscribe(schema, parse(query), context_value=context) + assert isinstance(subscription_all, AsyncIterable) received_one = [] received_all = [] async def mutate_users(): - await sleep(0) # make sure subscribers are running + await sleep(2 / 512) # make sure subscribers are running await graphql( schema, """ @@ -498,25 +495,24 @@ async def mutate_users(): ) async def receive_one(): - async for result in subscription_one: # type: ignore # pragma: no cover + async for result in subscription_one: # pragma: no cover received_one.append(result) if len(received_one) == 3: # pragma: no cover else break async def receive_all(): - async for result in subscription_all: # type: ignore # pragma: no cover + async for result in subscription_all: # pragma: no cover received_all.append(result) if len(received_all) == 6: # pragma: no cover else break tasks = [ - create_task(task()) if create_task else task() - for task in (mutate_users, receive_one, receive_all) + create_task(task()) for task in (mutate_users, receive_one, receive_all) ] done, pending = await wait(tasks, timeout=1) assert not pending - expected_data: List[Dict[str, Any]] = [ + expected_data: list[dict[str, Any]] = [ { "mutation": "CREATED", "user": { diff --git a/tests/test_version.py b/tests/test_version.py index fe575f04..49dad199 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -5,8 +5,8 @@ VersionInfo, version, version_info, - version_js, version_info_js, + version_js, ) _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(?:(a|b|r?c)(\d+))?$") diff --git a/tests/type/test_assert_name.py b/tests/type/test_assert_name.py index 0d535d42..24ffc55d 100644 --- a/tests/type/test_assert_name.py +++ b/tests/type/test_assert_name.py @@ -1,34 +1,27 @@ -from pytest import mark, raises +import pytest from graphql.error import GraphQLError -from graphql.type import assert_name, assert_enum_value_name +from graphql.type import assert_enum_value_name, assert_name def describe_assert_name(): def pass_through_valid_name(): assert assert_name("_ValidName123") == "_ValidName123" - def throws_for_non_strings(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert_name({}) # type: ignore - msg = str(exc_info.value) - assert msg == "Expected name to be a string." - def throws_on_empty_strings(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_name("") msg = str(exc_info.value) assert msg == "Expected name to be a non-empty string." def throws_for_names_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_name(">--()-->") msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but '>--()-->' does not." 
def throws_for_names_starting_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_name("42MeaningsOfLife") msg = str(exc_info.value) assert msg == ( @@ -41,35 +34,35 @@ def pass_through_valid_name(): assert assert_enum_value_name("_ValidName123") == "_ValidName123" def throws_for_non_strings(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker assert_enum_value_name({}) # type: ignore msg = str(exc_info.value) assert msg == "Expected name to be a string." def throws_on_empty_strings(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name("") msg = str(exc_info.value) assert msg == "Expected name to be a non-empty string." def throws_for_names_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name(">--()-->") msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but '>--()-->' does not." def throws_for_names_starting_with_invalid_characters(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name("42MeaningsOfLife") msg = str(exc_info.value) assert msg == ( "Names must start with [_a-zA-Z] but '42MeaningsOfLife' does not." ) - @mark.parametrize("name", ("true", "false", "null")) + @pytest.mark.parametrize("name", ["true", "false", "null"]) def throws_for_restricted_names(name): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert_enum_value_name(name) msg = str(exc_info.value) assert msg == (f"Enum values cannot be named: {name}.") diff --git a/tests/type/test_custom_scalars.py b/tests/type/test_custom_scalars.py index a4dac893..82c611f6 100644 --- a/tests/type/test_custom_scalars.py +++ b/tests/type/test_custom_scalars.py @@ -1,9 +1,10 @@ +from __future__ import annotations + from math import isfinite -from typing import Any, Dict, NamedTuple +from typing import TYPE_CHECKING, Any, NamedTuple from graphql import graphql_sync from graphql.error import GraphQLError -from graphql.language import ValueNode from graphql.pyutils import inspect from graphql.type import ( GraphQLArgument, @@ -15,6 +16,9 @@ ) from graphql.utilities import value_from_ast_untyped +if TYPE_CHECKING: + from graphql.language import ValueNode + # this test is not (yet) part of GraphQL.js, see # https://github.com/graphql/graphql-js/issues/2657 @@ -31,7 +35,7 @@ def is_finite(value: Any) -> bool: ) -def serialize_money(output_value: Any) -> Dict[str, float]: +def serialize_money(output_value: Any) -> dict[str, float]: if not isinstance(output_value, Money): raise GraphQLError("Cannot serialize money value: " + inspect(output_value)) return output_value._asdict() @@ -185,7 +189,7 @@ def parse_literal(): def parse_literal_with_errors(): source = """ query Money($amount: String!, $currency: Float!) 
{ - toEuros(money: {amount: $amount, currency: $currency}) + toEuros(money: { amount: $amount, currency: $currency }) } """ @@ -196,7 +200,7 @@ def parse_literal_with_errors(): [ { "message": "Argument 'money' has invalid value" - " {amount: $amount, currency: $currency}.", + " { amount: $amount, currency: $currency }.", "locations": [(3, 30)], }, ], diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 1c5b5bfc..ac7830ef 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -1,16 +1,25 @@ +from __future__ import annotations + +import pickle +import sys from enum import Enum from math import isnan, nan -from typing import cast, Dict +from typing import Any, Callable + +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict -from pytest import mark, raises +import pytest from graphql.error import GraphQLError from graphql.language import ( - parse_value, EnumTypeDefinitionNode, EnumTypeExtensionNode, EnumValueNode, - Node, + FieldNode, + FragmentDefinitionNode, InputObjectTypeDefinitionNode, InputObjectTypeExtensionNode, InputValueDefinitionNode, @@ -18,20 +27,20 @@ InterfaceTypeExtensionNode, ObjectTypeDefinitionNode, ObjectTypeExtensionNode, + OperationDefinitionNode, ScalarTypeDefinitionNode, ScalarTypeExtensionNode, StringValueNode, - TypeDefinitionNode, - TypeExtensionNode, - ValueNode, UnionTypeDefinitionNode, UnionTypeExtensionNode, + ValueNode, + parse_value, ) -from graphql.pyutils import Undefined +from graphql.pyutils import Path, Undefined, is_awaitable from graphql.type import ( GraphQLArgument, - GraphQLEnumValue, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLInputObjectType, @@ -40,9 +49,13 @@ GraphQLList, GraphQLNonNull, GraphQLObjectType, + GraphQLOutputType, + GraphQLResolveInfo, GraphQLScalarType, + GraphQLSchema, GraphQLString, GraphQLUnionType, + introspection_types, ) ScalarType = GraphQLScalarType("Scalar") @@ -80,7 +93,7 @@ def defines_a_scalar_type(): } def accepts_a_scalar_type_defining_serialize(): - def serialize(value): + def serialize(_value): pass scalar = GraphQLScalarType("SomeScalar", serialize) @@ -156,109 +169,48 @@ def accepts_a_scalar_type_with_ast_node_and_extension_ast_nodes(): assert scalar.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_a_scalar_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLScalarType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLScalarType(None) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLScalarType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_a_scalar_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLScalarType("") assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLScalarType("bad-name") assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." 
) - def rejects_a_scalar_type_with_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", description=[]) # type: ignore - assert str(exc_info.value) == "The description must be a string." - - def rejects_a_scalar_type_defining_specified_by_url_with_an_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", specified_by_url={}) # type: ignore - assert ( - str(exc_info.value) - == "SomeScalar must provide 'specified_by_url' as a string, but got: {}." - ) - - def rejects_a_scalar_type_defining_serialize_with_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", {}) # type: ignore - assert str(exc_info.value) == ( - "SomeScalar must provide 'serialize' as a function." - " If this custom Scalar is also used as an input type," - " ensure 'parse_value' and 'parse_literal' functions" - " are also provided." - ) - def rejects_a_scalar_type_defining_parse_literal_but_not_parse_value(): def parse_literal(_node: ValueNode, _vars=None): return Undefined # pragma: no cover - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLScalarType("SomeScalar", parse_literal=parse_literal) assert str(exc_info.value) == ( - "SomeScalar must provide both" - " 'parse_value' and 'parse_literal' as functions." + "SomeScalar must provide both 'parse_value' and 'parse_literal' functions." ) - def rejects_a_scalar_type_incorrectly_defining_parse_literal_and_value(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", parse_value={}, parse_literal={} # type: ignore - ) - assert str(exc_info.value) == ( - "SomeScalar must provide both" - " 'parse_value' and 'parse_literal' as functions." - ) - - def rejects_a_scalar_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeScalar AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeScalar AST node must be a ScalarTypeDefinitionNode." + def pickles_a_custom_scalar_type(): + foo_type = GraphQLScalarType("Foo") + cycled_foo_type = pickle.loads(pickle.dumps(foo_type)) + assert cycled_foo_type.name == foo_type.name + assert cycled_foo_type is not foo_type - def rejects_a_scalar_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeScalar extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLScalarType( - "SomeScalar", extension_ast_nodes=[TypeExtensionNode()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeScalar extension AST nodes must be specified" - " as a collection of ScalarTypeExtensionNode instances." 
- ) + def pickles_a_specified_scalar_type(): + cycled_int_type = pickle.loads(pickle.dumps(GraphQLInt)) + assert cycled_int_type.name == "Int" + assert cycled_int_type is GraphQLInt def describe_type_system_fields(): @@ -305,44 +257,6 @@ def defines_a_scalar_type_with_a_deprecation_reason(): assert field.deprecation_reason is deprecation_reason assert field.to_kwargs()["deprecation_reason"] is deprecation_reason - def rejects_a_field_with_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(InputObjectType) # type: ignore - assert str(exc_info.value) == "Field type must be an output type." - - def rejects_a_field_with_incorrect_args(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, args=[]) # type: ignore - assert str(exc_info.value) == ( - "Field args must be a dict with argument names as keys." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, args={"arg": GraphQLObjectType}) # type: ignore - assert str(exc_info.value) == ( - "Field args must be GraphQLArguments or input type objects." - ) - - def rejects_a_field_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, description=[]) # type: ignore - assert str(exc_info.value) == "The description must be a string." - - def rejects_a_field_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, deprecation_reason=[]) # type: ignore - assert str(exc_info.value) == "The deprecation reason must be a string." - - def rejects_a_field_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLField(GraphQLString, ast_node=Node()) # type: ignore - assert str(exc_info.value) == "Field AST node must be a FieldDefinitionNode." - def describe_type_system_objects(): def defines_an_object_type(): @@ -511,7 +425,8 @@ def accepts_a_lambda_as_an_object_field_resolver(): "SomeObject", { "f": GraphQLField( - ScalarType, resolve=lambda _obj, _info: {} # pragma: no cover + ScalarType, + resolve=lambda _obj, _info: {}, # pragma: no cover ) }, ) @@ -530,87 +445,46 @@ def accepts_an_object_type_with_ast_node_and_extension_ast_nodes(): assert object_type.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_an_object_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLObjectType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLObjectType(None, {}) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLObjectType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_an_object_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLObjectType("", {}) assert str(exc_info.value) == "Expected name to be a non-empty string." 
- with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLObjectType("bad-name", {}) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_object_type_field_with_undefined_config(): - undefined_field = cast(GraphQLField, None) - obj_type = GraphQLObjectType("SomeObject", {"f": undefined_field}) - with raises(TypeError) as exc_info: - assert not obj_type.fields - msg = str(exc_info.value) - assert msg == "SomeObject fields must be GraphQLField or output type objects." - - def rejects_an_object_type_with_incorrectly_typed_fields(): - invalid_field = cast(GraphQLField, [GraphQLField(ScalarType)]) - obj_type = GraphQLObjectType("SomeObject", {"f": invalid_field}) - with raises(TypeError) as exc_info: - assert not obj_type.fields - msg = str(exc_info.value) - assert msg == "SomeObject fields must be GraphQLField or output type objects." - def rejects_an_object_type_with_incorrectly_named_fields(): obj_type = GraphQLObjectType( "SomeObject", {"bad-name": GraphQLField(ScalarType)} ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert not obj_type.fields msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." - def rejects_an_object_type_field_function_that_returns_incorrect_type(): - obj_type = GraphQLObjectType( - "SomeObject", lambda: [GraphQLField(ScalarType)] # type: ignore - ) - with raises(TypeError) as exc_info: - assert not obj_type.fields - assert str(exc_info.value) == ( - "SomeObject fields must be specified as a mapping with field names as keys." - ) - def rejects_an_object_type_field_function_that_raises_an_error(): def fields(): raise RuntimeError("Oops!") obj_type = GraphQLObjectType("SomeObject", fields) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not obj_type.fields assert str(exc_info.value) == "SomeObject fields cannot be resolved. Oops!" - def rejects_an_object_type_with_incorrectly_typed_field_args(): - invalid_args = [{"bad_args": GraphQLArgument(ScalarType)}] - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - { - "badField": GraphQLField( - ScalarType, args=invalid_args # type: ignore - ) - }, - ) - msg = str(exc_info.value) - assert msg == "Field args must be a dict with argument names as keys." - def rejects_an_object_type_with_incorrectly_named_field_args(): obj_type = GraphQLObjectType( "SomeObject", @@ -620,7 +494,7 @@ def rejects_an_object_type_with_incorrectly_named_field_args(): ) }, ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert not obj_type.fields msg = str(exc_info.value) assert msg == ( @@ -628,97 +502,15 @@ def rejects_an_object_type_with_incorrectly_named_field_args(): " Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_object_type_with_incorrectly_typed_interfaces(): - obj_type = GraphQLObjectType("SomeObject", {}, interfaces={}) - with raises(TypeError) as exc_info: - assert not obj_type.interfaces - assert str(exc_info.value) == ( - "SomeObject interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." 
- ) - - def rejects_object_type_with_incorrectly_typed_interfaces_as_a_function(): - obj_type = GraphQLObjectType("SomeObject", {}, interfaces=lambda: {}) - with raises(TypeError) as exc_info: - assert not obj_type.interfaces - assert str(exc_info.value) == ( - "SomeObject interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - def rejects_object_type_with_interfaces_as_function_that_raises_an_error(): def interfaces(): raise RuntimeError("Oops!") obj_type = GraphQLObjectType("SomeObject", {}, interfaces=interfaces) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not obj_type.interfaces assert str(exc_info.value) == "SomeObject interfaces cannot be resolved. Oops!" - def rejects_an_empty_object_field_resolver(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - {"field": GraphQLField(ScalarType, resolve={})}, # type: ignore - ) - msg = str(exc_info.value) - assert msg == "Field resolver must be a function if provided, but got: {}." - - def rejects_a_constant_scalar_value_resolver(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - {"field": GraphQLField(ScalarType, resolve=0)}, # type: ignore - ) - msg = str(exc_info.value) - assert msg == "Field resolver must be a function if provided, but got: 0." - - def rejects_an_object_type_with_an_incorrect_type_for_is_type_of(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType("AnotherObject", {}, is_type_of={}) # type: ignore - assert str(exc_info.value) == ( - "AnotherObject must provide 'is_type_of' as a function, but got: {}." - ) - - def rejects_an_object_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType("SomeObject", {}, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeObject AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", {}, ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeObject AST node must be an ObjectTypeDefinitionNode." - - def rejects_an_object_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", {}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeObject extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLObjectType( - "SomeObject", - {}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeObject extension AST nodes must be specified" - " as a collection of ObjectTypeExtensionNode instances." 
- ) - def describe_type_system_interfaces(): def defines_an_interface_type(): @@ -752,7 +544,8 @@ def resolve_type(_obj, _info, _type): def accepts_an_interface_type_with_output_types_as_fields(): interface = GraphQLInterfaceType( - "AnotherInterface", {"someField": ScalarType} # type: ignore + "AnotherInterface", + {"someField": ScalarType}, # type: ignore ) fields = interface.fields assert isinstance(fields, dict) @@ -820,123 +613,37 @@ def accepts_an_interface_type_with_ast_node_and_extension_ast_nodes(): assert interface_type.ast_node is ast_node assert interface_type.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_an_interface_type_with_incorrectly_typed_fields(): - interface = GraphQLInterfaceType("SomeInterface", []) # type: ignore - with raises(TypeError) as exc_info: - assert not interface.fields - assert str(exc_info.value) == ( - "SomeInterface fields must be specified" - " as a mapping with field names as keys." - ) - interface = GraphQLInterfaceType( - "SomeInterface", {"f": InputObjectType} # type: ignore - ) - with raises(TypeError) as exc_info: - assert not interface.fields - assert str(exc_info.value) == ( - "SomeInterface fields must be GraphQLField or output type objects." - ) - def rejects_an_interface_type_with_unresolvable_fields(): def fields(): raise RuntimeError("Oops!") interface = GraphQLInterfaceType("SomeInterface", fields) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not interface.fields assert str(exc_info.value) == "SomeInterface fields cannot be resolved. Oops!" - def rejects_an_interface_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): - # noinspection PyArgumentList - GraphQLInterfaceType() # type: ignore - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType(None, {}) # type: ignore - assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType(42, {}) # type: ignore - assert str(exc_info.value) == "Expected name to be a string." - def rejects_an_interface_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInterfaceType("", {}) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInterfaceType("bad-name", {}) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_interface_type_with_incorrectly_typed_interfaces(): - interface = GraphQLInterfaceType("AnotherInterface", {}, lambda: {}) - with raises(TypeError) as exc_info: - assert not interface.interfaces - assert str(exc_info.value) == ( - "AnotherInterface interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." - ) - def rejects_an_interface_type_with_unresolvable_interfaces(): def interfaces(): raise RuntimeError("Oops!") interface = GraphQLInterfaceType("AnotherInterface", {}, interfaces) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not interface.interfaces assert ( str(exc_info.value) == "AnotherInterface interfaces cannot be resolved. Oops!" 
) - def rejects_an_interface_type_with_an_incorrect_type_for_resolve_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "AnotherInterface", {}, resolve_type={} # type: ignore - ) - assert str(exc_info.value) == ( - "AnotherInterface must provide 'resolve_type' as a function," - " but got: {}." - ) - - def rejects_an_interface_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType("SomeInterface", {}, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeInterface AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "SomeInterface", {}, ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeInterface AST node must be an InterfaceTypeDefinitionNode." - - def rejects_an_interface_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "SomeInterface", {}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInterface extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInterfaceType( - "SomeInterface", - {}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInterface extension AST nodes must be specified" - " as a collection of InterfaceTypeExtensionNode instances." - ) - def describe_type_system_unions(): def accepts_a_union_type_defining_resolve_type(): @@ -951,7 +658,9 @@ def accepts_a_union_type_with_function_returning_a_list_of_types(): assert union_type.types == (ObjectType,) def accepts_a_union_type_without_types(): - with raises(TypeError, match="missing 1 required positional argument: 'types'"): + with pytest.raises( + TypeError, match="missing 1 required positional argument: 'types'" + ): # noinspection PyArgumentList GraphQLUnionType("SomeUnion") # type: ignore union_type = GraphQLUnionType("SomeUnion", None) # type: ignore @@ -971,91 +680,25 @@ def accepts_a_union_type_with_ast_node_and_extension_ast_nodes(): assert union_type.ast_node is ast_node assert union_type.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_a_union_type_with_incorrectly_typed__name(): - with raises(TypeError, match="missing .* required .* 'name'"): - # noinspection PyArgumentList - GraphQLUnionType() # type: ignore - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType(None, []) # type: ignore - assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType(42, []) # type: ignore - assert str(exc_info.value) == "Expected name to be a string." - def rejects_a_union_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLUnionType("", []) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLUnionType("bad-name", []) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." 
) - def rejects_a_union_type_with_an_incorrect_type_for_resolve_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType("SomeUnion", [], resolve_type={}) # type: ignore - assert str(exc_info.value) == ( - "SomeUnion must provide 'resolve_type' as a function, but got: {}." - ) - - def rejects_a_union_type_with_incorrectly_typed_types(): - union_type = GraphQLUnionType("SomeUnion", {"type": ObjectType}) # type: ignore - with raises(TypeError) as exc_info: - assert not union_type.types - assert str(exc_info.value) == ( - "SomeUnion types must be specified" - " as a collection of GraphQLObjectType instances." - ) - def rejects_a_union_type_with_unresolvable_types(): def types(): raise RuntimeError("Oops!") union_type = GraphQLUnionType("SomeUnion", types) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not union_type.types assert str(exc_info.value) == "SomeUnion types cannot be resolved. Oops!" - def rejects_a_union_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType("SomeUnion", [], ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeUnion AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType( - "SomeUnion", [], ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeUnion AST node must be a UnionTypeDefinitionNode." - - def rejects_a_union_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType( - "SomeUnion", [], extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeUnion extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLUnionType( - "SomeUnion", - [], - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeUnion extension AST nodes must be specified" - " as a collection of UnionTypeExtensionNode instances." 
- ) - def describe_type_system_enums(): def defines_an_enum_using_a_dict(): @@ -1071,41 +714,43 @@ def defines_an_enum_using_an_enum_value_map(): assert enum_type.values == {"RED": red, "BLUE": blue} def defines_an_enum_using_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors) assert enum_type.values == { "RED": GraphQLEnumValue(1), "BLUE": GraphQLEnumValue(2), } def defines_an_enum_using_values_of_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors, names_as_values=False) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors, names_as_values=False) assert enum_type.values == { "RED": GraphQLEnumValue(1), "BLUE": GraphQLEnumValue(2), } def defines_an_enum_using_names_of_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors, names_as_values=True) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors, names_as_values=True) assert enum_type.values == { "RED": GraphQLEnumValue("RED"), "BLUE": GraphQLEnumValue("BLUE"), } def defines_an_enum_using_members_of_a_python_enum(): - colors = Enum("Colors", "RED BLUE") - enum_type = GraphQLEnumType("SomeEnum", colors, names_as_values=None) + Colors = Enum("Colors", "RED BLUE") + enum_type = GraphQLEnumType("SomeEnum", Colors, names_as_values=None) assert enum_type.values == { - "RED": GraphQLEnumValue(colors.RED), - "BLUE": GraphQLEnumValue(colors.BLUE), + "RED": GraphQLEnumValue(Colors.RED), + "BLUE": GraphQLEnumValue(Colors.BLUE), } def defines_an_enum_type_with_a_description(): description = "nice enum" enum_type = GraphQLEnumType( - "SomeEnum", {}, description=description # type: ignore + "SomeEnum", + {}, + description=description, ) assert enum_type.description is description assert enum_type.to_kwargs()["description"] is description @@ -1175,30 +820,30 @@ def serializes_an_enum(): assert enum_type.values["FOO"].value == "fooValue" assert enum_type.values["BAR"].value == ["barValue"] assert enum_type.values["BAZ"].value is None - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize(None) msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: None" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize(Undefined) msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: Undefined" assert enum_type.serialize("fooValue") == "FOO" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize("FOO") msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 'FOO'" assert enum_type.serialize(["barValue"]) == "BAR" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize("BAR") msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 'BAR'" assert enum_type.serialize("BAZ") == "BAZ" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize("bazValue") msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: 'bazValue'" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.serialize(["bazValue"]) msg = 
exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent value: ['bazValue']" @@ -1214,37 +859,37 @@ def parses_an_enum(): "SomeEnum", {"FOO": "fooValue", "BAR": ["barValue"], "BAZ": None} ) assert enum_type.parse_value("FOO") == "fooValue" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_value("fooValue") msg = exc_info.value.message assert msg == "Value 'fooValue' does not exist in 'SomeEnum' enum." assert enum_type.parse_value("BAR") == ["barValue"] - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: # noinspection PyTypeChecker enum_type.parse_value(["barValue"]) # type: ignore msg = exc_info.value.message assert msg == "Enum 'SomeEnum' cannot represent non-string value: ['barValue']." assert enum_type.parse_value("BAZ") is None assert enum_type.parse_literal(EnumValueNode(value="FOO")) == "fooValue" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(StringValueNode(value="FOO")) assert exc_info.value.message == ( "Enum 'SomeEnum' cannot represent non-enum value: \"FOO\"." " Did you mean the enum value 'FOO'?" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(EnumValueNode(value="fooValue")) msg = exc_info.value.message assert msg == "Value 'fooValue' does not exist in 'SomeEnum' enum." assert enum_type.parse_literal(EnumValueNode(value="BAR")) == ["barValue"] - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(StringValueNode(value="BAR")) assert exc_info.value.message == ( "Enum 'SomeEnum' cannot represent non-enum value: \"BAR\"." " Did you mean the enum value 'BAR' or 'BAZ'?" ) assert enum_type.parse_literal(EnumValueNode(value="BAZ")) is None - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: enum_type.parse_literal(StringValueNode(value="BAZ")) assert exc_info.value.message == ( "Enum 'SomeEnum' cannot represent non-enum value: \"BAZ\"." @@ -1256,7 +901,7 @@ def accepts_an_enum_type_with_ast_node_and_extension_ast_nodes(): extension_ast_nodes = [EnumTypeExtensionNode()] enum_type = GraphQLEnumType( "SomeEnum", - {}, # type: ignore + {}, ast_node=ast_node, extension_ast_nodes=extension_ast_nodes, ) @@ -1264,40 +909,40 @@ def accepts_an_enum_type_with_ast_node_and_extension_ast_nodes(): assert enum_type.extension_ast_nodes == tuple(extension_ast_nodes) def rejects_an_enum_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLEnumType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType(None, {}) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_an_enum_type_with_invalid_name(): - values: Dict[str, GraphQLEnumValue] = {} - with raises(GraphQLError) as exc_info: + values: dict[str, GraphQLEnumValue] = {} + with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("", values) assert str(exc_info.value) == "Expected name to be a non-empty string." 
- with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("bad-name", values) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) def rejects_an_enum_type_with_incorrectly_named_values(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLEnumType("SomeEnum", {"bad-name": GraphQLField(ScalarType)}) msg = str(exc_info.value) assert msg == "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." def rejects_an_enum_type_without_values(): - with raises(TypeError, match="missing .* required .* 'values'"): + with pytest.raises(TypeError, match="missing .* required .* 'values'"): # noinspection PyArgumentList GraphQLEnumType("SomeEnum") # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType("SomeEnum", values=None) # type: ignore assert str(exc_info.value) == ( @@ -1305,55 +950,13 @@ def rejects_an_enum_type_without_values(): ) def rejects_an_enum_type_with_incorrectly_typed_values(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLEnumType("SomeEnum", [{"FOO": 10}]) # type: ignore assert str(exc_info.value) == ( "SomeEnum values must be an Enum or a mapping with value names as keys." ) - def rejects_an_enum_type_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType("SomeEnum", {"foo": None}, description=[]) # type: ignore - assert str(exc_info.value) == "The description must be a string." - - def rejects_an_enum_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType("SomeEnum", {"foo": None}, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "SomeEnum AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", {"foo": None}, ast_node=TypeDefinitionNode() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeEnum AST node must be an EnumTypeDefinitionNode." - - def rejects_an_enum_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", {"foo": None}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeEnum extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", - {"foo": None}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeEnum extension AST nodes must be specified" - " as a collection of EnumTypeExtensionNode instances." - ) - def describe_enum_values(): def accepts_an_enum_value_without_value(): enum_value = GraphQLEnumValue() @@ -1393,27 +996,6 @@ def can_compare_enum_values(): deprecation_reason="reason 2" ) - def rejects_an_enum_value_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumValue(description=[]) # type: ignore - msg = str(exc_info.value) - assert msg == "The description of the enum value must be a string." 
- - def rejects_an_enum_value_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumValue(deprecation_reason=[]) # type: ignore - msg = str(exc_info.value) - assert msg == "The deprecation reason for the enum value must be a string." - - def rejects_an_enum_value_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLEnumValue(ast_node=TypeDefinitionNode()) # type: ignore - msg = str(exc_info.value) - assert msg == "AST node must be an EnumValueDefinitionNode." - def describe_type_system_input_objects(): def accepts_an_input_object_type_with_a_description(): @@ -1448,62 +1030,6 @@ def accepts_an_input_object_type_with_ast_node_and_extension_ast_nodes(): assert input_obj_type.ast_node is ast_node assert input_obj_type.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_an_input_object_type_with_incorrect_out_type_function(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType("SomeInputObject", {}, out_type=[]) # type: ignore - assert str(exc_info.value) == ( - "The out type for SomeInputObject must be a function or a class." - ) - - def rejects_an_input_object_type_with_incorrectly_typed_description(): - # noinspection PyTypeChecker - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, description=[] # type: ignore - ) - assert str(exc_info.value) == "The description must be a string." - - def rejects_an_input_object_type_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, ast_node=Node() # type: ignore - ) - msg = str(exc_info.value) - assert msg == "SomeInputObject AST node must be a TypeDefinitionNode." - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, ast_node=TypeDefinitionNode() # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInputObject AST node must be an InputObjectTypeDefinitionNode." - ) - - def rejects_an_input_object_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", {}, extension_ast_nodes=[Node()] # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInputObject extension AST nodes must be specified" - " as a collection of TypeExtensionNode instances." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputObjectType( - "SomeInputObject", - {}, - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "SomeInputObject extension AST nodes must be specified" - " as a collection of InputObjectTypeExtensionNode instances." 
- ) - def describe_input_objects_must_have_fields(): def accepts_an_input_object_type_with_fields(): input_obj_type = GraphQLInputObjectType( @@ -1523,7 +1049,8 @@ def accepts_an_input_object_type_with_fields(): def accepts_an_input_object_type_with_input_type_as_field(): # this is a shortcut syntax for simple input fields input_obj_type = GraphQLInputObjectType( - "SomeInputObject", {"f": ScalarType} # type: ignore + "SomeInputObject", + {"f": ScalarType}, # type: ignore ) field = input_obj_type.fields["f"] assert isinstance(field, GraphQLInputField) @@ -1545,55 +1072,33 @@ def accepts_an_input_object_type_with_a_field_function(): assert input_field.out_name is None def rejects_an_input_object_type_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLInputObjectType() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLInputObjectType(None, {}) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLInputObjectType(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_an_input_object_type_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInputObjectType("", {}) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLInputObjectType("bad-name", {}) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_an_input_object_type_with_incorrect_fields(): - input_obj_type = GraphQLInputObjectType( - "SomeInputObject", [] # type: ignore - ) - with raises(TypeError) as exc_info: - assert not input_obj_type.fields - assert str(exc_info.value) == ( - "SomeInputObject fields must be specified" - " as a mapping with field names as keys." - ) - - def rejects_an_input_object_type_with_incorrect_fields_function(): - input_obj_type = GraphQLInputObjectType( - "SomeInputObject", lambda: [] # type: ignore - ) - with raises(TypeError) as exc_info: - assert not input_obj_type.fields - assert str(exc_info.value) == ( - "SomeInputObject fields must be specified" - " as a mapping with field names as keys." - ) - def rejects_an_input_object_type_with_incorrectly_named_fields(): input_obj_type = GraphQLInputObjectType( "SomeInputObject", {"bad-name": GraphQLInputField(ScalarType)} ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert not input_obj_type.fields msg = str(exc_info.value) assert msg == ( @@ -1605,51 +1110,12 @@ def fields(): raise RuntimeError("Oops!") input_obj_type = GraphQLInputObjectType("SomeInputObject", fields) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert not input_obj_type.fields assert str(exc_info.value) == ( "SomeInputObject fields cannot be resolved. Oops!" 
) - def describe_input_objects_fields_must_not_have_resolvers(): - def rejects_an_input_object_type_with_resolvers(): - def resolve(): - pass - - with raises( - TypeError, match="got an unexpected keyword argument 'resolve'" - ): - # noinspection PyArgumentList - GraphQLInputObjectType( - "SomeInputObject", - { - "f": GraphQLInputField( # type: ignore - ScalarType, - resolve=resolve, - ) - }, - ) - input_obj_type = GraphQLInputObjectType( - "SomeInputObject", - {"f": GraphQLField(ScalarType, resolve=resolve)}, # type: ignore - ) - with raises(TypeError) as exc_info: - assert not input_obj_type.fields - assert str(exc_info.value) == ( - "SomeInputObject fields must be GraphQLInputField" - " or input type objects." - ) - - def rejects_an_input_object_type_with_resolver_constant(): - with raises( - TypeError, match="got an unexpected keyword argument 'resolve'" - ): - # noinspection PyArgumentList - GraphQLInputObjectType( - "SomeInputObject", - {"f": GraphQLInputField(ScalarType, resolve={})}, # type: ignore - ) - def describe_type_system_arguments(): def accepts_an_argument_with_a_description(): @@ -1678,43 +1144,10 @@ def accepts_an_argument_with_an_ast_node(): assert argument.to_kwargs()["ast_node"] is ast_node def rejects_an_argument_without_type(): - with raises(TypeError, match="missing 1 required positional argument"): + with pytest.raises(TypeError, match="missing 1 required positional argument"): # noinspection PyArgumentList GraphQLArgument() # type: ignore - def rejects_an_argument_with_an_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLObjectType) # type: ignore - msg = str(exc_info.value) - assert msg == "Argument type must be a GraphQL input type." - - def rejects_an_argument_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, description=[]) # type: ignore - assert str(exc_info.value) == "Argument description must be a string." - - def rejects_an_argument_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, deprecation_reason=[]) # type: ignore - assert str(exc_info.value) == "Argument deprecation reason must be a string." - - def rejects_an_argument_with_an_incorrect_out_name(): - # This is an extension of GraphQL.js. - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, out_name=[]) # type: ignore - assert str(exc_info.value) == "Argument out name must be a string." - - def rejects_an_argument_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLArgument(GraphQLString, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "Argument AST node must be an InputValueDefinitionNode." 
- def describe_type_system_input_fields(): def accepts_an_input_field_with_a_description(): @@ -1743,43 +1176,10 @@ def accepts_an_input_field_with_an_ast_node(): assert input_field.to_kwargs()["ast_node"] is ast_node def rejects_an_input_field_without_type(): - with raises(TypeError, match="missing 1 required positional argument"): + with pytest.raises(TypeError, match="missing 1 required positional argument"): # noinspection PyArgumentList GraphQLInputField() # type: ignore - def rejects_an_input_field_with_an_incorrect_type(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLObjectType) # type: ignore - msg = str(exc_info.value) - assert msg == "Input field type must be a GraphQL input type." - - def rejects_an_input_field_with_an_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, description=[]) # type: ignore - assert str(exc_info.value) == "Input field description must be a string." - - def rejects_an_input_field_with_an_incorrectly_typed_deprecation_reason(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, deprecation_reason=[]) # type: ignore - assert str(exc_info.value) == "Input field deprecation reason must be a string." - - def rejects_an_input_field_with_an_incorrect_out_name(): - # This is an extension of GraphQL.js. - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, out_name=[]) # type: ignore - assert str(exc_info.value) == "Input field out name must be a string." - - def rejects_an_input_field_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLInputField(GraphQLString, ast_node=Node()) # type: ignore - msg = str(exc_info.value) - assert msg == "Input field AST node must be an InputValueDefinitionNode." - def deprecation_reason_is_preserved_on_fields(): input_obj_type = GraphQLInputObjectType( "someInputObject", @@ -1810,20 +1210,10 @@ def describe_type_system_list(): NonNullScalarType, ] - @mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) + @pytest.mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) def accepts_a_type_as_item_type_of_list(type_): assert GraphQLList(type_) - not_types = [{}, dict, str, object, None] - - @mark.parametrize("type_", not_types, ids=lambda type_: repr(type_)) - def rejects_a_non_type_as_item_type_of_list(type_): - with raises(TypeError) as exc_info: - GraphQLList(type_) - assert str(exc_info.value) == ( - f"Can only create a wrapper for a GraphQLType, but got: {type_}." - ) - def describe_type_system_non_null(): types = [ @@ -1837,26 +1227,10 @@ def describe_type_system_non_null(): ListOfNonNullScalarsType, ] - @mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) + @pytest.mark.parametrize("type_", types, ids=lambda type_: type_.__class__.__name__) def accepts_a_type_as_nullable_type_of_non_null(type_): assert GraphQLNonNull(type_) - not_types = [NonNullScalarType, {}, dict, str, object, None] - - @mark.parametrize("type_", not_types, ids=lambda type_: repr(type_)) - def rejects_a_non_type_as_nullable_type_of_non_null(type_): - with raises(TypeError) as exc_info: - GraphQLNonNull(type_) - assert ( - str(exc_info.value) - == ( - "Can only create NonNull of a Nullable GraphQLType" - f" but got: {type_}." 
- ) - if isinstance(type_, GraphQLNonNull) - else f"Can only create a wrapper for a GraphQLType, but got: {type_}." - ) - def describe_type_system_test_utility_methods(): def stringifies_simple_types(): @@ -1902,3 +1276,66 @@ def fields_have_repr(): repr(GraphQLField(GraphQLList(GraphQLInt))) == ">>" ) + + +def describe_type_system_introspection_types(): + def cannot_redefine_introspection_types(): + for name, introspection_type in introspection_types.items(): + assert introspection_type.name == name + with pytest.raises( + TypeError, match=f"Redefinition of reserved type '{name}'" + ): + introspection_type.__class__(**introspection_type.to_kwargs()) + + +def describe_resolve_info(): + class InfoArgs(TypedDict): + """Arguments for GraphQLResolveInfo""" + + field_name: str + field_nodes: list[FieldNode] + return_type: GraphQLOutputType + parent_type: GraphQLObjectType + path: Path + schema: GraphQLSchema + fragments: dict[str, FragmentDefinitionNode] + root_value: Any + operation: OperationDefinitionNode + variable_values: dict[str, Any] + is_awaitable: Callable[[Any], bool] + + info_args: InfoArgs = { + "field_name": "foo", + "field_nodes": [], + "return_type": GraphQLString, + "parent_type": GraphQLObjectType("Foo", {}), + "path": Path(None, "foo", None), + "schema": GraphQLSchema(), + "fragments": {}, + "root_value": None, + "operation": OperationDefinitionNode(), + "variable_values": {}, + "is_awaitable": is_awaitable, + } + + def resolve_info_with_unspecified_context_type_can_use_any_type(): + info_int = GraphQLResolveInfo(**info_args, context=42) + assert info_int.context == 42 + info_str = GraphQLResolveInfo(**info_args, context="foo") + assert info_str.context == "foo" + + def resolve_info_with_unspecified_context_type_remembers_type(): + info = GraphQLResolveInfo(**info_args, context=42) + assert info.context == 42 + info = GraphQLResolveInfo(**info_args, context="foo") # type: ignore + assert info.context == "foo" + + @pytest.mark.skipif( + sys.version_info < (3, 9), reason="this needs at least Python 3.9" + ) + def resolve_info_with_specified_context_type_checks_type(): + info_int = GraphQLResolveInfo[int](**info_args, context=42) + assert isinstance(info_int.context, int) + # this should not pass type checking now: + info_str = GraphQLResolveInfo[int](**info_args, context="foo") # type: ignore + assert isinstance(info_str.context, str) diff --git a/tests/type/test_directives.py b/tests/type/test_directives.py index 278d1d05..4257d81f 100644 --- a/tests/type/test_directives.py +++ b/tests/type/test_directives.py @@ -1,7 +1,7 @@ -from pytest import raises +import pytest from graphql.error import GraphQLError -from graphql.language import DirectiveLocation, DirectiveDefinitionNode, Node +from graphql.language import DirectiveDefinitionNode, DirectiveLocation from graphql.type import GraphQLArgument, GraphQLDirective, GraphQLInt, GraphQLString @@ -61,7 +61,9 @@ def defines_a_repeatable_directive(): def directive_accepts_input_types_as_arguments(): # noinspection PyTypeChecker directive = GraphQLDirective( - name="Foo", locations=[], args={"arg": GraphQLString} # type: ignore + name="Foo", + locations=[], + args={"arg": GraphQLString}, # type: ignore ) arg = directive.args["arg"] assert isinstance(arg, GraphQLArgument) @@ -70,7 +72,8 @@ def directive_accepts_input_types_as_arguments(): def directive_accepts_strings_as_locations(): # noinspection PyTypeChecker directive = GraphQLDirective( - name="Foo", locations=["SCHEMA", "OBJECT"] # type: ignore + name="Foo", + locations=["SCHEMA", 
"OBJECT"], # type: ignore ) assert directive.locations == ( DirectiveLocation.SCHEMA, @@ -88,80 +91,52 @@ def directive_has_repr(): def can_compare_with_other_source_directive(): locations = [DirectiveLocation.QUERY] directive = GraphQLDirective("Foo", locations) - assert directive == directive - assert not directive != directive - assert not directive == {} + assert directive == directive # noqa: PLR0124 + assert not directive != directive # noqa: PLR0124, SIM202 + assert not directive == {} # noqa: SIM201 assert directive != {} same_directive = GraphQLDirective("Foo", locations) assert directive == same_directive - assert not directive != same_directive + assert not directive != same_directive # noqa: SIM202 other_directive = GraphQLDirective("Bar", locations) - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive other_locations = [DirectiveLocation.MUTATION] other_directive = GraphQLDirective("Foo", other_locations) - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive other_directive = GraphQLDirective("Foo", locations, is_repeatable=True) - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive other_directive = GraphQLDirective("Foo", locations, description="other") - assert not directive == other_directive + assert not directive == other_directive # noqa: SIM201 assert directive != other_directive def rejects_a_directive_with_incorrectly_typed_name(): - with raises(TypeError, match="missing .* required .* 'name'"): + with pytest.raises(TypeError, match="missing .* required .* 'name'"): # noinspection PyArgumentList GraphQLDirective() # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective(None, []) # type: ignore assert str(exc_info.value) == "Must provide name." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective(42, {}) # type: ignore assert str(exc_info.value) == "Expected name to be a string." def rejects_a_directive_with_invalid_name(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLDirective("", []) assert str(exc_info.value) == "Expected name to be a non-empty string." - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLDirective("bad-name", []) assert str(exc_info.value) == ( "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_a_directive_with_incorrectly_typed_args(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective("Foo", locations=[], args=["arg"]) # type: ignore - assert str(exc_info.value) == ( - "Foo args must be a dict with argument names as keys." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective( - "Foo", - locations=[], - args={1: GraphQLArgument(GraphQLString)}, # type: ignore - ) - assert str(exc_info.value) == ( - "Foo args must be a dict with argument names as keys." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective( - "Foo", - locations=[], - args={"arg": GraphQLDirective("Bar", [])}, # type: ignore - ) - assert str(exc_info.value) == ( - "Foo args must be GraphQLArgument or input type objects." 
- ) - def rejects_a_directive_with_incorrectly_named_args(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: GraphQLDirective( "Foo", locations=[DirectiveLocation.QUERY], @@ -171,14 +146,8 @@ def rejects_a_directive_with_incorrectly_named_args(): "Names must only contain [_a-zA-Z0-9] but 'bad-name' does not." ) - def rejects_a_directive_with_incorrectly_typed_repeatable_flag(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective("Foo", locations=[], is_repeatable=None) # type: ignore - assert str(exc_info.value) == "Foo is_repeatable flag must be True or False." - def rejects_a_directive_with_undefined_locations(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective("Foo", locations=None) # type: ignore assert str(exc_info.value) == ( @@ -187,33 +156,17 @@ def rejects_a_directive_with_undefined_locations(): ) def rejects_a_directive_with_incorrectly_typed_locations(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective("Foo", locations="bad") # type: ignore assert ( str(exc_info.value) == "Foo locations must be specified" " as a collection of DirectiveLocation enum values." ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker GraphQLDirective("Foo", locations=["bad"]) # type: ignore assert str(exc_info.value) == ( "Foo locations must be specified" " as a collection of DirectiveLocation enum values." ) - - def rejects_a_directive_with_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective( - "Foo", locations=[], description={"bad": True} # type: ignore - ) - assert str(exc_info.value) == "Foo description must be a string." - - def rejects_a_directive_with_incorrectly_typed_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLDirective("Foo", locations=[], ast_node=Node()) # type: ignore - assert str(exc_info.value) == ( - "Foo AST node must be a DirectiveDefinitionNode." 
- ) diff --git a/tests/type/test_enum.py b/tests/type/test_enum.py index f85a466b..20f8b5f4 100644 --- a/tests/type/test_enum.py +++ b/tests/type/test_enum.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from datetime import datetime from enum import Enum -from typing import Any, Dict, Optional +from typing import Any from graphql import graphql_sync from graphql.type import ( @@ -27,7 +29,7 @@ class ColorTypeEnumValues(Enum): class Complex1: # noinspection PyMethodMayBeStatic - some_random_object = datetime.now() + some_random_object = datetime.now() # noqa: DTZ005 class Complex2: @@ -51,7 +53,7 @@ class Complex2: "fromInt": GraphQLArgument(GraphQLInt), "fromString": GraphQLArgument(GraphQLString), }, - resolve=lambda _source, info, **args: args.get("fromInt") + resolve=lambda _source, _info, **args: args.get("fromInt") or args.get("fromString") or args.get("fromEnum"), ), @@ -61,7 +63,7 @@ class Complex2: "fromEnum": GraphQLArgument(ColorType), "fromInt": GraphQLArgument(GraphQLInt), }, - resolve=lambda _source, info, **args: args.get("fromEnum"), + resolve=lambda _source, _info, **args: args.get("fromEnum"), ), "complexEnum": GraphQLField( ComplexEnum, @@ -72,13 +74,16 @@ class Complex2: "provideGoodValue": GraphQLArgument(GraphQLBoolean), "provideBadValue": GraphQLArgument(GraphQLBoolean), }, - resolve=lambda _source, info, **args: + resolve=lambda _source, _info, **args: # Note: this is one of the references of the internal values # which ComplexEnum allows. - complex2 if args.get("provideGoodValue") + complex2 + if args.get("provideGoodValue") # Note: similar object, but not the same *reference* as # complex2 above. Enum internal values require object equality. - else Complex2() if args.get("provideBadValue") else args.get("fromEnum"), + else Complex2() + if args.get("provideBadValue") + else args.get("fromEnum"), ), }, ) @@ -89,7 +94,7 @@ class Complex2: "favoriteEnum": GraphQLField( ColorType, args={"color": GraphQLArgument(ColorType)}, - resolve=lambda _source, info, color=None: color, + resolve=lambda _source, _info, color=None: color, ) }, ) @@ -100,7 +105,7 @@ class Complex2: "subscribeToEnum": GraphQLField( ColorType, args={"color": GraphQLArgument(ColorType)}, - resolve=lambda _source, info, color=None: color, + resolve=lambda _source, _info, color=None: color, ) }, ) @@ -110,15 +115,15 @@ class Complex2: ) -def execute_query(source: str, variable_values: Optional[Dict[str, Any]] = None): +def execute_query(source: str, variable_values: dict[str, Any] | None = None): return graphql_sync(schema, source, variable_values=variable_values) def describe_type_system_enum_values(): def can_use_python_enums_instead_of_dicts(): assert ColorType2.values == ColorType.values - keys = [key for key in ColorType.values] - keys2 = [key for key in ColorType2.values] + keys = list(ColorType.values) + keys2 = list(ColorType2.values) assert keys2 == keys values = [value.value for value in ColorType.values.values()] values2 = [value.value for value in ColorType2.values.values()] diff --git a/tests/type/test_extensions.py b/tests/type/test_extensions.py index 8c062458..d28b9482 100644 --- a/tests/type/test_extensions.py +++ b/tests/type/test_extensions.py @@ -1,6 +1,4 @@ -from typing import Any, Dict, cast - -from pytest import mark, param, raises +import pytest from graphql.type import ( GraphQLArgument, @@ -19,11 +17,10 @@ dummy_type = GraphQLScalarType("DummyScalar") -bad_extensions = [param([], id="list"), param({1: "ext"}, id="non_string_key")] - - -def bad_extensions_msg(name: str) 
-> str: - return f"{name} extensions must be a dictionary with string keys." +bad_extensions = [ + pytest.param([], id="list"), + pytest.param({1: "ext"}, id="non_string_key"), +] def describe_type_system_extensions(): @@ -40,12 +37,6 @@ def with_extensions(): assert some_scalar.extensions is scalar_extensions assert some_scalar.to_kwargs()["extensions"] is scalar_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeScalar")): - # noinspection PyTypeChecker - GraphQLScalarType("SomeScalar", extensions=extensions) - def describe_graphql_object_type(): def without_extensions(): some_object = GraphQLObjectType( @@ -98,18 +89,6 @@ def with_extensions(): assert some_field.to_kwargs()["extensions"] is field_extensions assert some_arg.to_kwargs()["extensions"] is arg_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeObject")): - # noinspection PyTypeChecker - GraphQLObjectType("SomeObject", {}, extensions=extensions) - with raises(TypeError, match=bad_extensions_msg("Field")): - # noinspection PyTypeChecker - GraphQLField(dummy_type, extensions=extensions) - with raises(TypeError, match=bad_extensions_msg("Argument")): - # noinspection PyTypeChecker - GraphQLArgument(dummy_type, extensions=extensions) - def describe_graphql_interface_type(): def without_extensions(): some_interface = GraphQLInterfaceType( @@ -162,12 +141,6 @@ def with_extensions(): assert some_field.to_kwargs()["extensions"] is field_extensions assert some_arg.to_kwargs()["extensions"] is arg_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeInterface")): - # noinspection PyTypeChecker - GraphQLInterfaceType("SomeInterface", {}, extensions=extensions) - def describe_graphql_union_type(): def without_extensions(): some_union = GraphQLUnionType("SomeUnion", []) @@ -185,12 +158,6 @@ def with_extensions(): assert some_union.to_kwargs()["extensions"] is union_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeUnion")): - # noinspection PyTypeChecker - GraphQLUnionType("SomeUnion", [], extensions=extensions) - def describe_graphql_enum_type(): def without_extensions(): some_enum = GraphQLEnumType("SomeEnum", {"SOME_VALUE": None}) @@ -219,17 +186,6 @@ def with_extensions(): assert some_enum.to_kwargs()["extensions"] is enum_extensions assert some_value.to_kwargs()["extensions"] is value_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeEnum")): - # noinspection PyTypeChecker - GraphQLEnumType( - "SomeEnum", cast(Dict[str, Any], {}), extensions=extensions - ) - with raises(TypeError, match=bad_extensions_msg("Enum value")): - # noinspection PyTypeChecker - GraphQLEnumValue(extensions=extensions) - def describe_graphql_input_object_type(): def without_extensions(): some_input_object = GraphQLInputObjectType( @@ -266,15 +222,6 @@ def with_extensions(): ) assert some_input_field.to_kwargs()["extensions"] is input_field_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("SomeInputObject")): - # noinspection PyTypeChecker - 
GraphQLInputObjectType("SomeInputObject", {}, extensions=extensions) - with raises(TypeError, match=bad_extensions_msg("Input field")): - # noinspection PyTypeChecker - GraphQLInputField(dummy_type, extensions=extensions) - def describe_graphql_directive(): def without_extensions(): some_directive = GraphQLDirective( @@ -306,12 +253,6 @@ def with_extensions(): assert some_directive.to_kwargs()["extensions"] is directive_extensions assert some_arg.to_kwargs()["extensions"] is arg_extensions - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("Directive")): - # noinspection PyTypeChecker - GraphQLDirective("SomeDirective", [], extensions=extensions) - def describe_graphql_schema(): def without_extensions(): schema = GraphQLSchema() @@ -327,9 +268,3 @@ def with_extensions(): assert schema.extensions is schema_extensions assert schema.to_kwargs()["extensions"] is schema_extensions - - @mark.parametrize("extensions", bad_extensions) - def with_bad_extensions(extensions): - with raises(TypeError, match=bad_extensions_msg("Schema")): - # noinspection PyTypeChecker - GraphQLSchema(extensions=extensions) diff --git a/tests/type/test_introspection.py b/tests/type/test_introspection.py index f51d8f06..1a52f7a2 100644 --- a/tests/type/test_introspection.py +++ b/tests/type/test_introspection.py @@ -1,5 +1,5 @@ from graphql import graphql_sync -from graphql.utilities import get_introspection_query, build_schema +from graphql.utilities import build_schema, get_introspection_query def describe_introspection(): @@ -364,6 +364,17 @@ def executes_an_introspection_query(): "isDeprecated": False, "deprecationReason": None, }, + { + "name": "isOneOf", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": None, + }, + "isDeprecated": False, + "deprecationReason": None, + }, ], "inputFields": None, "interfaces": [], @@ -981,6 +992,12 @@ def executes_an_introspection_query(): } ], }, + { + "name": "oneOf", + "isRepeatable": False, + "locations": ["INPUT_OBJECT"], + "args": [], + }, ], } } @@ -1079,7 +1096,7 @@ def introspects_any_default_value(): """ input InputObjectWithDefaultValues { a: String = "Emoji: \\u{1F600}" - b: Complex = {x: ["abc"], y: 123} + b: Complex = { x: ["abc"], y: 123 } } input Complex { @@ -1109,7 +1126,7 @@ def introspects_any_default_value(): "__type": { "inputFields": [ {"name": "a", "defaultValue": '"Emoji: \U0001f600"'}, - {"name": "b", "defaultValue": '{x: ["abc"], y: 123}'}, + {"name": "b", "defaultValue": '{ x: ["abc"], y: 123 }'}, ] } }, @@ -1433,6 +1450,109 @@ def respects_the_include_deprecated_parameter_for_enum_values(): None, ) + def identifies_one_of_for_input_objects(): + schema = build_schema( + """ + input SomeInputObject @oneOf { + a: String + } + + input AnotherInputObject { + a: String + b: String + } + + type Query { + someField(someArg: SomeInputObject): String + anotherField(anotherArg: AnotherInputObject): String + } + """ + ) + + source = """ + { + oneOfInputObject: __type(name: "SomeInputObject") { + isOneOf + } + inputObject: __type(name: "AnotherInputObject") { + isOneOf + } + } + """ + + assert graphql_sync(schema=schema, source=source) == ( + { + "oneOfInputObject": { + "isOneOf": True, + }, + "inputObject": { + "isOneOf": False, + }, + }, + None, + ) + + def returns_null_for_one_of_for_other_types(): + schema = build_schema( + """ + type SomeObject implements SomeInterface { + fieldA: String + } + enum SomeEnum { + SomeObject + } + interface 
SomeInterface { + fieldA: String + } + union SomeUnion = SomeObject + type Query { + someField(enum: SomeEnum): SomeUnion + anotherField(enum: SomeEnum): SomeInterface + } + """ + ) + + source = """ + { + object: __type(name: "SomeObject") { + isOneOf + } + enum: __type(name: "SomeEnum") { + isOneOf + } + interface: __type(name: "SomeInterface") { + isOneOf + } + scalar: __type(name: "String") { + isOneOf + } + union: __type(name: "SomeUnion") { + isOneOf + } + } + """ + + assert graphql_sync(schema=schema, source=source) == ( + { + "object": { + "isOneOf": None, + }, + "enum": { + "isOneOf": None, + }, + "interface": { + "isOneOf": None, + }, + "scalar": { + "isOneOf": None, + }, + "union": { + "isOneOf": None, + }, + }, + None, + ) + def fails_as_expected_on_the_type_root_field_without_an_arg(): schema = build_schema( """ diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index 60a23877..c741eca3 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -1,12 +1,12 @@ from typing import Any -from pytest import raises +import pytest from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, - GraphQLDeprecatedDirective, GraphQLBoolean, + GraphQLDeprecatedDirective, GraphQLDirective, GraphQLEnumType, GraphQLFloat, @@ -20,6 +20,7 @@ GraphQLNonNull, GraphQLObjectType, GraphQLScalarType, + GraphQLSchema, GraphQLSkipDirective, GraphQLString, GraphQLUnionType, @@ -38,6 +39,7 @@ assert_object_type, assert_output_type, assert_scalar_type, + assert_schema, assert_type, assert_union_type, assert_wrapping_type, @@ -53,13 +55,14 @@ is_leaf_type, is_list_type, is_named_type, - is_required_argument, - is_required_input_field, is_non_null_type, is_nullable_type, is_object_type, is_output_type, + is_required_argument, + is_required_input_field, is_scalar_type, + is_schema, is_specified_directive, is_specified_scalar_type, is_type, @@ -90,12 +93,12 @@ def returns_true_for_wrapped_types(): def returns_false_for_type_classes_rather_than_instance(): assert is_type(GraphQLObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_type(GraphQLObjectType) def returns_false_for_random_garbage(): assert is_type({"what": "is this"}) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_type({"what": "is this"}) def describe_is_scalar_type(): @@ -109,28 +112,28 @@ def returns_true_for_custom_scalar(): def returns_false_for_scalar_class_rather_than_instance(): assert is_scalar_type(GraphQLScalarType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(GraphQLScalarType) def returns_false_for_wrapped_scalar(): assert is_scalar_type(GraphQLList(ScalarType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(GraphQLList(ScalarType)) def returns_false_for_non_scalar(): assert is_scalar_type(EnumType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(EnumType) assert is_scalar_type(Directive) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(Directive) def returns_false_for_random_garbage(): assert is_scalar_type(None) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(None) assert is_scalar_type({"what": "is this"}) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type({"what": "is this"}) def describe_is_specified_scalar_type(): @@ -148,12 +151,12 @@ def returns_true_for_object_type(): 
def returns_false_for_wrapped_object_type(): assert is_object_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_object_type(GraphQLList(ObjectType)) def returns_false_for_non_object_type(): assert is_scalar_type(InterfaceType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_scalar_type(InterfaceType) def describe_is_interface_type(): @@ -163,12 +166,12 @@ def returns_true_for_interface_type(): def returns_false_for_wrapped_interface_type(): assert is_interface_type(GraphQLList(InterfaceType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_interface_type(GraphQLList(InterfaceType)) def returns_false_for_non_interface_type(): assert is_interface_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_interface_type(ObjectType) def describe_is_union_type(): @@ -178,12 +181,12 @@ def returns_true_for_union_type(): def returns_false_for_wrapped_union_type(): assert is_union_type(GraphQLList(UnionType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_union_type(GraphQLList(UnionType)) def returns_false_for_non_union_type(): assert is_union_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_union_type(ObjectType) def describe_is_enum_type(): @@ -193,12 +196,12 @@ def returns_true_for_enum_type(): def returns_false_for_wrapped_enum_type(): assert is_enum_type(GraphQLList(EnumType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_enum_type(GraphQLList(EnumType)) def returns_false_for_non_enum_type(): assert is_enum_type(ScalarType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_enum_type(ScalarType) def describe_is_input_object_type(): @@ -208,12 +211,12 @@ def returns_true_for_input_object_type(): def returns_false_for_wrapped_input_object_type(): assert is_input_object_type(GraphQLList(InputObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_input_object_type(GraphQLList(InputObjectType)) def returns_false_for_non_input_object_type(): assert is_input_object_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_input_object_type(ObjectType) def describe_is_list_type(): @@ -223,12 +226,12 @@ def returns_true_for_a_list_wrapped_type(): def returns_false_for_a_unwrapped_type(): assert is_list_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_list_type(ObjectType) def returns_false_for_a_non_list_wrapped_type(): assert is_list_type(GraphQLNonNull(GraphQLList(ObjectType))) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_list_type(GraphQLNonNull(GraphQLList(ObjectType))) def describe_is_non_null_type(): @@ -238,12 +241,12 @@ def returns_true_for_a_non_null_wrapped_type(): def returns_false_for_an_unwrapped_type(): assert is_non_null_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_non_null_type(ObjectType) def returns_false_for_a_not_non_null_wrapped_type(): assert is_non_null_type(GraphQLList(GraphQLNonNull(ObjectType))) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_non_null_type(GraphQLList(GraphQLNonNull(ObjectType))) def describe_is_input_type(): @@ -267,7 +270,7 @@ def returns_true_for_a_wrapped_input_type(): def _assert_non_input_type(type_: Any): assert is_input_type(type_) is False - with raises(TypeError): + with 
pytest.raises(TypeError): assert_input_type(type_) def returns_false_for_an_output_type(): @@ -311,7 +314,7 @@ def returns_true_for_a_wrapped_output_type(): def _assert_non_output_type(type_: Any): assert is_output_type(type_) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_output_type(type_) def returns_false_for_an_input_type(): @@ -330,17 +333,17 @@ def returns_true_for_scalar_and_enum_types(): def returns_false_for_wrapped_leaf_type(): assert is_leaf_type(GraphQLList(ScalarType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_leaf_type(GraphQLList(ScalarType)) def returns_false_for_non_leaf_type(): assert is_leaf_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_leaf_type(ObjectType) def returns_false_for_wrapped_non_leaf_type(): assert is_leaf_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_leaf_type(GraphQLList(ObjectType)) def describe_is_composite_type(): @@ -354,17 +357,17 @@ def returns_true_for_object_interface_and_union_types(): def returns_false_for_wrapped_composite_type(): assert is_composite_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_composite_type(GraphQLList(ObjectType)) def returns_false_for_non_composite_type(): assert is_composite_type(InputObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_composite_type(InputObjectType) def returns_false_for_wrapped_non_composite_type(): assert is_composite_type(GraphQLList(InputObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_composite_type(GraphQLList(InputObjectType)) def describe_is_abstract_type(): @@ -376,17 +379,17 @@ def returns_true_for_interface_and_union_types(): def returns_false_for_wrapped_abstract_type(): assert is_abstract_type(GraphQLList(InterfaceType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_abstract_type(GraphQLList(InterfaceType)) def returns_false_for_non_abstract_type(): assert is_abstract_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_abstract_type(ObjectType) def returns_false_for_wrapped_non_abstract_type(): assert is_abstract_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_abstract_type(GraphQLList(ObjectType)) def describe_is_wrapping_type(): @@ -398,7 +401,7 @@ def returns_true_for_list_and_non_null_types(): def returns_false_for_unwrapped_types(): assert is_wrapping_type(ObjectType) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_wrapping_type(ObjectType) def describe_is_nullable_type(): @@ -412,7 +415,7 @@ def returns_true_for_list_of_non_null_types(): def returns_false_for_non_null_types(): assert is_nullable_type(GraphQLNonNull(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_nullable_type(GraphQLNonNull(ObjectType)) def describe_get_nullable_type(): @@ -434,10 +437,10 @@ def returns_true_for_unwrapped_types(): def returns_false_for_list_and_non_null_types(): assert is_named_type(GraphQLList(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_named_type(GraphQLList(ObjectType)) assert is_named_type(GraphQLNonNull(ObjectType)) is False - with raises(TypeError): + with pytest.raises(TypeError): assert_named_type(GraphQLNonNull(ObjectType)) def describe_get_named_type(): @@ -509,23 
+512,23 @@ def returns_true_for_custom_directive():

     def returns_false_for_directive_class_rather_than_instance():
         assert is_directive(GraphQLDirective) is False
-        with raises(TypeError):
+        with pytest.raises(TypeError):
            assert_directive(GraphQLScalarType)

     def returns_false_for_non_directive():
         assert is_directive(EnumType) is False
-        with raises(TypeError):
+        with pytest.raises(TypeError):
            assert_directive(EnumType)
         assert is_directive(ScalarType) is False
-        with raises(TypeError):
+        with pytest.raises(TypeError):
            assert_directive(ScalarType)

     def returns_false_for_random_garbage():
         assert is_directive(None) is False
-        with raises(TypeError):
+        with pytest.raises(TypeError):
            assert_directive(None)
         assert is_directive({"what": "is this"}) is False
-        with raises(TypeError):
+        with pytest.raises(TypeError):
            assert_directive({"what": "is this"})

 def describe_is_specified_directive():
@@ -536,3 +539,30 @@ def returns_true_for_specified_directives():

     def returns_false_for_custom_directive():
         assert is_specified_directive(Directive) is False
+
+
+def describe_schema_predicates():
+    schema = GraphQLSchema()
+
+    def describe_is_schema_and_assert_schema():
+        def returns_true_for_schema():
+            assert is_schema(schema) is True
+            assert assert_schema(schema) is schema
+
+        def returns_false_for_schema_class_rather_than_instance():
+            assert is_schema(GraphQLSchema) is False
+            with pytest.raises(TypeError):
+                assert_schema(GraphQLSchema)
+
+        def returns_false_for_non_schema():
+            assert is_schema(EnumType) is False
+            with pytest.raises(TypeError):
+                assert_schema(EnumType)
+            assert is_schema(ScalarType) is False
+            with pytest.raises(TypeError):
+                assert_schema(ScalarType)
+
+        def returns_false_for_random_garbage():
+            assert is_schema({"what": "is this"}) is False
+            with pytest.raises(TypeError):
+                assert_schema({"what": "is this"})
diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py
index e5dd7c6f..0ef5e548 100644
--- a/tests/type/test_scalars.py
+++ b/tests/type/test_scalars.py
@@ -1,17 +1,19 @@
+import pickle
 from math import inf, nan, pi
 from typing import Any

-from pytest import raises
+import pytest

 from graphql.error import GraphQLError
 from graphql.language import parse_value as parse_value_to_ast
 from graphql.pyutils import Undefined
 from graphql.type import (
-    GraphQLInt,
-    GraphQLFloat,
-    GraphQLString,
     GraphQLBoolean,
+    GraphQLFloat,
     GraphQLID,
+    GraphQLInt,
+    GraphQLScalarType,
+    GraphQLString,
 )
@@ -21,7 +23,7 @@ def parse_value():
         _parse_value = GraphQLInt.parse_value

         def _parse_value_raises(s: Any, message: str):
-            with raises(GraphQLError) as exc_info:
+            with pytest.raises(GraphQLError) as exc_info:
                 _parse_value(s)
             assert str(exc_info.value) == message
@@ -58,7 +60,7 @@ def _parse_literal(s: str):
             return GraphQLInt.parse_literal(parse_value_to_ast(s))

         def _parse_literal_raises(s: str, message: str):
-            with raises(GraphQLError) as exc_info:
+            with pytest.raises(GraphQLError) as exc_info:
                 _parse_literal(s)
             assert str(exc_info.value).startswith(message + "\n")
@@ -93,7 +95,7 @@ def _parse_literal_raises(s: str, message: str):
         )
         _parse_literal_raises("[1]", "Int cannot represent non-integer value: [1]")
         _parse_literal_raises(
-            "{value: 1}", "Int cannot represent non-integer value: {value: 1}"
+            "{value: 1}", "Int cannot represent non-integer value: { value: 1 }"
         )
         _parse_literal_raises(
             "ENUM_VALUE", "Int cannot represent non-integer value: ENUM_VALUE"
@@ -116,68 +118,75 @@ def serializes():
         # The GraphQL specification does not allow serializing non-integer
         # values as Int to
avoid accidental data loss. - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(0.1) assert str(exc_info.value) == "Int cannot represent non-integer value: 0.1" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(1.1) assert str(exc_info.value) == "Int cannot represent non-integer value: 1.1" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(-1.1) assert str(exc_info.value) == "Int cannot represent non-integer value: -1.1" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("-1.1") assert ( str(exc_info.value) == "Int cannot represent non-integer value: '-1.1'" ) # Maybe a safe JavaScript int, but bigger than 2^32, so not # representable as a GraphQL Int - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(9876504321) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: 9876504321" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(-9876504321) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: -9876504321" ) # Too big to represent as an Int in JavaScript or GraphQL - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(1e100) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: 1e+100" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(-1e100) assert str(exc_info.value) == ( "Int cannot represent non 32-bit signed integer value: -1e+100" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("one") assert ( str(exc_info.value) == "Int cannot represent non-integer value: 'one'" ) # Doesn't represent number - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("") assert str(exc_info.value) == "Int cannot represent non-integer value: ''" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert str(exc_info.value) == "Int cannot represent non-integer value: nan" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(inf) assert str(exc_info.value) == "Int cannot represent non-integer value: inf" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([5]) assert str(exc_info.value) == "Int cannot represent non-integer value: [5]" + def cannot_be_redefined(): + with pytest.raises(TypeError, match="Redefinition of reserved type 'Int'"): + GraphQLScalarType(name="Int") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLInt)) is GraphQLInt + def describe_graphql_float(): def parse_value(): _parse_value = GraphQLFloat.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -213,7 +222,7 @@ def _parse_literal(s: str): return GraphQLFloat.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -246,7 +255,8 @@ def _parse_literal_raises(s: str, 
message: str): "[0.1]", "Float cannot represent non numeric value: [0.1]" ) _parse_literal_raises( - "{value: 0.1}", "Float cannot represent non numeric value: {value: 0.1}" + "{value: 0.1}", + "Float cannot represent non numeric value: { value: 0.1 }", ) _parse_literal_raises( "ENUM_VALUE", "Float cannot represent non numeric value: ENUM_VALUE" @@ -270,36 +280,45 @@ def serializes(): assert serialize(True) == 1 assert serialize(type("Float", (float,), {})(5.5)) == 5.5 - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert ( str(exc_info.value) == "Float cannot represent non numeric value: nan" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(inf) assert ( str(exc_info.value) == "Float cannot represent non numeric value: inf" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("one") assert str(exc_info.value) == ( "Float cannot represent non numeric value: 'one'" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("") assert str(exc_info.value) == "Float cannot represent non numeric value: ''" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([5]) assert ( str(exc_info.value) == "Float cannot represent non numeric value: [5]" ) + def cannot_be_redefined(): + with pytest.raises( + TypeError, match="Redefinition of reserved type 'Float'" + ): + GraphQLScalarType(name="Float") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLFloat)) is GraphQLFloat + def describe_graphql_string(): def parse_value(): _parse_value = GraphQLString.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -329,7 +348,7 @@ def _parse_literal(s: str): return GraphQLString.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -357,7 +376,7 @@ def _parse_literal_raises(s: str, message: str): ) _parse_literal_raises( '{value: "foo"}', - 'String cannot represent a non string value: {value: "foo"}', + 'String cannot represent a non string value: { value: "foo" }', ) _parse_literal_raises( "ENUM_VALUE", "String cannot represent a non string value: ENUM_VALUE" @@ -381,31 +400,40 @@ def __str__(self): assert serialize(StringableObjValue()) == "something useful" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert str(exc_info.value) == "String cannot represent value: nan" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([1]) assert str(exc_info.value) == "String cannot represent value: [1]" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({}) assert str(exc_info.value) == "String cannot represent value: {}" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({"value_of": "value_of string"}) assert ( str(exc_info.value) == "String cannot represent value:" " {'value_of': 'value_of string'}" ) + def cannot_be_redefined(): + with pytest.raises( + TypeError, match="Redefinition of reserved type 'String'" + ): + 
GraphQLScalarType(name="String") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLString)) is GraphQLString + def describe_graphql_boolean(): def parse_value(): _parse_value = GraphQLBoolean.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -443,7 +471,7 @@ def _parse_literal(s: str): return GraphQLBoolean.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -488,11 +516,11 @@ def _parse_literal_raises(s: str, message: str): ) _parse_literal_raises( "{value: false}", - "Boolean cannot represent a non boolean value: {value: false}", + "Boolean cannot represent a non boolean value: { value: false }", ) _parse_literal_raises( "{value: False}", - "Boolean cannot represent a non boolean value: {value: False}", + "Boolean cannot represent a non boolean value: { value: False }", ) _parse_literal_raises( "ENUM_VALUE", "Boolean cannot represent a non boolean value: ENUM_VALUE" @@ -508,46 +536,55 @@ def serializes(): assert serialize(0) is False assert serialize(True) is True assert serialize(False) is False - with raises(TypeError, match="not an acceptable base type"): + with pytest.raises(TypeError, match="not an acceptable base type"): # you can't subclass bool in Python assert serialize(type("Boolean", (bool,), {})(True)) is True - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(nan) assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: nan" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("") assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: ''" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize("True") assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: 'True'" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize([False]) assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: [False]" ) - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({}) assert str(exc_info.value) == ( "Boolean cannot represent a non boolean value: {}" ) + def cannot_be_redefined(): + with pytest.raises( + TypeError, match="Redefinition of reserved type 'Boolean'" + ): + GraphQLScalarType(name="Boolean") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLBoolean)) is GraphQLBoolean + def describe_graphql_id(): def parse_value(): _parse_value = GraphQLID.parse_value def _parse_value_raises(s: Any, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_value(s) assert str(exc_info.value) == message @@ -579,7 +616,7 @@ def _parse_literal(s: str): return GraphQLID.parse_literal(parse_value_to_ast(s)) def _parse_literal_raises(s: str, message: str): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: _parse_literal(s) assert str(exc_info.value).startswith(message + "\n") @@ -614,9 +651,9 @@ def _parse_literal_raises(s: str, message: str): '["1"]', 'ID cannot represent a non-string and non-integer value: ["1"]' ) 
_parse_literal_raises( - '{value: "1"}', + '{ value: "1" }', "ID cannot represent a non-string and non-integer value:" - ' {value: "1"}', + ' { value: "1" }', ) _parse_literal_raises( "ENUM_VALUE", @@ -647,18 +684,25 @@ def __str__(self): obj_value = ObjValue(123) assert serialize(obj_value) == "123" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(True) assert str(exc_info.value) == "ID cannot represent value: True" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(3.14) assert str(exc_info.value) == "ID cannot represent value: 3.14" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize({}) assert str(exc_info.value) == "ID cannot represent value: {}" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: serialize(["abc"]) assert str(exc_info.value) == "ID cannot represent value: ['abc']" + + def cannot_be_redefined(): + with pytest.raises(TypeError, match="Redefinition of reserved type 'ID'"): + GraphQLScalarType(name="ID") + + def pickles(): + assert pickle.loads(pickle.dumps(GraphQLID)) is GraphQLID diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index ef483b54..e678de35 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -1,13 +1,11 @@ from copy import deepcopy -from pytest import raises +import pytest from graphql.language import ( DirectiveLocation, SchemaDefinitionNode, SchemaExtensionNode, - TypeDefinitionNode, - TypeExtensionNode, ) from graphql.type import ( GraphQLArgument, @@ -15,16 +13,21 @@ GraphQLDirective, GraphQLField, GraphQLFieldMap, - GraphQLInputObjectType, GraphQLInputField, + GraphQLInputObjectType, GraphQLInt, GraphQLInterfaceType, GraphQLList, + GraphQLNamedType, GraphQLObjectType, GraphQLScalarType, GraphQLSchema, GraphQLString, GraphQLType, + GraphQLUnionType, + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, specified_directives, ) from graphql.utilities import build_schema, lexicographic_sort_schema, print_schema @@ -171,12 +174,6 @@ def freezes_the_specified_directives(): schema = GraphQLSchema(directives=directives_tuple) assert schema.directives is directives_tuple - def rejects_a_schema_with_incorrectly_typed_description(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(description=[]) # type: ignore - assert str(exc_info.value) == "Schema description must be a string." 
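# Editor's note: a minimal usage sketch of the reserved-type behavior that the
# new `cannot_be_redefined` and `pickles` scalar tests above exercise; in this
# release the specified scalars are singletons, so pickling round-trips to the
# identical object and reusing a reserved name raises a TypeError:

import pickle

from graphql import GraphQLInt, GraphQLScalarType

assert pickle.loads(pickle.dumps(GraphQLInt)) is GraphQLInt

try:
    GraphQLScalarType(name="Int")  # reserved name, rejected at definition time
except TypeError as err:
    assert "Redefinition of reserved type 'Int'" in str(err)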
-
     def describe_type_map():
         def includes_interface_possible_types_in_the_type_map():
             SomeInterface = GraphQLInterfaceType("SomeInterface", {})
@@ -292,46 +289,77 @@ def preserves_the_order_of_user_provided_types():
             copy_schema = GraphQLSchema(**schema.to_kwargs())
             assert list(copy_schema.type_map) == type_names

+    def describe_get_field():
+        pet_type = GraphQLInterfaceType("Pet", {"name": GraphQLField(GraphQLString)})
+        cat_type = GraphQLObjectType(
+            "Cat", {"name": GraphQLField(GraphQLString)}, [pet_type]
+        )
+        dog_type = GraphQLObjectType(
+            "Dog", {"name": GraphQLField(GraphQLString)}, [pet_type]
+        )
+        cat_or_dog = GraphQLUnionType("CatOrDog", [cat_type, dog_type])
+        query_type = GraphQLObjectType("Query", {"catOrDog": GraphQLField(cat_or_dog)})
+        mutation_type = GraphQLObjectType("Mutation", {})
+        subscription_type = GraphQLObjectType("Subscription", {})
+        schema = GraphQLSchema(query_type, mutation_type, subscription_type)
+
+        _get_field = schema.get_field
+
+        def returns_known_field():
+            assert _get_field(pet_type, "name") == pet_type.fields["name"]
+            assert _get_field(cat_type, "name") == cat_type.fields["name"]
+
+            assert _get_field(query_type, "catOrDog") == query_type.fields["catOrDog"]
+
+        def returns_none_for_unknown_fields():
+            assert _get_field(cat_or_dog, "name") is None
+
+            assert _get_field(query_type, "unknown") is None
+            assert _get_field(pet_type, "unknown") is None
+            assert _get_field(cat_type, "unknown") is None
+            assert _get_field(cat_or_dog, "unknown") is None
+
+        def handles_introspection_fields():
+            assert _get_field(query_type, "__typename") == TypeNameMetaFieldDef
+            assert _get_field(mutation_type, "__typename") == TypeNameMetaFieldDef
+            assert _get_field(subscription_type, "__typename") == TypeNameMetaFieldDef
+
+            assert _get_field(pet_type, "__typename") is TypeNameMetaFieldDef
+            assert _get_field(cat_type, "__typename") is TypeNameMetaFieldDef
+            assert _get_field(dog_type, "__typename") is TypeNameMetaFieldDef
+            assert _get_field(cat_or_dog, "__typename") is TypeNameMetaFieldDef
+
+            assert _get_field(query_type, "__type") is TypeMetaFieldDef
+            assert _get_field(query_type, "__schema") is SchemaMetaFieldDef
+
+        def returns_none_for_introspection_fields_in_wrong_location():
+            assert _get_field(pet_type, "__type") is None
+            assert _get_field(dog_type, "__type") is None
+            assert _get_field(mutation_type, "__type") is None
+            assert _get_field(subscription_type, "__type") is None
+
+            assert _get_field(pet_type, "__schema") is None
+            assert _get_field(dog_type, "__schema") is None
+            assert _get_field(mutation_type, "__schema") is None
+            assert _get_field(subscription_type, "__schema") is None
+
     def describe_validity():
         def describe_when_not_assumed_valid():
             def configures_the_schema_to_still_needing_validation():
                 # noinspection PyProtectedMember
                 assert GraphQLSchema(assume_valid=False).validation_errors is None

-            def checks_the_configuration_for_mistakes():
-                def query():
-                    pass
-
-                with raises(Exception):
-                    # noinspection PyTypeChecker
-                    GraphQLSchema(query)  # type: ignore
-                with raises(Exception):
-                    GraphQLSchema(types={})
-                with raises(Exception):
-                    GraphQLSchema(directives={})
-
-            def check_that_query_mutation_and_subscription_are_graphql_types():
-                directive = GraphQLDirective("foo", [])
-                with raises(TypeError) as exc_info:
-                    # noinspection PyTypeChecker
-                    GraphQLSchema(query=directive)  # type: ignore
-                assert str(exc_info.value) == "Expected query to be a GraphQL type."
- with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(mutation=directive) # type: ignore - assert str(exc_info.value) == ( - "Expected mutation to be a GraphQL type." - ) - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema(subscription=directive) # type: ignore - assert str(exc_info.value) == ( - "Expected subscription to be a GraphQL type." - ) - def describe_a_schema_must_contain_uniquely_named_types(): def rejects_a_schema_which_redefines_a_built_in_type(): - FakeString = GraphQLScalarType("String") + # temporarily allow redefinition of the String scalar type + reserved_types = GraphQLNamedType.reserved_types + GraphQLScalarType.reserved_types = {} + try: + # create a redefined String scalar type + FakeString = GraphQLScalarType("String") + finally: + # protect from redefinition again + GraphQLScalarType.reserved_types = reserved_types QueryType = GraphQLObjectType( "Query", @@ -341,7 +369,7 @@ def rejects_a_schema_which_redefines_a_built_in_type(): }, ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(QueryType) msg = str(exc_info.value) assert msg == ( @@ -353,7 +381,7 @@ def rejects_a_schema_when_a_provided_type_has_no_name(): query = GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}) types = [GraphQLType(), query, GraphQLType()] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(query, types=types) # type: ignore msg = str(exc_info.value) assert msg == ( @@ -366,7 +394,7 @@ def rejects_a_schema_which_defines_an_object_twice(): GraphQLObjectType("SameName", {}), ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(types=types) msg = str(exc_info.value) assert msg == ( @@ -384,7 +412,7 @@ def rejects_a_schema_which_defines_fields_with_conflicting_types(): }, ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: GraphQLSchema(QueryType) msg = str(exc_info.value) assert msg == ( @@ -409,28 +437,6 @@ def accepts_a_scalar_type_with_ast_node_and_extension_ast_nodes(): assert schema.ast_node is ast_node assert schema.extension_ast_nodes == tuple(extension_ast_nodes) - def rejects_a_schema_with_an_incorrect_ast_node(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema( - GraphQLObjectType("Query", {}), - ast_node=TypeDefinitionNode(), # type: ignore - ) - msg = str(exc_info.value) - assert msg == "Schema AST node must be a SchemaDefinitionNode." - - def rejects_a_scalar_type_with_incorrect_extension_ast_nodes(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema( - GraphQLObjectType("Query", {}), - extension_ast_nodes=[TypeExtensionNode()], # type: ignore - ) - assert str(exc_info.value) == ( - "Schema extension AST nodes must be specified" - " as a collection of SchemaExtensionNode instances." 
- ) - def can_deep_copy_a_schema(): source = """ schema { diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 875d8902..a4efe041 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -1,30 +1,19 @@ +from __future__ import annotations + from operator import attrgetter -from typing import Any, List, Union -from pytest import mark, raises +import pytest -from graphql.language import parse, DirectiveLocation +from graphql.language import DirectiveLocation, parse from graphql.pyutils import inspect from graphql.type import ( - assert_directive, - assert_enum_type, - assert_input_object_type, - assert_interface_type, - assert_object_type, - assert_scalar_type, - assert_union_type, - assert_valid_schema, - is_input_type, - is_output_type, - validate_schema, GraphQLArgument, GraphQLDirective, GraphQLEnumType, GraphQLField, GraphQLInputField, - GraphQLInputType, GraphQLInputObjectType, - GraphQLInt, + GraphQLInputType, GraphQLInterfaceType, GraphQLList, GraphQLNamedType, @@ -34,6 +23,15 @@ GraphQLSchema, GraphQLString, GraphQLUnionType, + assert_directive, + assert_enum_type, + assert_input_object_type, + assert_interface_type, + assert_object_type, + assert_scalar_type, + assert_union_type, + assert_valid_schema, + validate_schema, ) from graphql.utilities import build_schema, extend_schema @@ -69,7 +67,7 @@ def with_modifiers( type_: GraphQLNamedType, -) -> List[Union[GraphQLNamedType, GraphQLNonNull, GraphQLList]]: +) -> list[GraphQLNamedType | GraphQLNonNull | GraphQLList]: return [ type_, GraphQLList(type_), @@ -244,8 +242,7 @@ def rejects_a_schema_whose_query_root_type_is_not_an_object_type(): ) assert validate_schema(schema) == [ { - "message": "Query root type must be Object type," - " it cannot be Query.", + "message": "Query root type must be Object type, it cannot be Query.", "locations": [(2, 13)], } ] @@ -363,6 +360,12 @@ def rejects_a_schema_extended_with_invalid_root_types(): input SomeInputObject { test: String } + + scalar SomeScalar + + enum SomeEnum { + ENUM_VALUE + } """ ) schema = extend_schema( @@ -380,7 +383,7 @@ def rejects_a_schema_extended_with_invalid_root_types(): parse( """ extend schema { - mutation: SomeInputObject + mutation: SomeScalar } """ ), @@ -390,7 +393,7 @@ def rejects_a_schema_extended_with_invalid_root_types(): parse( """ extend schema { - subscription: SomeInputObject + subscription: SomeEnum } """ ), @@ -403,27 +406,18 @@ def rejects_a_schema_extended_with_invalid_root_types(): }, { "message": "Mutation root type must be Object type" - " if provided, it cannot be SomeInputObject.", + " if provided, it cannot be SomeScalar.", "locations": [(3, 29)], }, { "message": "Subscription root type must be Object type" - " if provided, it cannot be SomeInputObject.", + " if provided, it cannot be SomeEnum.", "locations": [(3, 33)], }, ] def rejects_a_schema_whose_types_are_incorrectly_type(): # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - GraphQLSchema( - SomeObjectType, - types=[{"name": "SomeType"}, SomeDirective], # type: ignore - ) - assert str(exc_info.value) == ( - "Schema types must be specified as a collection of GraphQL types." 
- ) # construct invalid schema manually schema = GraphQLSchema(SomeObjectType) schema.type_map = { @@ -447,6 +441,80 @@ def rejects_a_schema_whose_directives_are_incorrectly_typed(): ] +def describe_type_system_root_types_must_all_be_different_if_provided(): + def accepts_a_schema_with_different_root_types(): + schema = build_schema( + """ + type SomeObject1 { + field: String + } + + type SomeObject2 { + field: String + } + + type SomeObject3 { + field: String + } + + schema { + query: SomeObject1 + mutation: SomeObject2 + subscription: SomeObject3 + } + """ + ) + assert validate_schema(schema) == [] + + def rejects_a_schema_where_the_same_type_is_used_for_multiple_root_types(): + schema = build_schema( + """ + type SomeObject { + field: String + } + + type UniqueObject { + field: String + } + + schema { + query: SomeObject + mutation: UniqueObject + subscription: SomeObject + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "All root types must be different, 'SomeObject' type" + " is used as query and subscription root types.", + "locations": [(11, 22), (13, 29)], + } + ] + + def rejects_a_schema_where_the_same_type_is_used_for_all_root_types(): + schema = build_schema( + """ + type SomeObject { + field: String + } + + schema { + query: SomeObject + mutation: SomeObject + subscription: SomeObject + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "All root types must be different, 'SomeObject' type" + " is used as query, mutation, and subscription root types.", + "locations": [(7, 22), (8, 25), (9, 29)], + } + ] + + def describe_type_system_objects_must_have_fields(): def accepts_an_object_type_with_fields_object(): schema = build_schema( @@ -486,7 +554,7 @@ def rejects_an_object_type_with_missing_fields(): assert msg == "Type IncompleteObject must define one or more fields." manual_schema_2 = schema_with_field_type( - GraphQLObjectType("IncompleteObject", lambda: {}) + GraphQLObjectType("IncompleteObject", dict) ) msg = validate_schema(manual_schema_2)[0].message assert msg == "Type IncompleteObject must define one or more fields." @@ -632,32 +700,6 @@ def rejects_a_union_type_with_duplicated_member_type(): def rejects_a_union_type_with_non_object_member_types(): # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test: BadUnion - } - - type TypeA { - field: String - } - - type TypeB { - field: String - } - - union BadUnion = - | TypeA - | String - | TypeB - """ - ) - assert str(exc_info.value) == ( - "BadUnion types must be specified" - " as a collection of GraphQLObjectType instances." - ) - # construct invalid schema manually schema = build_schema( """ type Query { @@ -674,28 +716,13 @@ def rejects_a_union_type_with_non_object_member_types(): union BadUnion = | TypeA - | TypeA + | String | TypeB """ ) - with raises(TypeError) as exc_info: - extend_schema(schema, parse("extend union BadUnion = Int")) - assert str(exc_info.value) == ( - "BadUnion types must be specified" - " as a collection of GraphQLObjectType instances." 
- ) - schema = extend_schema(schema, parse("extend union BadUnion = TypeB")) - bad_union: Any = schema.get_type("BadUnion") - types = bad_union.types - assert isinstance(types, tuple) - types = list(types) - assert types[1].name == "TypeA" - types[1] = GraphQLString - assert types[3].name == "TypeB" - types[3] = GraphQLInt - bad_union.types = tuple(types) - bad_union.ast_node.types[1].name.value = "String" - bad_union.extension_ast_nodes[0].types[0].name.value = "Int" + + schema = extend_schema(schema, parse("extend union BadUnion = Int")) + assert validate_schema(schema) == [ { "message": "Union type BadUnion can only include Object types," @@ -721,23 +748,14 @@ def rejects_a_union_type_with_non_object_member_types(): for member_type in bad_union_member_types: # invalid union type cannot be built with Python bad_union = GraphQLUnionType( - "BadUnion", types=[member_type] # type: ignore + "BadUnion", + types=[member_type], # type: ignore ) - with raises(TypeError) as exc_info: - schema_with_field_type(bad_union) - assert str(exc_info.value) == ( - "BadUnion types must be specified" - " as a collection of GraphQLObjectType instances." - ) - # noinspection PyPropertyAccess - bad_union.types = [] bad_schema = schema_with_field_type(bad_union) - # noinspection PyPropertyAccess - bad_union.types = [member_type] assert validate_schema(bad_schema) == [ { "message": "Union type BadUnion can only include Object types," - + f" it cannot include {inspect(member_type)}." + f" it cannot include {inspect(member_type)}." } ] @@ -880,32 +898,6 @@ def rejects_an_input_object_with_multiple_non_breakable_circular_reference(): ] def rejects_an_input_object_type_with_incorrectly_typed_fields(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - field(arg: SomeInputObject): String - } - - type SomeObject { - field: String - } - - union SomeUnion = SomeObject - - input SomeInputObject { - badObject: SomeObject - badUnion: SomeUnion - goodInputObject: SomeInputObject - } - """ - ) - assert str(exc_info.value) == ( - "SomeInputObject fields cannot be resolved." - " Input field type must be a GraphQL input type." - ) - # construct invalid schema manually schema = build_schema( """ type Query { @@ -919,15 +911,12 @@ def rejects_an_input_object_type_with_incorrectly_typed_fields(): union SomeUnion = SomeObject input SomeInputObject { - badObject: SomeInputObject - badUnion: SomeInputObject + badObject: SomeObject + badUnion: SomeUnion goodInputObject: SomeInputObject } """ ) - some_input_obj: Any = schema.get_type("SomeInputObject") - some_input_obj.fields["badObject"].type = schema.get_type("SomeObject") - some_input_obj.fields["badUnion"].type = schema.get_type("SomeUnion") assert validate_schema(schema) == [ { "message": "The type of SomeInputObject.badObject must be Input Type" @@ -1009,23 +998,15 @@ def rejects_an_enum_type_with_incorrectly_named_values(): def describe_type_system_object_fields_must_have_output_types(): def _schema_with_object_field(type_: GraphQLOutputType) -> GraphQLSchema: - if is_output_type(type_): - field = GraphQLField(type_) - else: - # invalid field cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLField(type_) - assert str(exc_info.value) == "Field type must be an output type." 
- # therefore we need to monkey-patch a valid field - field = GraphQLField(GraphQLString) - field.type = type_ - bad_object_type = GraphQLObjectType("BadObject", {"badField": field}) + bad_object_type = GraphQLObjectType( + "BadObject", {"badField": GraphQLField(type_)} + ) return GraphQLSchema( GraphQLObjectType("Query", {"f": GraphQLField(bad_object_type)}), types=[SomeObjectType], ) - @mark.parametrize("type_", output_types, ids=get_name) + @pytest.mark.parametrize("type_", output_types, ids=get_name) def accepts_an_output_type_as_an_object_field_type(type_): schema = _schema_with_object_field(type_) assert validate_schema(schema) == [] @@ -1040,7 +1021,7 @@ def rejects_an_empty_object_field_type(): } ] - @mark.parametrize("type_", not_output_types, ids=get_name) + @pytest.mark.parametrize("type_", not_output_types, ids=get_name) def rejects_a_non_output_type_as_an_object_field_type(type_): schema = _schema_with_object_field(type_) assert validate_schema(schema) == [ @@ -1050,7 +1031,7 @@ def rejects_a_non_output_type_as_an_object_field_type(type_): } ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_an_object_field_type(type_): schema = _schema_with_object_field(type_) assert validate_schema(schema) == [ @@ -1062,27 +1043,10 @@ def rejects_a_non_type_value_as_an_object_field_type(type_): ] def rejects_with_relevant_locations_for_a_non_output_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - field: [SomeInputObject] - } - - input SomeInputObject { - field: String - } - """ - ) - assert str(exc_info.value) == ( - "Query fields cannot be resolved. Field type must be an output type." - ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { - field: [String] + field: [SomeInputObject] } input SomeInputObject { @@ -1090,8 +1054,6 @@ def rejects_with_relevant_locations_for_a_non_output_type(): } """ ) - some_input_obj = schema.get_type("SomeInputObject") - schema.query_type.fields["field"].type.of_type = some_input_obj # type: ignore assert validate_schema(schema) == [ { "message": "The type of Query.field must be Output Type" @@ -1119,27 +1081,27 @@ def rejects_an_object_implementing_a_non_type_value(): ] def rejects_an_object_implementing_a_non_interface_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test: BadObject - } + schema = build_schema( + """ + type Query { + test: BadObject + } - input SomeInputObject { - field: String - } + input SomeInputObject { + field: String + } - type BadObject implements SomeInputObject { - field: String - } - """ - ) - assert str(exc_info.value) == ( - "BadObject interfaces must be specified" - " as a collection of GraphQLInterfaceType instances." + type BadObject implements SomeInputObject { + field: String + } + """ ) + assert validate_schema(schema) == [ + { + "message": "Type BadObject must only implement Interface types," + " it cannot implement SomeInputObject." 
+ } + ] def rejects_an_object_implementing_the_same_interface_twice(): schema = build_schema( @@ -1327,18 +1289,7 @@ def rejects_object_implementing_extended_interface_due_to_type_mismatch(): def describe_type_system_interface_fields_must_have_output_types(): def _schema_with_interface_field(type_: GraphQLOutputType) -> GraphQLSchema: - if is_output_type(type_): - field = GraphQLField(type_) - else: - # invalid field cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLField(type_) - assert str(exc_info.value) == "Field type must be an output type." - # therefore we need to monkey-patch a valid field - field = GraphQLField(GraphQLString) - field.type = type_ - fields = {"badField": field} - + fields = {"badField": GraphQLField(type_)} bad_interface_type = GraphQLInterfaceType("BadInterface", fields) bad_implementing_type = GraphQLObjectType( "BadImplementing", @@ -1350,7 +1301,7 @@ def _schema_with_interface_field(type_: GraphQLOutputType) -> GraphQLSchema: types=[bad_implementing_type, SomeObjectType], ) - @mark.parametrize("type_", output_types, ids=get_name) + @pytest.mark.parametrize("type_", output_types, ids=get_name) def accepts_an_output_type_as_an_interface_field_type(type_): schema = _schema_with_interface_field(type_) assert validate_schema(schema) == [] @@ -1369,7 +1320,7 @@ def rejects_an_empty_interface_field_type(): }, ] - @mark.parametrize("type_", not_output_types, ids=get_name) + @pytest.mark.parametrize("type_", not_output_types, ids=get_name) def rejects_a_non_output_type_as_an_interface_field_type(type_): schema = _schema_with_interface_field(type_) assert validate_schema(schema) == [ @@ -1383,7 +1334,7 @@ def rejects_a_non_output_type_as_an_interface_field_type(type_): }, ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_an_interface_field_type(type_): schema = _schema_with_interface_field(type_) assert validate_schema(schema) == [ @@ -1399,32 +1350,6 @@ def rejects_a_non_type_value_as_an_interface_field_type(type_): ] def rejects_a_non_output_type_as_an_interface_field_with_locations(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test: SomeInterface - } - - interface SomeInterface { - field: SomeInputObject - } - - input SomeInputObject { - foo: String - } - - type SomeObject implements SomeInterface { - field: SomeInputObject - } - """ - ) - assert str(exc_info.value) == ( - "SomeInterface fields cannot be resolved." - " Field type must be an output type." 
- ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { @@ -1432,7 +1357,7 @@ def rejects_a_non_output_type_as_an_interface_field_with_locations(): } interface SomeInterface { - field: String + field: SomeInputObject } input SomeInputObject { @@ -1440,16 +1365,10 @@ def rejects_a_non_output_type_as_an_interface_field_with_locations(): } type SomeObject implements SomeInterface { - field: String + field: SomeInputObject } """ ) - # therefore we need to monkey-patch a valid schema - some_input_obj = schema.get_type("SomeInputObject") - some_interface: Any = schema.get_type("SomeInterface") - some_interface.fields["field"].type = some_input_obj - some_object: Any = schema.get_type("SomeObject") - some_object.fields["field"].type = some_input_obj assert validate_schema(schema) == [ { "message": "The type of SomeInterface.field must be Output Type" @@ -1480,17 +1399,7 @@ def accepts_an_interface_not_implemented_by_at_least_one_object(): def describe_type_system_arguments_must_have_input_types(): def _schema_with_arg(type_: GraphQLInputType) -> GraphQLSchema: - if is_input_type(type_): - argument = GraphQLArgument(type_) - else: - # invalid argument cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLArgument(type_) - assert str(exc_info.value) == "Argument type must be a GraphQL input type." - # therefore we need to monkey-patch a valid argument - argument = GraphQLArgument(GraphQLString) - argument.type = type_ - args = {"badArg": argument} + args = {"badArg": GraphQLArgument(type_)} bad_object_type = GraphQLObjectType( "BadObject", {"badField": GraphQLField(GraphQLString, args)}, @@ -1506,7 +1415,7 @@ def _schema_with_arg(type_: GraphQLInputType) -> GraphQLSchema: ], ) - @mark.parametrize("type_", input_types, ids=get_name) + @pytest.mark.parametrize("type_", input_types, ids=get_name) def accepts_an_input_type_as_a_field_arg_type(type_): schema = _schema_with_arg(type_) assert validate_schema(schema) == [] @@ -1525,7 +1434,7 @@ def rejects_an_empty_field_arg_type(): }, ] - @mark.parametrize("type_", not_input_types, ids=get_name) + @pytest.mark.parametrize("type_", not_input_types, ids=get_name) def rejects_a_non_input_type_as_a_field_arg_type(type_): schema = _schema_with_arg(type_) assert validate_schema(schema) == [ @@ -1539,7 +1448,7 @@ def rejects_a_non_input_type_as_a_field_arg_type(type_): }, ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_a_field_arg_type(type_): schema = _schema_with_arg(type_) assert validate_schema(schema) == [ @@ -1586,28 +1495,10 @@ def rejects_a_required_argument_that_is_deprecated(): ] def rejects_a_non_input_type_as_a_field_arg_with_locations(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test(arg: SomeObject): String - } - - type SomeObject { - foo: String - } - """ - ) - assert str(exc_info.value) == ( - "Query fields cannot be resolved." - " Argument type must be a GraphQL input type." 
- ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { - test(arg: String): String + test(arg: SomeObject): String } type SomeObject { @@ -1615,8 +1506,6 @@ def rejects_a_non_input_type_as_a_field_arg_with_locations(): } """ ) - some_object = schema.get_type("SomeObject") - schema.query_type.fields["test"].args["arg"].type = some_object # type: ignore assert validate_schema(schema) == [ { "message": "The type of Query.test(arg:) must be Input Type" @@ -1628,20 +1517,8 @@ def rejects_a_non_input_type_as_a_field_arg_with_locations(): def describe_type_system_input_object_fields_must_have_input_types(): def _schema_with_input_field(type_: GraphQLInputType) -> GraphQLSchema: - if is_input_type(type_): - input_field = GraphQLInputField(type_) - else: - # invalid input field cannot be built with Python directly - with raises(TypeError) as exc_info: - GraphQLInputField(type_) - assert str(exc_info.value) == ( - "Input field type must be a GraphQL input type." - ) - # therefore we need to monkey-patch a valid input field - input_field = GraphQLInputField(GraphQLString) - input_field.type = type_ bad_input_object_type = GraphQLInputObjectType( - "BadInputObject", {"badField": input_field} + "BadInputObject", {"badField": GraphQLInputField(type_)} ) return GraphQLSchema( GraphQLObjectType( @@ -1655,7 +1532,7 @@ def _schema_with_input_field(type_: GraphQLInputType) -> GraphQLSchema: ) ) - @mark.parametrize("type_", input_types, ids=get_name) + @pytest.mark.parametrize("type_", input_types, ids=get_name) def accepts_an_input_type_as_an_input_field_type(type_): schema = _schema_with_input_field(type_) assert validate_schema(schema) == [] @@ -1670,7 +1547,7 @@ def rejects_an_empty_input_field_type(): } ] - @mark.parametrize("type_", not_input_types, ids=get_name) + @pytest.mark.parametrize("type_", not_input_types, ids=get_name) def rejects_a_non_input_type_as_an_input_field_type(type_): schema = _schema_with_input_field(type_) assert validate_schema(schema) == [ @@ -1680,7 +1557,7 @@ def rejects_a_non_input_type_as_an_input_field_type(type_): } ] - @mark.parametrize("type_", not_graphql_types, ids=get_name) + @pytest.mark.parametrize("type_", not_graphql_types, ids=get_name) def rejects_a_non_type_value_as_an_input_field_type(type_): schema = _schema_with_input_field(type_) assert validate_schema(schema) == [ @@ -1692,28 +1569,6 @@ def rejects_a_non_type_value_as_an_input_field_type(type_): ] def rejects_with_relevant_locations_for_a_non_input_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - test(arg: SomeInputObject): String - } - - input SomeInputObject { - foo: SomeObject - } - - type SomeObject { - bar: String - } - """ - ) - assert str(exc_info.value) == ( - "SomeInputObject fields cannot be resolved." - " Input field type must be a GraphQL input type." 
- ) - # therefore we need to monkey-patch a valid schema schema = build_schema( """ type Query { @@ -1721,7 +1576,7 @@ def rejects_with_relevant_locations_for_a_non_input_type(): } input SomeInputObject { - foo: String + foo: SomeObject } type SomeObject { @@ -1729,9 +1584,6 @@ def rejects_with_relevant_locations_for_a_non_input_type(): } """ ) - some_object = schema.get_type("SomeObject") - some_input_object: Any = schema.get_type("SomeInputObject") - some_input_object.fields["foo"].type = some_object assert validate_schema(schema) == [ { "message": "The type of SomeInputObject.foo must be Input Type" @@ -1741,6 +1593,49 @@ def rejects_with_relevant_locations_for_a_non_input_type(): ] +def describe_type_system_one_of_input_object_fields_must_be_nullable(): + def rejects_non_nullable_fields(): + schema = build_schema( + """ + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject @oneOf { + a: String + b: String! + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "OneOf input field SomeInputObject.b must be nullable.", + "locations": [(8, 18)], + } + ] + + def rejects_fields_with_default_values(): + schema = build_schema( + """ + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject @oneOf { + a: String + b: String = "foo" + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "OneOf input field SomeInputObject.b" + " cannot have a default value.", + "locations": [(8, 15)], + } + ] + + def describe_objects_must_adhere_to_interfaces_they_implement(): def accepts_an_object_which_implements_an_interface(): schema = build_schema( @@ -2336,39 +2231,20 @@ def accepts_an_interface_with_a_subtyped_interface_field_union(): assert validate_schema(schema) == [] def rejects_an_interface_implementing_a_non_interface_type(): - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - type Query { - field: String - } + schema = build_schema( + """ + type Query { + field: String + } - input SomeInputObject { - field: String - } + input SomeInputObject { + field: String + } - interface BadInterface implements SomeInputObject { - field: String - } - """ - ) - assert str(exc_info.value) == ( - "BadInterface interfaces must be specified as a collection" - " of GraphQLInterfaceType instances." 
- ) - # therefore we construct the invalid schema manually - some_input_obj = GraphQLInputObjectType( - "SomeInputObject", {"field": GraphQLInputField(GraphQLString)} - ) - bad_interface = GraphQLInterfaceType( - "BadInterface", {"field": GraphQLField(GraphQLString)} - ) - # noinspection PyTypeChecker - bad_interface.interfaces = (some_input_obj,) - schema = GraphQLSchema( - GraphQLObjectType("Query", {"field": GraphQLField(GraphQLString)}), - types=[bad_interface], + interface BadInterface implements SomeInputObject { + field: String + } + """ ) assert validate_schema(schema) == [ { @@ -2673,19 +2549,17 @@ def rejects_a_circular_interface_implementation(): def describe_assert_valid_schema(): def does_not_throw_on_valid_schemas(): schema = build_schema( - ( - """ + """ type Query { foo: String } """ - ) ) assert_valid_schema(schema) def combines_multiple_errors(): schema = build_schema("type SomeType") - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert_valid_schema(schema) assert ( str(exc_info.value) diff --git a/tests/utilities/test_ast_from_value.py b/tests/utilities/test_ast_from_value.py index de037ca4..947f2b18 100644 --- a/tests/utilities/test_ast_from_value.py +++ b/tests/utilities/test_ast_from_value.py @@ -1,18 +1,18 @@ from math import inf, nan -from pytest import raises +import pytest from graphql.error import GraphQLError from graphql.language import ( BooleanValueNode, + ConstListValueNode, + ConstObjectFieldNode, + ConstObjectValueNode, EnumValueNode, FloatValueNode, IntValueNode, - ListValueNode, NameNode, NullValueNode, - ObjectFieldNode, - ObjectValueNode, StringValueNode, ) from graphql.pyutils import Undefined @@ -58,18 +58,18 @@ def converts_int_values_to_int_asts(): # GraphQL spec does not allow coercing non-integer values to Int to # avoid accidental data loss. - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert ast_from_value(123.5, GraphQLInt) msg = str(exc_info.value) assert msg == "Int cannot represent non-integer value: 123.5" # Note: outside the bounds of 32bit signed int. - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert ast_from_value(1e40, GraphQLInt) msg = str(exc_info.value) assert msg == "Int cannot represent non 32-bit signed integer value: 1e+40" - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: ast_from_value(nan, GraphQLInt) msg = str(exc_info.value) assert msg == "Int cannot represent non-integer value: nan" @@ -126,7 +126,7 @@ def converts_id_values_to_int_or_string_asts(): assert ast_from_value("01", GraphQLID) == StringValueNode(value="01") - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert ast_from_value(False, GraphQLID) assert str(exc_info.value) == "ID cannot represent value: False" @@ -144,17 +144,17 @@ def converts_using_serialize_from_a_custom_scalar_type(): value="value" ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: assert ast_from_value(nan, pass_through_scalar) assert str(exc_info.value) == "Cannot convert value to AST: nan." - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: ast_from_value(inf, pass_through_scalar) assert str(exc_info.value) == "Cannot convert value to AST: inf." 
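 
+        # note: the serialize lambdas below intentionally ignore their argument;
+        # ruff flags this as ARG005 (unused lambda argument), hence the noqa marks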
         return_null_scalar = GraphQLScalarType(
             "ReturnNullScalar",
-            serialize=lambda value: None,
+            serialize=lambda value: None,  # noqa: ARG005
         )
         assert ast_from_value("value", return_null_scalar) is None
 
@@ -164,10 +164,10 @@ class SomeClass:
 
         return_custom_class_scalar = GraphQLScalarType(
             "ReturnCustomClassScalar",
-            serialize=lambda value: SomeClass(),
+            serialize=lambda value: SomeClass(),  # noqa: ARG005
         )
 
-        with raises(TypeError) as exc_info:
+        with pytest.raises(TypeError) as exc_info:
             ast_from_value("value", return_custom_class_scalar)
         msg = str(exc_info.value)
         assert msg == "Cannot convert value to AST: <SomeClass instance>."
@@ -188,12 +188,12 @@ def converts_string_values_to_enum_asts_if_possible():
         assert ast_from_value(complex_value, my_enum) == EnumValueNode(value="COMPLEX")
 
         # Note: case sensitive
-        with raises(GraphQLError) as exc_info:
+        with pytest.raises(GraphQLError) as exc_info:
             ast_from_value("hello", my_enum)
         assert exc_info.value.message == "Enum 'MyEnum' cannot represent value: 'hello'"
 
         # Note: not a valid enum value
-        with raises(GraphQLError) as exc_info:
+        with pytest.raises(GraphQLError) as exc_info:
             ast_from_value("UNKNOWN_VALUE", my_enum)
         assert (
             exc_info.value.message
@@ -203,13 +203,13 @@ def converts_list_values_to_list_asts():
         assert ast_from_value(
             ["FOO", "BAR"], GraphQLList(GraphQLString)
-        ) == ListValueNode(
+        ) == ConstListValueNode(
             values=[StringValueNode(value="FOO"), StringValueNode(value="BAR")]
         )
 
         assert ast_from_value(
             ["HELLO", "GOODBYE"], GraphQLList(my_enum)
-        ) == ListValueNode(
+        ) == ConstListValueNode(
             values=[EnumValueNode(value="HELLO"), EnumValueNode(value="GOODBYE")]
         )
 
         def list_generator():
             yield 3
 
         assert ast_from_value(list_generator(), GraphQLList(GraphQLInt)) == (
-            ListValueNode(
+            ConstListValueNode(
                 values=[
                     IntValueNode(value="1"),
                     IntValueNode(value="2"),
@@ -238,7 +238,7 @@ def skips_invalid_list_items():
             ["FOO", None, "BAR"], GraphQLList(GraphQLNonNull(GraphQLString))
         )
 
-        assert ast == ListValueNode(
+        assert ast == ConstListValueNode(
             values=[StringValueNode(value="FOO"), StringValueNode(value="BAR")]
         )
 
@@ -248,20 +248,24 @@ def skips_invalid_list_items():
         )
 
     def converts_input_objects():
-        assert ast_from_value({"foo": 3, "bar": "HELLO"}, input_obj) == ObjectValueNode(
+        assert ast_from_value(
+            {"foo": 3, "bar": "HELLO"}, input_obj
+        ) == ConstObjectValueNode(
             fields=[
-                ObjectFieldNode(
+                ConstObjectFieldNode(
                     name=NameNode(value="foo"), value=FloatValueNode(value="3")
                 ),
-                ObjectFieldNode(
+                ConstObjectFieldNode(
                     name=NameNode(value="bar"), value=EnumValueNode(value="HELLO")
                 ),
             ]
         )
 
     def converts_input_objects_with_explicit_nulls():
-        assert ast_from_value({"foo": None}, input_obj) == ObjectValueNode(
-            fields=[ObjectFieldNode(name=NameNode(value="foo"), value=NullValueNode())]
+        assert ast_from_value({"foo": None}, input_obj) == ConstObjectValueNode(
+            fields=[
+                ConstObjectFieldNode(name=NameNode(value="foo"), value=NullValueNode())
+            ]
         )
 
     def does_not_convert_non_object_values_as_input_objects():
diff --git a/tests/utilities/test_ast_to_dict.py b/tests/utilities/test_ast_to_dict.py
index d0fa1b24..8e633fae 100644
--- a/tests/utilities/test_ast_to_dict.py
+++ b/tests/utilities/test_ast_to_dict.py
@@ -1,4 +1,4 @@
-from graphql.language import parse, FieldNode, NameNode, OperationType, SelectionSetNode
+from graphql.language import FieldNode, NameNode, OperationType, SelectionSetNode, parse
 
 from graphql.utilities import ast_to_dict
 
@@ -7,7 +7,7 @@ def
converts_name_node_to_dict(): node = NameNode(value="test") res = ast_to_dict(node) assert res == {"kind": "name", "value": "test"} - assert list(res)[0] == "kind" + assert next(iter(res)) == "kind" assert ast_to_dict(node, locations=True) == res assert node.to_dict() == res assert node.to_dict(locations=True) == res @@ -45,6 +45,7 @@ def converts_recursive_ast_to_recursive_dict(): "alias": None, "arguments": [], "directives": None, + "nullability_assertion": None, "selection_set": res, } ], @@ -121,7 +122,7 @@ def converts_simple_schema_to_dict(): ], "kind": "document", } - assert list(res)[0] == "kind" + assert next(iter(res)) == "kind" def converts_simple_schema_to_dict_with_locations(): ast = parse( @@ -283,6 +284,7 @@ def converts_simple_query_to_dict(): "directives": [], "kind": "field", "name": {"kind": "name", "value": "hero"}, + "nullability_assertion": None, "selection_set": { "kind": "selection_set", "selections": [ @@ -292,6 +294,7 @@ def converts_simple_query_to_dict(): "directives": [], "kind": "field", "name": {"kind": "name", "value": "name"}, + "nullability_assertion": None, "selection_set": None, }, { @@ -309,6 +312,7 @@ def converts_simple_query_to_dict(): "kind": "name", "value": "primaryFunction", }, + "nullability_assertion": None, "selection_set": None, } ], @@ -336,6 +340,7 @@ def converts_simple_query_to_dict(): "kind": "name", "value": "height", }, + "nullability_assertion": None, "selection_set": None, } ], @@ -375,7 +380,7 @@ def converts_simple_query_to_dict(): ], "kind": "document", } - assert list(res)[0] == "kind" + assert next(iter(res)) == "kind" def converts_simple_query_to_dict_with_locations(): ast = parse( @@ -441,6 +446,7 @@ def converts_simple_query_to_dict_with_locations(): "loc": {"end": 69, "start": 65}, "value": "hero", }, + "nullability_assertion": None, "selection_set": { "kind": "selection_set", "loc": {"end": 279, "start": 84}, @@ -456,6 +462,7 @@ def converts_simple_query_to_dict_with_locations(): "loc": {"end": 106, "start": 102}, "value": "name", }, + "nullability_assertion": None, "selection_set": None, }, { @@ -483,6 +490,7 @@ def converts_simple_query_to_dict_with_locations(): }, "value": "primaryFunction", }, + "nullability_assertion": None, "selection_set": None, } ], @@ -522,6 +530,7 @@ def converts_simple_query_to_dict_with_locations(): }, "value": "height", }, + "nullability_assertion": None, "selection_set": None, } ], diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index ba9c6050..d0196bd7 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -1,27 +1,33 @@ +from __future__ import annotations + +import pickle +import sys from collections import namedtuple +from copy import deepcopy from typing import Union -from pytest import mark, raises +import pytest from graphql import graphql_sync -from graphql.language import parse, print_ast, DocumentNode, InterfaceTypeDefinitionNode +from graphql.language import DocumentNode, InterfaceTypeDefinitionNode, parse, print_ast from graphql.type import ( - GraphQLDeprecatedDirective, - GraphQLIncludeDirective, - GraphQLSchema, - GraphQLSkipDirective, - GraphQLSpecifiedByDirective, - GraphQLBoolean, - GraphQLFloat, - GraphQLID, - GraphQLInt, - GraphQLString, GraphQLArgument, + GraphQLBoolean, + GraphQLDeprecatedDirective, GraphQLEnumType, GraphQLEnumValue, GraphQLField, + GraphQLFloat, + GraphQLID, + GraphQLIncludeDirective, GraphQLInputField, + GraphQLInt, GraphQLNamedType, + GraphQLOneOfDirective, 
+ GraphQLSchema, + GraphQLSkipDirective, + GraphQLSpecifiedByDirective, + GraphQLString, assert_directive, assert_enum_type, assert_input_object_type, @@ -35,7 +41,13 @@ from graphql.utilities import build_ast_schema, build_schema, print_schema, print_type from ..fixtures import big_schema_sdl # noqa: F401 -from ..utils import dedent +from ..star_wars_schema import star_wars_schema +from ..utils import dedent, viral_sdl + +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias def cycle_sdl(sdl: str) -> str: @@ -50,20 +62,22 @@ def cycle_sdl(sdl: str) -> str: return print_schema(schema) -TypeWithAstNode = Union[ +TypeWithAstNode: TypeAlias = Union[ GraphQLArgument, GraphQLEnumValue, GraphQLField, GraphQLInputField, GraphQLNamedType ] -TypeWithExtensionAstNodes = GraphQLNamedType +TypeWithExtensionAstNodes: TypeAlias = GraphQLNamedType def expect_ast_node(obj: TypeWithAstNode, expected: str) -> None: - assert obj is not None and obj.ast_node is not None + assert obj is not None + assert obj.ast_node is not None assert print_ast(obj.ast_node) == expected def expect_extension_ast_nodes(obj: TypeWithExtensionAstNodes, expected: str) -> None: - assert obj is not None and obj.extension_ast_nodes is not None + assert obj is not None + assert obj.extension_ast_nodes is not None assert "\n\n".join(print_ast(node) for node in obj.extension_ast_nodes) == expected @@ -79,7 +93,9 @@ def can_use_built_schema_for_limited_execution(): ) ) - root_value = namedtuple("Data", "str")(123) # type: ignore + root_value = namedtuple( # noqa: PYI024 + "Data", "str" + )(123) # type: ignore result = graphql_sync(schema=schema, source="{ str }", root_value=root_value) assert result == ({"str": "123"}, None) @@ -223,14 +239,15 @@ def supports_descriptions(): ) assert cycle_sdl(sdl) == sdl - def maintains_include_skip_and_specified_by_url_directives(): + def maintains_include_skip_and_three_other_directives(): schema = build_schema("type Query") - assert len(schema.directives) == 4 + assert len(schema.directives) == 5 assert schema.get_directive("skip") is GraphQLSkipDirective assert schema.get_directive("include") is GraphQLIncludeDirective assert schema.get_directive("deprecated") is GraphQLDeprecatedDirective assert schema.get_directive("specifiedBy") is GraphQLSpecifiedByDirective + assert schema.get_directive("oneOf") is GraphQLOneOfDirective def overriding_directives_excludes_specified(): schema = build_schema( @@ -239,10 +256,11 @@ def overriding_directives_excludes_specified(): directive @include on FIELD directive @deprecated on FIELD_DEFINITION directive @specifiedBy on FIELD_DEFINITION + directive @oneOf on OBJECT """ ) - assert len(schema.directives) == 4 + assert len(schema.directives) == 5 get_directive = schema.get_directive assert get_directive("skip") is not GraphQLSkipDirective assert get_directive("skip") is not None @@ -252,19 +270,22 @@ def overriding_directives_excludes_specified(): assert get_directive("deprecated") is not None assert get_directive("specifiedBy") is not GraphQLSpecifiedByDirective assert get_directive("specifiedBy") is not None + assert get_directive("oneOf") is not GraphQLOneOfDirective + assert get_directive("oneOf") is not None - def adding_directives_maintains_include_skip_and_specified_by_directives(): + def adding_directives_maintains_include_skip_and_three_other_directives(): schema = build_schema( """ directive @foo(arg: Int) on FIELD """ ) - assert len(schema.directives) == 5 + assert len(schema.directives) == 6 
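+        # the five predefined directives (@skip, @include, @deprecated,
+        # @specifiedBy, @oneOf) plus the custom @foo directive defined above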
assert schema.get_directive("skip") is GraphQLSkipDirective assert schema.get_directive("include") is GraphQLIncludeDirective assert schema.get_directive("deprecated") is GraphQLDeprecatedDirective assert schema.get_directive("specifiedBy") is GraphQLSpecifiedByDirective + assert schema.get_directive("oneOf") is GraphQLOneOfDirective assert schema.get_directive("foo") is not None def type_modifiers(): @@ -484,20 +505,18 @@ def multiple_union(): def can_build_recursive_union(): # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - build_schema( - """ - union Hello = Hello + schema = build_schema( + """ + union Hello = Hello - type Query { - hello: Hello - } - """ - ) - assert ( - str(exc_info.value) == "Hello types must be specified" - " as a collection of GraphQLObjectType instances." + type Query { + hello: Hello + } + """ ) + errors = validate_schema(schema) + assert errors + assert isinstance(errors, list) def custom_scalar(): sdl = dedent( @@ -1121,7 +1140,7 @@ def can_build_invalid_schema(): assert errors def do_not_override_standard_types(): - # Note: not sure it's desired behaviour to just silently ignore override + # Note: not sure it's desired behavior to just silently ignore override # attempts so just documenting it here. schema = build_schema( @@ -1155,7 +1174,7 @@ def rejects_invalid_sdl(): foo: String @unknown } """ - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_schema(sdl) assert str(exc_info.value) == "Unknown directive '@unknown'." @@ -1174,40 +1193,148 @@ def throws_on_unknown_types(): unknown: UnknownType } """ - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_schema(sdl, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") - def rejects_invalid_ast(): - with raises(TypeError) as exc_info: - build_ast_schema(None) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." - with raises(TypeError) as exc_info: - build_ast_schema({}) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." 
- - # This currently does not work because of how extend_schema is implemented - @mark.skip(reason="pickling of schemas is not yet supported") - def can_pickle_and_unpickle_big_schema( - big_schema_sdl, # noqa: F811 - ): # pragma: no cover - import pickle - - # create a schema from the kitchen sink SDL - schema = build_schema(big_schema_sdl, assume_valid_sdl=True) - # check that the schema can be pickled - # (particularly, there should be no recursion error, - # or errors because of trying to pickle lambdas or local functions) - dumped = pickle.dumps(schema) - # check that the pickle size is reasonable - assert len(dumped) < 50 * len(big_schema_sdl) - loaded = pickle.loads(dumped) - - # check that the un-pickled schema is still the same - assert loaded == schema - # check that pickling again creates the same result - dumped_again = pickle.dumps(schema) - assert dumped_again == dumped - - # check that printing the unpickled schema gives the same SDL - assert cycle_sdl(print_schema(schema)) == cycle_sdl(big_schema_sdl) + def correctly_processes_viral_schema(): + schema = build_schema(viral_sdl) + query_type = schema.query_type + assert isinstance(query_type, GraphQLNamedType) + assert query_type.name == "Query" + virus_type = schema.get_type("Virus") + assert isinstance(virus_type, GraphQLNamedType) + assert virus_type.name == "Virus" + mutation_type = schema.get_type("Mutation") + assert isinstance(mutation_type, GraphQLNamedType) + assert mutation_type.name == "Mutation" + # Though the viral schema has a 'Mutation' type, it is not used for the + # 'mutation' operation. + assert schema.mutation_type is None + + def describe_deepcopy_and_pickle(): # pragma: no cover + sdl = print_schema(star_wars_schema) + + def can_deep_copy_schema(): + schema = build_schema(sdl, assume_valid_sdl=True) + # create a deepcopy of the schema + copied = deepcopy(schema) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == sdl + + def can_deep_copy_schema_with_directive_using_args_of_custom_type(): + sdl = dedent(""" + directive @someDirective(someArg: SomeEnum) on FIELD_DEFINITION + + enum SomeEnum { + ONE + TWO + } + + type Query { + someField: String @someDirective(someArg: ONE) + } + """) + schema = build_schema(sdl) + copied = deepcopy(schema) + # custom directives on field definitions cannot be reproduced + expected_sdl = sdl.replace(" @someDirective(someArg: ONE)", "") + assert print_schema(copied) == expected_sdl + + def can_pickle_and_unpickle_star_wars_schema(): + # create a schema from the star wars SDL + schema = build_schema(sdl, assume_valid_sdl=True) + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(schema) + + # check that the pickle size is reasonable + assert len(dumped) < 25 * len(sdl) + loaded = pickle.loads(dumped) + + # check that printing the unpickled schema gives the same SDL + assert print_schema(loaded) == sdl + + # check that pickling again creates the same result + dumped = pickle.dumps(schema) + assert len(dumped) < 25 * len(sdl) + loaded = pickle.loads(dumped) + assert print_schema(loaded) == sdl + + def can_deep_copy_pickled_schema(): + # create a schema from the star wars SDL + schema = build_schema(sdl, assume_valid_sdl=True) + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + # check that printing 
the copied schema gives the same SDL + assert print_schema(copied) == sdl + + @pytest.mark.slow + def describe_deepcopy_and_pickle_big(): # pragma: no cover + @pytest.mark.timeout(20) + def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 + # use our printing conventions + big_schema_sdl = cycle_sdl(big_schema_sdl) + + # create a schema from the big SDL + schema = build_schema(big_schema_sdl, assume_valid_sdl=True) + # create a deepcopy of the schema + copied = deepcopy(schema) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == big_schema_sdl + + @pytest.mark.timeout(60) + def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 + # use our printing conventions + big_schema_sdl = cycle_sdl(big_schema_sdl) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # create a schema from the big SDL + schema = build_schema(big_schema_sdl, assume_valid_sdl=True) + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(schema) + + # check that the pickle size is reasonable + assert len(dumped) < 25 * len(big_schema_sdl) + loaded = pickle.loads(dumped) + + # check that printing the unpickled schema gives the same SDL + assert print_schema(loaded) == big_schema_sdl + + # check that pickling again creates the same result + dumped = pickle.dumps(schema) + assert len(dumped) < 25 * len(big_schema_sdl) + loaded = pickle.loads(dumped) + assert print_schema(loaded) == big_schema_sdl + + finally: + sys.setrecursionlimit(limit) + + @pytest.mark.timeout(60) + def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 + # use our printing conventions + big_schema_sdl = cycle_sdl(big_schema_sdl) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # create a schema from the big SDL + schema = build_schema(big_schema_sdl, assume_valid_sdl=True) + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + # check that printing the copied schema gives the same SDL + assert print_schema(copied) == big_schema_sdl + + finally: + sys.setrecursionlimit(limit) diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index b566ba1b..1455f473 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -1,6 +1,6 @@ -from typing import cast +from typing import TYPE_CHECKING, cast -from pytest import raises +import pytest from graphql import graphql_sync from graphql.type import ( @@ -18,19 +18,21 @@ assert_enum_type, ) from graphql.utilities import ( - build_schema, build_client_schema, + build_schema, introspection_from_schema, print_schema, ) -from graphql.utilities.get_introspection_query import ( - IntrospectionEnumType, - IntrospectionInputObjectType, - IntrospectionInterfaceType, - IntrospectionObjectType, - IntrospectionType, - IntrospectionUnionType, -) + +if TYPE_CHECKING: + from graphql.utilities.get_introspection_query import ( + IntrospectionEnumType, + IntrospectionInputObjectType, + IntrospectionInterfaceType, + IntrospectionObjectType, + IntrospectionType, + IntrospectionUnionType, + ) from ..utils import dedent @@ -328,7 +330,7 @@ def builds_a_schema_with_field_arguments(): """A field with a two args""" two( - 
"""This is an list of int arg""" + """This is a list of int arg""" listArg: [Int] """This is a required arg""" @@ -459,7 +461,7 @@ def builds_a_schema_with_field_arguments_with_default_values(): type Query { defaultInt(intArg: Int = 30): String defaultList(listArg: [Int] = [1, 2, 3]): String - defaultObject(objArg: Geo = {lat: 37.485, lon: -122.148}): String + defaultObject(objArg: Geo = { lat: 37.485, lon: -122.148 }): String defaultNull(intArg: Int = null): String noDefault(intArg: Int): String } @@ -651,7 +653,7 @@ def describe_throws_when_given_invalid_introspection(): ) def throws_when_introspection_is_missing_schema_property(): - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker build_client_schema(None) # type: ignore @@ -661,7 +663,7 @@ def throws_when_introspection_is_missing_schema_property(): " and no 'errors' were returned alongside: None." ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: # noinspection PyTypeChecker build_client_schema({}) # type: ignore @@ -680,7 +682,7 @@ def throws_when_referenced_unknown_type(): if type_["name"] != "Query" ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( @@ -704,7 +706,7 @@ def throws_when_missing_definition_for_one_of_the_standard_scalars(): if type_["name"] != "Float" ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).endswith( @@ -715,11 +717,13 @@ def throws_when_missing_definition_for_one_of_the_standard_scalars(): def throws_when_type_reference_is_missing_name(): introspection = introspection_from_schema(dummy_schema) - query_type = cast(IntrospectionType, introspection["__schema"]["queryType"]) + query_type = cast( + "IntrospectionType", introspection["__schema"]["queryType"] + ) assert query_type["name"] == "Query" del query_type["name"] # type: ignore - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == "Unknown type reference: {}." @@ -734,7 +738,7 @@ def throws_when_missing_kind(): assert query_type_introspection["kind"] == "OBJECT" del query_type_introspection["kind"] - with raises( + with pytest.raises( TypeError, match=r"^Invalid or incomplete introspection result\." " Ensure that a full introspection query is used" @@ -745,7 +749,7 @@ def throws_when_missing_kind(): def throws_when_missing_interfaces(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -756,7 +760,7 @@ def throws_when_missing_interfaces(): assert query_type_introspection["interfaces"] == [] del query_type_introspection["interfaces"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Query interfaces cannot be resolved." 
" Introspection result missing interfaces:" @@ -767,7 +771,7 @@ def throws_when_missing_interfaces(): def legacy_support_for_interfaces_with_null_as_interfaces_field(): introspection = introspection_from_schema(dummy_schema) some_interface_introspection = cast( - IntrospectionInterfaceType, + "IntrospectionInterfaceType", next( type_ for type_ in introspection["__schema"]["types"] @@ -784,7 +788,7 @@ def legacy_support_for_interfaces_with_null_as_interfaces_field(): def throws_when_missing_fields(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -795,7 +799,7 @@ def throws_when_missing_fields(): assert query_type_introspection["fields"] del query_type_introspection["fields"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Query fields cannot be resolved." " Introspection result missing fields:" @@ -806,7 +810,7 @@ def throws_when_missing_fields(): def throws_when_missing_field_args(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -818,7 +822,7 @@ def throws_when_missing_field_args(): assert field["args"] del field["args"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Query fields cannot be resolved." r" Introspection result missing field args: {'name': 'foo', .*}\.$", @@ -828,7 +832,7 @@ def throws_when_missing_field_args(): def throws_when_output_type_is_used_as_an_arg_type(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -840,7 +844,7 @@ def throws_when_output_type_is_used_as_an_arg_type(): assert arg["type"]["name"] == "String" arg["type"]["name"] = "SomeUnion" - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).startswith( @@ -852,7 +856,7 @@ def throws_when_output_type_is_used_as_an_arg_type(): def throws_when_output_type_is_used_as_an_input_value_type(): introspection = introspection_from_schema(dummy_schema) input_object_type_introspection = cast( - IntrospectionInputObjectType, + "IntrospectionInputObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -864,7 +868,7 @@ def throws_when_output_type_is_used_as_an_input_value_type(): assert input_field["type"]["name"] == "String" input_field["type"]["name"] = "SomeUnion" - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).startswith( @@ -876,7 +880,7 @@ def throws_when_output_type_is_used_as_an_input_value_type(): def throws_when_input_type_is_used_as_a_field_type(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -888,7 +892,7 @@ def throws_when_input_type_is_used_as_a_field_type(): assert field["type"]["name"] == "String" field["type"]["name"] = "SomeInputObject" - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value).startswith( @@ -900,7 +904,7 @@ def 
throws_when_input_type_is_used_as_a_field_type(): def throws_when_missing_possible_types(): introspection = introspection_from_schema(dummy_schema) some_union_introspection = cast( - IntrospectionUnionType, + "IntrospectionUnionType", next( type_ for type_ in introspection["__schema"]["types"] @@ -911,7 +915,7 @@ def throws_when_missing_possible_types(): assert some_union_introspection["possibleTypes"] del some_union_introspection["possibleTypes"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing possibleTypes:" r" {'kind': 'UNION', 'name': 'SomeUnion', .*}\.$", @@ -921,7 +925,7 @@ def throws_when_missing_possible_types(): def throws_when_missing_enum_values(): introspection = introspection_from_schema(dummy_schema) some_enum_introspection = cast( - IntrospectionEnumType, + "IntrospectionEnumType", next( type_ for type_ in introspection["__schema"]["types"] @@ -932,7 +936,7 @@ def throws_when_missing_enum_values(): assert some_enum_introspection["enumValues"] del some_enum_introspection["enumValues"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing enumValues:" r" {'kind': 'ENUM', 'name': 'SomeEnum', .*}\.$", @@ -942,7 +946,7 @@ def throws_when_missing_enum_values(): def throws_when_missing_input_fields(): introspection = introspection_from_schema(dummy_schema) some_input_object_introspection = cast( - IntrospectionInputObjectType, + "IntrospectionInputObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -953,7 +957,7 @@ def throws_when_missing_input_fields(): assert some_input_object_introspection["inputFields"] del some_input_object_introspection["inputFields"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing inputFields:" r" {'kind': 'INPUT_OBJECT', 'name': 'SomeInputObject', .*}\.$", @@ -968,7 +972,7 @@ def throws_when_missing_directive_locations(): assert some_directive_introspection["locations"] == ["QUERY"] del some_directive_introspection["locations"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing directive locations:" r" {'name': 'SomeDirective', .*}\.$", @@ -983,7 +987,7 @@ def throws_when_missing_directive_args(): assert some_directive_introspection["args"] == [] del some_directive_introspection["args"] # type: ignore - with raises( + with pytest.raises( TypeError, match="^Introspection result missing directive args:" r" {'name': 'SomeDirective', .*}\.$", @@ -991,18 +995,18 @@ def throws_when_missing_directive_args(): build_client_schema(introspection) def describe_very_deep_decorators_are_not_supported(): - def fails_on_very_deep_lists_more_than_7_levels(): + def fails_on_very_deep_lists_more_than_8_levels(): schema = build_schema( """ type Query { - foo: [[[[[[[[String]]]]]]]] + foo: [[[[[[[[[[String]]]]]]]]]] } """ ) introspection = introspection_from_schema(schema) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( @@ -1010,18 +1014,18 @@ def fails_on_very_deep_lists_more_than_7_levels(): " Decorated type deeper than introspection query." ) - def fails_on_a_very_deep_non_null_more_than_7_levels(): + def fails_on_a_very_deep_more_than_8_levels_non_null(): schema = build_schema( """ type Query { - foo: [[[[String!]!]!]!] + foo: [[[[[String!]!]!]!]!] 
} """ ) introspection = introspection_from_schema(schema) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( @@ -1029,12 +1033,12 @@ def fails_on_a_very_deep_non_null_more_than_7_levels(): " Decorated type deeper than introspection query." ) - def succeeds_on_deep_types_less_or_equal_7_levels(): - # e.g., fully non-null 3D matrix + def succeeds_on_deep_less_or_equal_8_levels_types(): + # e.g., fully non-null 4D matrix sdl = dedent( """ type Query { - foo: [[[String!]!]!]! + foo: [[[[String!]!]!]!]! } """ ) @@ -1055,7 +1059,7 @@ def recursive_interfaces(): schema = build_schema(sdl, assume_valid=True) introspection = introspection_from_schema(schema) foo_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -1069,7 +1073,7 @@ def recursive_interfaces(): {"kind": "OBJECT", "name": "Foo", "ofType": None} ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( "Foo interfaces cannot be resolved." @@ -1099,7 +1103,7 @@ def recursive_union(): {"kind": "UNION", "name": "Foo", "ofType": None} ] - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: build_client_schema(introspection) assert str(exc_info.value) == ( "Foo types cannot be resolved." diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index f34e8564..90af6cb9 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from math import nan -from typing import Any, List, NamedTuple, Union +from typing import Any, NamedTuple -from pytest import raises +import pytest from graphql.error import GraphQLError from graphql.pyutils import Undefined @@ -21,12 +23,12 @@ class CoercedValueError(NamedTuple): error: str - path: List[Union[str, int]] + path: list[str | int] value: Any class CoercedValue(NamedTuple): - errors: List[CoercedValueError] + errors: list[CoercedValueError] value: Any @@ -35,13 +37,13 @@ def expect_value(result: CoercedValue) -> Any: return result.value -def expect_errors(result: CoercedValue) -> List[CoercedValueError]: +def expect_errors(result: CoercedValue) -> list[CoercedValueError]: return result.errors def describe_coerce_input_value(): def _coerce_value(input_value: Any, type_: GraphQLInputType): - errors: List[CoercedValueError] = [] + errors: list[CoercedValueError] = [] append = errors.append def on_error(path, invalid_value, error): @@ -249,6 +251,99 @@ def transforms_values_with_out_type(): result = _coerce_value({"real": 1, "imag": 2}, ComplexInputObject) assert expect_value(result) == 1 + 2j + def describe_for_graphql_input_object_that_is_one_of(): + TestInputObject = GraphQLInputObjectType( + "TestInputObject", + { + "foo": GraphQLInputField(GraphQLInt), + "bar": GraphQLInputField(GraphQLInt), + }, + is_one_of=True, + ) + + def returns_no_error_for_a_valid_input(): + result = _coerce_value({"foo": 123}, TestInputObject) + assert expect_value(result) == {"foo": 123} + + def returns_an_error_if_more_than_one_field_is_specified(): + result = _coerce_value({"foo": 123, "bar": None}, TestInputObject) + assert expect_errors(result) == [ + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"foo": 123, "bar": None}, + ) + ] + + def 
returns_an_error_if_the_one_field_is_null(): + result = _coerce_value({"bar": None}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'bar' must be non-null.", + ["bar"], + None, + ) + ] + + def returns_an_error_for_an_invalid_field(): + result = _coerce_value({"foo": nan}, TestInputObject) + assert expect_errors(result) == [ + ( + "Int cannot represent non-integer value: nan", + ["foo"], + nan, + ) + ] + + def returns_multiple_errors_for_multiple_invalid_fields(): + result = _coerce_value({"foo": "abc", "bar": "def"}, TestInputObject) + assert expect_errors(result) == [ + ( + "Int cannot represent non-integer value: 'abc'", + ["foo"], + "abc", + ), + ( + "Int cannot represent non-integer value: 'def'", + ["bar"], + "def", + ), + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"foo": "abc", "bar": "def"}, + ), + ] + + def returns_an_error_for_an_unknown_field(): + result = _coerce_value({"foo": 123, "unknownField": 123}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'unknownField' is not defined by type 'TestInputObject'.", + [], + {"foo": 123, "unknownField": 123}, + ) + ] + + def returns_an_error_for_a_misspelled_field(): + result = _coerce_value({"bart": 123}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'bart' is not defined by type 'TestInputObject'." + " Did you mean 'bar'?", + [], + {"bart": 123}, + ), + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"bart": 123}, + ), + ] + def describe_for_graphql_input_object_with_default_value(): def _get_test_input_object(default_value): return GraphQLInputObjectType( @@ -362,14 +457,14 @@ def returns_nested_null_for_nested_null_values(): def describe_with_default_on_error(): def throw_error_without_path(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert coerce_input_value(None, GraphQLNonNull(GraphQLInt)) assert exc_info.value.message == ( "Invalid value None: Expected non-nullable type 'Int!' not to be None." 
) def throw_error_with_path(): - with raises(GraphQLError) as exc_info: + with pytest.raises(GraphQLError) as exc_info: assert coerce_input_value( [None], GraphQLList(GraphQLNonNull(GraphQLInt)) ) diff --git a/tests/utilities/test_concat_ast.py b/tests/utilities/test_concat_ast.py index 7d25d0b3..4d764562 100644 --- a/tests/utilities/test_concat_ast.py +++ b/tests/utilities/test_concat_ast.py @@ -1,4 +1,4 @@ -from graphql.language import parse, print_ast, Source +from graphql.language import Source, parse, print_ast from graphql.utilities import concat_ast from ..utils import dedent diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 8a844abd..1eb98d38 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from typing import Union -from pytest import raises +import pytest from graphql import graphql_sync from graphql.language import parse, print_ast @@ -23,18 +25,20 @@ assert_object_type, assert_scalar_type, assert_union_type, + specified_directives, validate_schema, ) -from graphql.utilities import ( - build_schema, - concat_ast, - extend_schema, - print_schema, -) +from graphql.utilities import build_schema, concat_ast, extend_schema, print_schema from ..utils import dedent -TypeWithAstNode = Union[ +try: + from typing import TypeAlias +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias + + +TypeWithAstNode: TypeAlias = Union[ GraphQLArgument, GraphQLEnumValue, GraphQLField, @@ -43,19 +47,21 @@ GraphQLSchema, ] -TypeWithExtensionAstNodes = Union[ +TypeWithExtensionAstNodes: TypeAlias = Union[ GraphQLNamedType, GraphQLSchema, ] def expect_extension_ast_nodes(obj: TypeWithExtensionAstNodes, expected: str) -> None: - assert obj is not None and obj.extension_ast_nodes is not None + assert obj is not None + assert obj.extension_ast_nodes is not None assert "\n\n".join(print_ast(node) for node in obj.extension_ast_nodes) == expected def expect_ast_node(obj: TypeWithAstNode, expected: str) -> None: - assert obj is not None and obj.ast_node is not None + assert obj is not None + assert obj.ast_node is not None assert print_ast(obj.ast_node) == expected @@ -100,6 +106,48 @@ def can_be_used_for_limited_execution(): ) assert result == ({"newField": "123"}, None) + def does_not_modify_built_in_types_and_directives(): + schema = build_schema( + """ + type Query { + str: String + int: Int + float: Float + id: ID + bool: Boolean + } + """ + ) + + extension_sdl = dedent( + """ + extend type Query { + foo: String + } + """ + ) + + extended_schema = extend_schema(schema, parse(extension_sdl)) + + # built-ins are used + assert extended_schema.get_type("Int") is GraphQLInt + assert extended_schema.get_type("Float") is GraphQLFloat + assert extended_schema.get_type("String") is GraphQLString + assert extended_schema.get_type("Boolean") is GraphQLBoolean + assert extended_schema.get_type("ID") is GraphQLID + + assert extended_schema.directives == specified_directives + + def preserves_original_schema_config(): + description = "A schema description" + extensions = {"foo": "bar"} + schema = GraphQLSchema(description=description, extensions=extensions) + + extended_schema = extend_schema(schema, parse("scalar Bar")) + + assert extended_schema.description == description + assert extended_schema.extensions is extensions + def extends_objects_by_adding_new_fields(): schema = build_schema( ''' @@ -279,12 +327,17 @@ def 
allows_extension_of_union_by_adding_itself(): extend union SomeUnion = SomeUnion """ ) - # invalid schema cannot be built with Python - with raises(TypeError) as exc_info: - extend_schema(schema, extend_ast) - assert str(exc_info.value) == ( - "SomeUnion types must be specified" - " as a collection of GraphQLObjectType instances." + extended_schema = extend_schema(schema, extend_ast) + + assert validate_schema(extended_schema) + expect_schema_changes( + schema, + extended_schema, + dedent( + """ + union SomeUnion = SomeUnion + """ + ), ) def extends_inputs_by_adding_new_fields(): @@ -1275,7 +1328,7 @@ def rejects_invalid_sdl(): schema = GraphQLSchema() extend_ast = parse("extend schema @unknown") - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value) == "Unknown directive '@unknown'." @@ -1295,23 +1348,10 @@ def throws_on_unknown_types(): } """ ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, ast, assume_valid_sdl=True) assert str(exc_info.value).endswith("Unknown type: 'UnknownType'.") - def rejects_invalid_ast(): - schema = GraphQLSchema() - - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - extend_schema(schema, None) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." - - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - extend_schema(schema, {}) # type: ignore - assert str(exc_info.value) == "Must provide valid Document AST." - def does_not_allow_replacing_a_default_directive(): schema = GraphQLSchema() extend_ast = parse( @@ -1320,11 +1360,10 @@ def does_not_allow_replacing_a_default_directive(): """ ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value).startswith( - "Directive '@include' already exists in the schema." - " It cannot be redefined." + "Directive '@include' already exists in the schema. It cannot be redefined." ) def does_not_allow_replacing_an_existing_enum_value(): @@ -1343,7 +1382,7 @@ def does_not_allow_replacing_an_existing_enum_value(): """ ) - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value).startswith( "Enum value 'SomeEnum.ONE' already exists in the schema." 
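Note on the new OneOf coverage in test_coerce_input_value.py above: these tests exercise OneOf input objects, where exactly one field must be supplied and that field must not be null. Below is a minimal sketch of the behavior being asserted, using the same public API the tests themselves use; the type name ExampleInput and the input values are illustrative only, not part of the changeset.

from graphql.error import GraphQLError
from graphql.type import GraphQLInputField, GraphQLInputObjectType, GraphQLInt
from graphql.utilities import coerce_input_value

# is_one_of=True is the programmatic counterpart of the @oneOf SDL directive
ExampleInput = GraphQLInputObjectType(
    "ExampleInput",
    {
        "foo": GraphQLInputField(GraphQLInt),
        "bar": GraphQLInputField(GraphQLInt),
    },
    is_one_of=True,
)

# exactly one non-null key coerces cleanly
assert coerce_input_value({"foo": 123}, ExampleInput) == {"foo": 123}

# any other shape fails; the default on_error raises a GraphQLError such as
# "Exactly one key must be specified for OneOf type 'ExampleInput'."
try:
    coerce_input_value({"foo": 1, "bar": 2}, ExampleInput)
except GraphQLError as error:
    print(error.message)

The same one-field rule reappears later in this changeset in SDL form (the `input OneOfInput @oneOf` type added to the validation harness) and in the new value_from_ast OneOf coercion cases.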
diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index f0ad2ab7..bfcc7e72 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -1,7 +1,8 @@ from graphql.type import ( - GraphQLSchema, GraphQLDeprecatedDirective, GraphQLIncludeDirective, + GraphQLOneOfDirective, + GraphQLSchema, GraphQLSkipDirective, GraphQLSpecifiedByDirective, ) @@ -754,8 +755,7 @@ def should_detect_all_breaking_changes(): ), ( BreakingChangeType.TYPE_CHANGED_KIND, - "TypeThatChangesType changed from an Object type to an" - " Interface type.", + "TypeThatChangesType changed from an Object type to an Interface type.", ), ( BreakingChangeType.FIELD_REMOVED, @@ -817,6 +817,7 @@ def should_detect_if_a_directive_was_implicitly_removed(): GraphQLSkipDirective, GraphQLIncludeDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, ] ) @@ -986,8 +987,8 @@ def should_detect_if_a_default_value_changed_on_an_argument(): ( DangerousChangeType.ARG_DEFAULT_VALUE_CHANGE, "Type1.field1 arg complexObject has changed defaultValue" - " from {innerInputArray: [{arrayField: [1, 2, 3]}]}" - " to {innerInputArray: [{arrayField: [3, 2, 1]}]}.", + " from { innerInputArray: [{ arrayField: [1, 2, 3] }] }" + " to { innerInputArray: [{ arrayField: [3, 2, 1] }] }.", ), ] diff --git a/tests/utilities/test_get_introspection_query.py b/tests/utilities/test_get_introspection_query.py index 279112bb..348d2cbf 100644 --- a/tests/utilities/test_get_introspection_query.py +++ b/tests/utilities/test_get_introspection_query.py @@ -1,5 +1,6 @@ -import re +from __future__ import annotations +import re from typing import Pattern from graphql.language import parse diff --git a/tests/utilities/test_get_operation_root_type.py b/tests/utilities/test_get_operation_root_type.py deleted file mode 100644 index f0e8a4e3..00000000 --- a/tests/utilities/test_get_operation_root_type.py +++ /dev/null @@ -1,114 +0,0 @@ -from pytest import raises - -from graphql.error import GraphQLError -from graphql.language import ( - parse, - DocumentNode, - OperationDefinitionNode, - OperationTypeDefinitionNode, - SchemaDefinitionNode, -) -from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString -from graphql.utilities import get_operation_root_type - - -query_type = GraphQLObjectType("FooQuery", {"field": GraphQLField(GraphQLString)}) - -mutation_type = GraphQLObjectType("FooMutation", {"field": GraphQLField(GraphQLString)}) - -subscription_type = GraphQLObjectType( - "FooSubscription", {"field": GraphQLField(GraphQLString)} -) - - -def get_operation_node(doc: DocumentNode) -> OperationDefinitionNode: - operation_node = doc.definitions[0] - assert isinstance(operation_node, OperationDefinitionNode) - return operation_node - - -def describe_deprecated_get_operation_root_type(): - def gets_a_query_type_for_an_unnamed_operation_definition_node(): - test_schema = GraphQLSchema(query_type) - doc = parse("{ field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is query_type - - def gets_a_query_type_for_a_named_operation_definition_node(): - test_schema = GraphQLSchema(query_type) - doc = parse("query Q { field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is query_type - - def gets_a_type_for_operation_definition_nodes(): - test_schema = GraphQLSchema(query_type, mutation_type, subscription_type) - doc = parse( - """ - 
schema { - query: FooQuery - mutation: FooMutation - subscription: FooSubscription - } - """ - ) - - schema_node = doc.definitions[0] - assert isinstance(schema_node, SchemaDefinitionNode) - query_node, mutation_node, subscription_node = schema_node.operation_types - assert isinstance(query_node, OperationTypeDefinitionNode) - assert get_operation_root_type(test_schema, query_node) is query_type - assert isinstance(mutation_node, OperationTypeDefinitionNode) - assert get_operation_root_type(test_schema, mutation_node) is mutation_type - assert isinstance(subscription_node, OperationTypeDefinitionNode) - assert ( - get_operation_root_type(test_schema, subscription_node) is subscription_type - ) - - def gets_a_mutation_type_for_an_operation_definition_node(): - test_schema = GraphQLSchema(mutation=mutation_type) - doc = parse("mutation { field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is mutation_type - - def gets_a_subscription_type_for_an_operation_definition_node(): - test_schema = GraphQLSchema(subscription=subscription_type) - doc = parse("subscription { field }") - operation_node = get_operation_node(doc) - assert get_operation_root_type(test_schema, operation_node) is subscription_type - - def throws_when_query_type_not_defined_in_schema(): - test_schema = GraphQLSchema() - doc = parse("query { field }") - operation_node = get_operation_node(doc) - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == ( - "Schema does not define the required query root type." - ) - - def throws_when_mutation_type_not_defined_in_schema(): - test_schema = GraphQLSchema() - doc = parse("mutation { field }") - operation_node = get_operation_node(doc) - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == "Schema is not configured for mutations." - - def throws_when_subscription_type_not_defined_in_schema(): - test_schema = GraphQLSchema() - doc = parse("subscription { field }") - operation_node = get_operation_node(doc) - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == "Schema is not configured for subscriptions." - - def throws_when_operation_not_a_valid_operation_kind(): - test_schema = GraphQLSchema() - doc = parse("{ field }") - operation_node = get_operation_node(doc) - operation_node.operation = "non_existent_operation" # type: ignore - with raises(GraphQLError) as exc_info: - get_operation_root_type(test_schema, operation_node) - assert exc_info.value.message == ( - "Can only have query, mutation and subscription operations." 
- ) diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index d06788c0..1c9dbd52 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -1,11 +1,19 @@ -from graphql.type import GraphQLSchema, GraphQLObjectType, GraphQLField, GraphQLString +import pickle +import sys +from copy import deepcopy + +import pytest + +from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import ( + IntrospectionQuery, build_client_schema, - print_schema, + build_schema, introspection_from_schema, - IntrospectionQuery, + print_schema, ) +from ..fixtures import big_schema_introspection_result, big_schema_sdl # noqa: F401 from ..utils import dedent @@ -14,7 +22,6 @@ def introspection_to_sdl(introspection: IntrospectionQuery) -> str: def describe_introspection_from_schema(): - schema = GraphQLSchema( GraphQLObjectType( "Simple", @@ -60,3 +67,109 @@ def converts_a_simple_schema_without_description(): } """ ) + + def describe_deepcopy_and_pickle(): # pragma: no cover + # introspect the schema + introspected_schema = introspection_from_schema(schema) + introspection_size = len(str(introspected_schema)) + + def can_deep_copy_schema(): + # create a deepcopy of the schema + copied = deepcopy(schema) + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == introspected_schema + + def can_pickle_and_unpickle_schema(): + # check that the schema can be pickled + # (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(schema) + + # check that the pickle size is reasonable + assert len(dumped) < 5 * introspection_size + loaded = pickle.loads(dumped) + + # check that introspecting the unpickled schema gives the same result + assert introspection_from_schema(loaded) == introspected_schema + + # check that pickling again creates the same result + dumped = pickle.dumps(schema) + assert len(dumped) < 5 * introspection_size + loaded = pickle.loads(dumped) + assert introspection_from_schema(loaded) == introspected_schema + + def can_deep_copy_pickled_schema(): + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == introspected_schema + + @pytest.mark.slow + def describe_deepcopy_and_pickle_big(): # pragma: no cover + @pytest.mark.timeout(20) + def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 + # introspect the original big schema + big_schema = build_schema(big_schema_sdl) + expected_introspection = introspection_from_schema(big_schema) + + # create a deepcopy of the schema + copied = deepcopy(big_schema) + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == expected_introspection + + @pytest.mark.timeout(60) + def can_pickle_and_unpickle_big_schema(big_schema_sdl): # noqa: F811 + # introspect the original big schema + big_schema = build_schema(big_schema_sdl) + expected_introspection = introspection_from_schema(big_schema) + size_introspection = len(str(expected_introspection)) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # check that the schema can be pickled + 
# (particularly, there should be no recursion error, + # or errors because of trying to pickle lambdas or local functions) + dumped = pickle.dumps(big_schema) + + # check that the pickle size is reasonable + assert len(dumped) < 5 * size_introspection + loaded = pickle.loads(dumped) + + # check that introspecting the pickled schema gives the same result + assert introspection_from_schema(loaded) == expected_introspection + + # check that pickling again creates the same result + dumped = pickle.dumps(loaded) + assert len(dumped) < 5 * size_introspection + loaded = pickle.loads(dumped) + + # check that introspecting the re-pickled schema gives the same result + assert introspection_from_schema(loaded) == expected_introspection + + finally: + sys.setrecursionlimit(limit) + + @pytest.mark.timeout(60) + def can_deep_copy_pickled_big_schema(big_schema_sdl): # noqa: F811 + # introspect the original big schema + big_schema = build_schema(big_schema_sdl) + expected_introspection = introspection_from_schema(big_schema) + + limit = sys.getrecursionlimit() + sys.setrecursionlimit(max(limit, 4000)) # needed for pickle + + try: + # pickle and unpickle the schema + loaded = pickle.loads(pickle.dumps(big_schema)) + # create a deepcopy of the unpickled schema + copied = deepcopy(loaded) + + # check that introspecting the copied schema gives the same result + assert introspection_from_schema(copied) == expected_introspection + + finally: + sys.setrecursionlimit(limit) diff --git a/tests/utilities/test_lexicographic_sort_schema.py b/tests/utilities/test_lexicographic_sort_schema.py index e93b680b..43740178 100644 --- a/tests/utilities/test_lexicographic_sort_schema.py +++ b/tests/utilities/test_lexicographic_sort_schema.py @@ -1,4 +1,4 @@ -from graphql.utilities import build_schema, print_schema, lexicographic_sort_schema +from graphql.utilities import build_schema, lexicographic_sort_schema, print_schema from ..utils import dedent diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 37337b23..ab997610 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -1,12 +1,17 @@ -from typing import cast, Any, Dict +from __future__ import annotations + +from typing import Any, Dict, cast from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, GraphQLBoolean, + GraphQLDirective, GraphQLEnumType, + GraphQLEnumValue, GraphQLField, GraphQLFloat, + GraphQLInputField, GraphQLInputObjectType, GraphQLInt, GraphQLInterfaceType, @@ -17,17 +22,15 @@ GraphQLSchema, GraphQLString, GraphQLUnionType, - GraphQLInputField, - GraphQLDirective, ) from graphql.utilities import ( build_schema, - print_schema, print_introspection_schema, + print_schema, print_value, ) -from ..utils import dedent +from ..utils import dedent, viral_schema, viral_sdl def expect_printed_schema(schema: GraphQLSchema) -> str: @@ -87,7 +90,7 @@ def prints_non_null_list_of_string_field(): def prints_list_of_non_null_string_field(): schema = build_single_field_schema( - GraphQLField((GraphQLList(GraphQLNonNull(GraphQLString)))) + GraphQLField(GraphQLList(GraphQLNonNull(GraphQLString))) ) assert expect_printed_schema(schema) == dedent( """ @@ -552,7 +555,7 @@ def prints_enum(): def prints_empty_types(): schema = GraphQLSchema( types=[ - GraphQLEnumType("SomeEnum", cast(Dict[str, Any], {})), + GraphQLEnumType("SomeEnum", cast("Dict[str, Any]", {})), GraphQLInputObjectType("SomeInputObject", {}), GraphQLInterfaceType("SomeInterface", {}), 
GraphQLObjectType("SomeObject", {}), @@ -600,13 +603,108 @@ def prints_custom_directives(): ) def prints_an_empty_description(): - schema = build_single_field_schema(GraphQLField(GraphQLString, description="")) + args = { + "someArg": GraphQLArgument(GraphQLString, description=""), + "anotherArg": GraphQLArgument(GraphQLString, description=""), + } + fields = { + "someField": GraphQLField(GraphQLString, args, description=""), + "anotherField": GraphQLField(GraphQLString, args, description=""), + } + query_type = GraphQLObjectType("Query", fields, description="") + scalar_type = GraphQLScalarType("SomeScalar", description="") + interface_type = GraphQLInterfaceType("SomeInterface", fields, description="") + union_type = GraphQLUnionType("SomeUnion", [query_type], description="") + enum_type = GraphQLEnumType( + "SomeEnum", + { + "SOME_VALUE": GraphQLEnumValue("Some Value", description=""), + "ANOTHER_VALUE": GraphQLEnumValue("Another Value", description=""), + }, + description="", + ) + some_directive = GraphQLDirective( + "someDirective", [DirectiveLocation.QUERY], args, description="" + ) + + schema = GraphQLSchema( + query_type, + types=[scalar_type, interface_type, union_type, enum_type], + directives=[some_directive], + description="", + ) assert expect_printed_schema(schema) == dedent( ''' + """""" + schema { + query: Query + } + + """""" + directive @someDirective( + """""" + someArg: String + + """""" + anotherArg: String + ) on QUERY + + """""" + scalar SomeScalar + + """""" + interface SomeInterface { + """""" + someField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + + """""" + anotherField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + } + + """""" + union SomeUnion = Query + + """""" type Query { """""" - singleField: String + someField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + + """""" + anotherField( + """""" + someArg: String + + """""" + anotherArg: String + ): String + } + + """""" + enum SomeEnum { + """""" + SOME_VALUE + + """""" + ANOTHER_VALUE } ''' ) @@ -667,12 +765,17 @@ def prints_introspection_schema(): reason: String = "No longer supported" ) on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE - """Exposes a URL that specifies the behaviour of this scalar.""" + """Exposes a URL that specifies the behavior of this scalar.""" directive @specifiedBy( - """The URL that specifies the behaviour of this scalar.""" + """The URL that specifies the behavior of this scalar.""" url: String! ) on SCALAR + """ + Indicates exactly one field must be supplied and this field must not be `null`. + """ + directive @oneOf on INPUT_OBJECT + """ A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations. """ @@ -691,7 +794,7 @@ def prints_introspection_schema(): mutationType: __Type """ - If this server support subscription, the type that subscription operations will be rooted at. + If this server supports subscription, the type that subscription operations will be rooted at. """ subscriptionType: __Type @@ -715,6 +818,7 @@ def prints_introspection_schema(): enumValues(includeDeprecated: Boolean = false): [__EnumValue!] inputFields(includeDeprecated: Boolean = false): [__InputValue!] 
ofType: __Type + isOneOf: Boolean } """An enum describing what kind of type a given `__Type` is.""" @@ -865,6 +969,10 @@ def prints_introspection_schema(): ''' # noqa: E501 ) + def prints_viral_schema_correctly(): + printed = print_schema(viral_schema) + assert printed == viral_sdl + def describe_print_value(): def print_value_convenience_function(): diff --git a/tests/utilities/test_sort_value_node.py b/tests/utilities/test_sort_value_node.py index 5d246567..5ec97db1 100644 --- a/tests/utilities/test_sort_value_node.py +++ b/tests/utilities/test_sort_value_node.py @@ -21,13 +21,13 @@ def do_not_change_non_object_values(): ) def sort_input_object_fields(): - _expect_sorted_value("{ b: 2, a: 1 }", "{a: 1, b: 2}") - _expect_sorted_value("{ a: { c: 3, b: 2 } }", "{a: {b: 2, c: 3}}") + _expect_sorted_value("{ b: 2, a: 1 }", "{ a: 1, b: 2 }") + _expect_sorted_value("{ a: { c: 3, b: 2 } }", "{ a: { b: 2, c: 3 } }") _expect_sorted_value( "[{ b: 2, a: 1 }, { d: 4, c: 3}]", - "[{a: 1, b: 2}, {c: 3, d: 4}]", + "[{ a: 1, b: 2 }, { c: 3, d: 4 }]", ) _expect_sorted_value( "{ b: { g: 7, f: 6 }, c: 3 , a: { d: 4, e: 5 } }", - "{a: {d: 4, e: 5}, b: {f: 6, g: 7}, c: 3}", + "{ a: { d: 4, e: 5 }, b: { f: 6, g: 7 }, c: 3 }", ) diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index 67e5b4e1..cdc6062d 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -1,7 +1,6 @@ -from json import dumps -from typing import Optional +from __future__ import annotations -from pytest import raises +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind, parse @@ -10,34 +9,8 @@ from ..fixtures import kitchen_sink_query, kitchen_sink_sdl # noqa: F401 from ..utils import dedent -ignored_tokens = [ - # UnicodeBOM - "\uFEFF", # Byte Order Mark (U+FEFF) - # WhiteSpace - "\t", # Horizontal Tab (U+0009) - " ", # Space (U+0020) - # LineTerminator - "\n", # "New Line (U+000A)" - "\r", # "Carriage Return (U+000D)" [ lookahead ! 
"New Line (U+000A)" ] - "\r\n", # "Carriage Return (U+000D)" "New Line (U+000A)" - # Comment - '# "Comment" string\n', # `#` CommentChar* - # Comma - ",", # , -] - -punctuator_tokens = ["!", "$", "(", ")", "...", ":", "=", "@", "[", "]", "{", "|", "}"] - -non_punctuator_tokens = [ - "name_token", # Name - "1", # IntValue - "3.14", # FloatValue - '"some string value"', # StringValue - '"""block\nstring\nvalue"""', # StringValue(BlockString) -] - - -def lex_value(s: str) -> Optional[str]: + +def lex_value(s: str) -> str | None: lexer = Lexer(Source(s)) value = lexer.advance().value assert lexer.advance().kind == TokenKind.EOF, "Expected EOF" @@ -51,24 +24,10 @@ def __init__(self, doc_string: str): def to_equal(self, expected: str): doc_string = self.doc_string stripped = strip_ignored_characters(doc_string) - - assert stripped == expected, dedent( - f""" - Expected strip_ignored_characters({doc_string!r}) - to equal {expected!r} - but got {stripped!r} - """ - ) + assert stripped == expected stripped_twice = strip_ignored_characters(stripped) - - assert stripped == stripped_twice, dedent( - f"""" - Expected strip_ignored_characters({stripped!r})" - to equal {stripped!r} - but got {stripped_twice!r} - """ - ) + assert stripped == stripped_twice def to_stay_the_same(self): self.to_equal(self.doc_string) @@ -114,8 +73,23 @@ def strips_ignored_characters_from_graphql_sdl_document(): '"""Type description""" type Foo{"""Field description""" bar:String}' ) + def strips_ignored_characters_from_source(): + source = Source( + dedent( + """ + { + foo { + bar + } + } + """ + ) + ) + + assert strip_ignored_characters(source) == "{foo{bar}}" + def report_document_with_invalid_token(): - with raises(GraphQLSyntaxError) as exc_info: + with pytest.raises(GraphQLSyntaxError) as exc_info: strip_ignored_characters('{ foo(arg: "\n"') assert str(exc_info.value) == dedent( @@ -138,14 +112,6 @@ def strips_documents_with_only_ignored_characters(): ExpectStripped(",,").to_equal("") ExpectStripped("#comment\n, \n").to_equal("") - for ignored in ignored_tokens: - ExpectStripped(ignored).to_equal("") - - for another_ignored in ignored_tokens: - ExpectStripped(ignored + another_ignored).to_equal("") - - ExpectStripped("".join(ignored_tokens)).to_equal("") - def strips_leading_and_trailing_ignored_tokens(): ExpectStripped("\n1").to_equal("1") ExpectStripped(",1").to_equal("1") @@ -157,18 +123,6 @@ def strips_leading_and_trailing_ignored_tokens(): ExpectStripped("1,,").to_equal("1") ExpectStripped("1#comment\n, \n").to_equal("1") - for token in punctuator_tokens + non_punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(ignored + token).to_equal(token) - ExpectStripped(token + ignored).to_equal(token) - - for another_ignored in ignored_tokens: - ExpectStripped(token + ignored + ignored).to_equal(token) - ExpectStripped(ignored + another_ignored + token).to_equal(token) - - ExpectStripped("".join(ignored_tokens) + token).to_equal(token) - ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) - def strips_ignored_tokens_between_punctuator_tokens(): ExpectStripped("[,)").to_equal("[)") ExpectStripped("[\r)").to_equal("[)") @@ -176,20 +130,6 @@ def strips_ignored_tokens_between_punctuator_tokens(): ExpectStripped("[\r,)").to_equal("[)") ExpectStripped("[,\n)").to_equal("[)") - for left in punctuator_tokens: - for right in punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(left + ignored + right).to_equal(left + right) - - for another_ignored in ignored_tokens: - ExpectStripped( - left 
+ ignored + another_ignored + right - ).to_equal(left + right) - - ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( - left + right - ) - def strips_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): ExpectStripped("[,1").to_equal("[1") ExpectStripped("[\r1").to_equal("[1") @@ -197,22 +137,6 @@ def strips_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): ExpectStripped("[\r,1").to_equal("[1") ExpectStripped("[,\n1").to_equal("[1") - for non_punctuator in non_punctuator_tokens: - for punctuator in punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(punctuator + ignored + non_punctuator).to_equal( - punctuator + non_punctuator - ) - - for another_ignored in ignored_tokens: - ExpectStripped( - punctuator + ignored + another_ignored + non_punctuator - ).to_equal(punctuator + non_punctuator) - - ExpectStripped( - punctuator + "".join(ignored_tokens) + non_punctuator - ).to_equal(punctuator + non_punctuator) - def strips_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): ExpectStripped("1,[").to_equal("1[") ExpectStripped("1\r[").to_equal("1[") @@ -220,46 +144,11 @@ def strips_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): ExpectStripped("1\r,[").to_equal("1[") ExpectStripped("1,\n[").to_equal("1[") - for non_punctuator in non_punctuator_tokens: - for punctuator in punctuator_tokens: - # Special case for that is handled in the below test - if punctuator == "...": - continue - - for ignored in ignored_tokens: - ExpectStripped(non_punctuator + ignored + punctuator).to_equal( - non_punctuator + punctuator - ) - - for another_ignored in ignored_tokens: - ExpectStripped( - non_punctuator + ignored + another_ignored + punctuator - ).to_equal(non_punctuator + punctuator) - - ExpectStripped( - non_punctuator + "".join(ignored_tokens) + punctuator - ).to_equal(non_punctuator + punctuator) - def replace_ignored_tokens_between_non_punctuator_tokens_and_spread_with_space(): ExpectStripped("a ...").to_equal("a ...") ExpectStripped("1 ...").to_equal("1 ...") ExpectStripped("1 ... ...").to_equal("1 ......") - for non_punctuator in non_punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(non_punctuator + ignored + "...").to_equal( - non_punctuator + " ..." - ) - - for another_ignored in ignored_tokens: - ExpectStripped( - non_punctuator + ignored + another_ignored + " ..." - ).to_equal(non_punctuator + " ...") - - ExpectStripped(non_punctuator + "".join(ignored_tokens) + "...").to_equal( - non_punctuator + " ..." 
- ) - def replace_ignored_tokens_between_non_punctuator_tokens_with_space(): ExpectStripped("1 2").to_stay_the_same() ExpectStripped('"" ""').to_stay_the_same() @@ -270,57 +159,17 @@ def replace_ignored_tokens_between_non_punctuator_tokens_with_space(): ExpectStripped("a 1").to_equal("a 1") ExpectStripped("a \t 1").to_equal("a 1") - for left in non_punctuator_tokens: - for right in non_punctuator_tokens: - for ignored in ignored_tokens: - ExpectStripped(left + ignored + right).to_equal(left + " " + right) - - for another_ignored in ignored_tokens: - ExpectStripped( - left + ignored + another_ignored + right - ).to_equal(left + " " + right) - - ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( - left + " " + right - ) - def does_not_strip_ignored_tokens_embedded_in_the_string(): ExpectStripped('" "').to_stay_the_same() ExpectStripped('","').to_stay_the_same() ExpectStripped('",,"').to_stay_the_same() ExpectStripped('",|"').to_stay_the_same() - for ignored in ignored_tokens: - ExpectStripped(dumps(ignored)).to_stay_the_same() - - for another_ignored in ignored_tokens: - ExpectStripped(dumps(ignored + another_ignored)).to_stay_the_same() - - ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() - def does_not_strip_ignored_tokens_embedded_in_the_block_string(): ExpectStripped('""","""').to_stay_the_same() ExpectStripped('""",,"""').to_stay_the_same() ExpectStripped('""",|"""').to_stay_the_same() - ignored_tokens_without_formatting = [ - token - for token in ignored_tokens - if token not in ["\n", "\r", "\r\n", "\t", " "] - ] - - for ignored in ignored_tokens_without_formatting: - ExpectStripped('"""|' + ignored + '|"""').to_stay_the_same() - - for another_ignored in ignored_tokens_without_formatting: - ExpectStripped( - '"""|' + ignored + another_ignored + '|"""' - ).to_stay_the_same() - - ExpectStripped( - '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' - ).to_stay_the_same() - def strips_ignored_characters_inside_block_strings(): # noinspection PyShadowingNames def expect_stripped_string(block_str: str): @@ -364,8 +213,16 @@ def strips_kitchen_sink_query_but_maintains_the_exact_same_ast( stripped_query = strip_ignored_characters(kitchen_sink_query) assert strip_ignored_characters(stripped_query) == stripped_query - query_ast = parse(kitchen_sink_query, no_location=True) - stripped_ast = parse(stripped_query, no_location=True) + query_ast = parse( + kitchen_sink_query, + no_location=True, + experimental_client_controlled_nullability=True, + ) + stripped_ast = parse( + stripped_query, + no_location=True, + experimental_client_controlled_nullability=True, + ) assert stripped_ast == query_ast # noinspection PyShadowingNames diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 019ec5fb..4c276e07 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -1,6 +1,8 @@ -from typing import Optional +from __future__ import annotations -from pytest import mark +from json import dumps + +import pytest from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind @@ -8,8 +10,64 @@ from ..utils import dedent, gen_fuzz_strings +ignored_tokens = [ + # UnicodeBOM + "\ufeff", # Byte Order Mark (U+FEFF) + # WhiteSpace + "\t", # Horizontal Tab (U+0009) + " ", # Space (U+0020) + # LineTerminator + "\n", # "New Line (U+000A)" + "\r", # "Carriage Return (U+000D)" [ lookahead ! 
"New Line (U+000A)" ] + "\r\n", # "Carriage Return (U+000D)" "New Line (U+000A)" + # Comment + '# "Comment" string\n', # `#` CommentChar* + # Comma + ",", # , +] + +punctuator_tokens = ["!", "$", "(", ")", "...", ":", "=", "@", "[", "]", "{", "|", "}"] + +non_punctuator_tokens = [ + "name_token", # Name + "1", # IntValue + "3.14", # FloatValue + '"some string value"', # StringValue + '"""block\nstring\nvalue"""', # StringValue(BlockString) +] + + +class ExpectStripped: + def __init__(self, doc_string: str): + self.doc_string = doc_string + + def to_equal(self, expected: str): + doc_string = self.doc_string + stripped = strip_ignored_characters(doc_string) + + assert stripped == expected, dedent( + f""" + Expected strip_ignored_characters({doc_string!r}) + to equal {expected!r} + but got {stripped!r} + """ + ) + + stripped_twice = strip_ignored_characters(stripped) + + assert stripped == stripped_twice, dedent( + f""" + Expected strip_ignored_characters({stripped!r})" + to equal {stripped!r} + but got {stripped_twice!r} + """ + ) + + def to_stay_the_same(self): + self.to_equal(self.doc_string) + -def lex_value(s: str) -> Optional[str]: +def lex_value(s: str) -> str | None: lexer = Lexer(Source(s)) value = lexer.advance().value assert lexer.advance().kind == TokenKind.EOF, "Expected EOF" @@ -17,8 +75,160 @@ def lex_value(s: str) -> Optional[str]: def describe_strip_ignored_characters(): - @mark.slow - @mark.timeout(20) + @pytest.mark.slow + @pytest.mark.timeout(10) + def strips_documents_with_random_combination_of_ignored_characters(): + for ignored in ignored_tokens: + ExpectStripped(ignored).to_equal("") + + for another_ignored in ignored_tokens: + ExpectStripped(ignored + another_ignored).to_equal("") + + ExpectStripped("".join(ignored_tokens)).to_equal("") + + @pytest.mark.slow + @pytest.mark.timeout(10) + def strips_random_leading_and_trailing_ignored_tokens(): + for token in punctuator_tokens + non_punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(ignored + token).to_equal(token) + ExpectStripped(token + ignored).to_equal(token) + + for another_ignored in ignored_tokens: + ExpectStripped(token + ignored + ignored).to_equal(token) + ExpectStripped(ignored + another_ignored + token).to_equal(token) + + ExpectStripped("".join(ignored_tokens) + token).to_equal(token) + ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) + + @pytest.mark.slow + @pytest.mark.timeout(10) + def strips_random_ignored_tokens_between_punctuator_tokens(): + for left in punctuator_tokens: + for right in punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(left + ignored + right).to_equal(left + right) + + for another_ignored in ignored_tokens: + ExpectStripped( + left + ignored + another_ignored + right + ).to_equal(left + right) + + ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( + left + right + ) + + @pytest.mark.slow + @pytest.mark.timeout(10) + def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): + for non_punctuator in non_punctuator_tokens: + for punctuator in punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(punctuator + ignored + non_punctuator).to_equal( + punctuator + non_punctuator + ) + + for another_ignored in ignored_tokens: + ExpectStripped( + punctuator + ignored + another_ignored + non_punctuator + ).to_equal(punctuator + non_punctuator) + + ExpectStripped( + punctuator + "".join(ignored_tokens) + non_punctuator + ).to_equal(punctuator + non_punctuator) + + @pytest.mark.slow + 
@pytest.mark.timeout(10) + def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): + for non_punctuator in non_punctuator_tokens: + for punctuator in punctuator_tokens: + # Special case for that is handled in the below test + if punctuator == "...": + continue + + for ignored in ignored_tokens: + ExpectStripped(non_punctuator + ignored + punctuator).to_equal( + non_punctuator + punctuator + ) + + for another_ignored in ignored_tokens: + ExpectStripped( + non_punctuator + ignored + another_ignored + punctuator + ).to_equal(non_punctuator + punctuator) + + ExpectStripped( + non_punctuator + "".join(ignored_tokens) + punctuator + ).to_equal(non_punctuator + punctuator) + + @pytest.mark.slow + @pytest.mark.timeout(10) + def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space(): + for non_punctuator in non_punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(non_punctuator + ignored + "...").to_equal( + non_punctuator + " ..." + ) + + for another_ignored in ignored_tokens: + ExpectStripped( + non_punctuator + ignored + another_ignored + " ..." + ).to_equal(non_punctuator + " ...") + + ExpectStripped(non_punctuator + "".join(ignored_tokens) + "...").to_equal( + non_punctuator + " ..." + ) + + @pytest.mark.slow + @pytest.mark.timeout(10) + def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): + for left in non_punctuator_tokens: + for right in non_punctuator_tokens: + for ignored in ignored_tokens: + ExpectStripped(left + ignored + right).to_equal(left + " " + right) + + for another_ignored in ignored_tokens: + ExpectStripped( + left + ignored + another_ignored + right + ).to_equal(left + " " + right) + + ExpectStripped(left + "".join(ignored_tokens) + right).to_equal( + left + " " + right + ) + + @pytest.mark.slow + @pytest.mark.timeout(10) + def does_not_strip_random_ignored_tokens_embedded_in_the_string(): + for ignored in ignored_tokens: + ExpectStripped(dumps(ignored)).to_stay_the_same() + + for another_ignored in ignored_tokens: + ExpectStripped(dumps(ignored + another_ignored)).to_stay_the_same() + + ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() + + @pytest.mark.slow + @pytest.mark.timeout(10) + def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): + ignored_tokens_without_formatting = [ + token + for token in ignored_tokens + if token not in ["\n", "\r", "\r\n", "\t", " "] + ] + + for ignored in ignored_tokens_without_formatting: + ExpectStripped('"""|' + ignored + '|"""').to_stay_the_same() + + for another_ignored in ignored_tokens_without_formatting: + ExpectStripped( + '"""|' + ignored + another_ignored + '|"""' + ).to_stay_the_same() + + ExpectStripped( + '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' + ).to_stay_the_same() + + @pytest.mark.slow + @pytest.mark.timeout(80) def strips_ignored_characters_inside_random_block_strings(): # Testing with length >7 is taking exponentially more time. However it is # highly recommended to test with increased limit if you make any change. 
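For orientation on the fuzz suite above: every case pins down the same contract for strip_ignored_characters, which removes ignored tokens (whitespace, commas, comments, and the byte order mark) and keeps a single space only where two non-punctuator tokens would otherwise run together. A small illustrative sketch; the query strings are made up, not taken from the tests.

from graphql.utilities import strip_ignored_characters

# commas and whitespace around punctuators are dropped entirely
assert strip_ignored_characters("{ foo(arg: 1, other: 2) }") == "{foo(arg:1 other:2)}"

# one space is preserved between adjacent non-punctuator tokens
# ("query" / "SomeQuery"), since removing it would merge them
assert strip_ignored_characters("query SomeQuery { field }") == "query SomeQuery{field}"

The spread punctuator `...` is the one punctuator that still needs a separating space after a non-punctuator token (`1 ...` stays `1 ...`), which is why the fuzz tests above special-case it.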
diff --git a/tests/utilities/test_type_from_ast.py b/tests/utilities/test_type_from_ast.py index 9842a31f..fa75a9f9 100644 --- a/tests/utilities/test_type_from_ast.py +++ b/tests/utilities/test_type_from_ast.py @@ -1,6 +1,6 @@ -from pytest import raises +import pytest -from graphql.language import parse_type, TypeNode +from graphql.language import TypeNode, parse_type from graphql.type import GraphQLList, GraphQLNonNull, GraphQLObjectType from graphql.utilities import type_from_ast @@ -32,7 +32,7 @@ def for_non_null_type_node(): def for_unspecified_type_node(): node = TypeNode() - with raises(TypeError) as exc_info: + with pytest.raises(TypeError) as exc_info: type_from_ast(test_schema, node) msg = str(exc_info.value) assert msg == "Unexpected type node: ." diff --git a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index 650697f4..01f7e464 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -1,21 +1,29 @@ +from __future__ import annotations + from graphql.language import ( FieldNode, NameNode, Node, OperationDefinitionNode, SelectionSetNode, + Visitor, parse, parse_value, print_ast, visit, - Visitor, ) -from graphql.type import GraphQLSchema, get_named_type, is_composite_type +from graphql.type import ( + GraphQLSchema, + SchemaMetaFieldDef, + TypeMetaFieldDef, + TypeNameMetaFieldDef, + get_named_type, + is_composite_type, +) from graphql.utilities import TypeInfo, TypeInfoVisitor, build_schema from ..fixtures import kitchen_sink_query # noqa: F401 - test_schema = build_schema( """ interface Pet { @@ -39,9 +47,13 @@ name(surname: Boolean): String } + union HumanOrAlien = Human | Alien + type QueryRoot { human(id: ID): Human alien: Alien + humanOrAlien: HumanOrAlien + pet: Pet } schema { @@ -140,6 +152,77 @@ def leave(self, *args): assert test_visitor.args == wrapped_visitor.args + def supports_introspection_fields(): + type_info = TypeInfo(test_schema) + + ast = parse( + """ + { + __typename + __type(name: "Cat") { __typename } + __schema { + __typename # in object type + } + humanOrAlien { + __typename # in union type + } + pet { + __typename # in interface type + } + someUnknownType { + __typename # unknown + } + pet { + __type # unknown + __schema # unknown + } + } + """ + ) + + visited_fields: list[tuple[str | None, str | None]] = [] + + class TestVisitor(Visitor): + @staticmethod + def enter_field(*_args): + parent_type = type_info.get_parent_type() + type_name = getattr(type_info.get_parent_type(), "name", None) + field_def = type_info.get_field_def() + fields = getattr(parent_type, "fields", {}) + fields = dict( + **fields, + __type=TypeMetaFieldDef, + __typename=TypeNameMetaFieldDef, + __schema=SchemaMetaFieldDef, + ) + for name, field in fields.items(): + if field is field_def: + field_name = name + break + else: + field_name = None + visited_fields.append((type_name, field_name)) + + test_visitor = TestVisitor() + assert visit(ast, TypeInfoVisitor(type_info, test_visitor)) + + assert visited_fields == [ + ("QueryRoot", "__typename"), + ("QueryRoot", "__type"), + ("__Type", "__typename"), + ("QueryRoot", "__schema"), + ("__Schema", "__typename"), + ("QueryRoot", "humanOrAlien"), + ("HumanOrAlien", "__typename"), + ("QueryRoot", "pet"), + ("Pet", "__typename"), + ("QueryRoot", None), + (None, None), + ("QueryRoot", "pet"), + ("Pet", None), + ("Pet", None), + ] + def maintains_type_info_during_visit(): visited = [] @@ -267,6 +350,8 @@ def enter(*args): ), ) + return None + @staticmethod def leave(*args): parent_type = 
type_info.get_parent_type() @@ -290,8 +375,7 @@ def leave(*args): assert print_ast(edited_ast) == print_ast( parse( - "{ human(id: 4) { name, pets { __typename } }," - " alien { __typename } }" + "{ human(id: 4) { name, pets { __typename } }, alien { __typename } }" ) ) diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py index cd72e278..6622b4dc 100644 --- a/tests/utilities/test_value_from_ast.py +++ b/tests/utilities/test_value_from_ast.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from math import isnan, nan -from typing import Any, Dict, Optional +from typing import Any -from graphql.language import parse_value, ValueNode +from graphql.language import ValueNode, parse_value from graphql.pyutils import Undefined from graphql.type import ( GraphQLBoolean, @@ -24,7 +26,7 @@ def describe_value_from_ast(): def _value_from( value_text: str, type_: GraphQLInputType, - variables: Optional[Dict[str, Any]] = None, + variables: dict[str, Any] | None = None, ): ast = parse_value(value_text) return value_from_ast(ast, type_, variables) @@ -112,15 +114,15 @@ def converts_enum_values_according_to_input_coercion_rules(): assert isnan(_value_from("NAN", test_enum)) assert _value_from("NO_CUSTOM_VALUE", test_enum) is Undefined - # Boolean! + # make a Boolean! non_null_bool = GraphQLNonNull(GraphQLBoolean) - # [Boolean] + # make a [Boolean] list_of_bool = GraphQLList(GraphQLBoolean) - # [Boolean!] + # make a [Boolean!] list_of_non_null_bool = GraphQLList(non_null_bool) - # [Boolean]! + # make a [Boolean]! non_null_list_of_bool = GraphQLNonNull(list_of_bool) - # [Boolean!]! + # make a [Boolean!]! non_null_list_of_non_mull_bool = GraphQLNonNull(list_of_non_null_bool) def coerces_to_null_unless_non_null(): @@ -172,6 +174,15 @@ def coerces_non_null_lists_of_non_null_values(): }, ) + test_one_of_input_obj = GraphQLInputObjectType( + "TestOneOfInput", + { + "a": GraphQLInputField(GraphQLString), + "b": GraphQLInputField(GraphQLString), + }, + is_one_of=True, + ) + def coerces_input_objects_according_to_input_coercion_rules(): assert _value_from("null", test_input_obj) is None assert _value_from("[]", test_input_obj) is Undefined @@ -191,6 +202,14 @@ def coerces_input_objects_according_to_input_coercion_rules(): ) assert _value_from("{ requiredBool: null }", test_input_obj) is Undefined assert _value_from("{ bool: true }", test_input_obj) is Undefined + assert _value_from('{ a: "abc" }', test_one_of_input_obj) == {"a": "abc"} + assert _value_from('{ b: "def" }', test_one_of_input_obj) == {"b": "def"} + assert _value_from('{ a: "abc", b: None }', test_one_of_input_obj) is Undefined + assert _value_from("{ a: null }", test_one_of_input_obj) is Undefined + assert _value_from("{ a: 1 }", test_one_of_input_obj) is Undefined + assert _value_from('{ a: "abc", b: "def" }', test_one_of_input_obj) is Undefined + assert _value_from("{}", test_one_of_input_obj) is Undefined + assert _value_from('{ c: "abc" }', test_one_of_input_obj) is Undefined def accepts_variable_values_assuming_already_coerced(): assert _value_from("$var", GraphQLBoolean, {}) is Undefined diff --git a/tests/utilities/test_value_from_ast_untyped.py b/tests/utilities/test_value_from_ast_untyped.py index 4b82138d..0461cc20 100644 --- a/tests/utilities/test_value_from_ast_untyped.py +++ b/tests/utilities/test_value_from_ast_untyped.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from math import nan -from typing import Any, Dict, Optional +from typing import Any -from graphql.language import 
parse_value, FloatValueNode, IntValueNode +from graphql.language import FloatValueNode, IntValueNode, parse_value from graphql.pyutils import Undefined from graphql.utilities import value_from_ast_untyped @@ -23,7 +25,7 @@ def _expect_value_from(value_text: str, expected: Any): _compare_value(value, expected) def _expect_value_from_vars( - value_text: str, variables: Optional[Dict[str, Any]], expected: Any + value_text: str, variables: dict[str, Any] | None, expected: Any ): ast = parse_value(value_text) value = value_from_ast_untyped(ast, variables) diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index a6e55a48..ea374993 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,6 +1,17 @@ """Test utilities""" +from .assert_equal_awaitables_or_values import assert_equal_awaitables_or_values +from .assert_matching_values import assert_matching_values from .dedent import dedent from .gen_fuzz_strings import gen_fuzz_strings +from .viral_schema import viral_schema +from .viral_sdl import viral_sdl -__all__ = ["dedent", "gen_fuzz_strings"] +__all__ = [ + "assert_equal_awaitables_or_values", + "assert_matching_values", + "dedent", + "gen_fuzz_strings", + "viral_schema", + "viral_sdl", +] diff --git a/tests/utils/assert_equal_awaitables_or_values.py b/tests/utils/assert_equal_awaitables_or_values.py new file mode 100644 index 00000000..964db1a8 --- /dev/null +++ b/tests/utils/assert_equal_awaitables_or_values.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +import asyncio +from typing import Awaitable, Tuple, TypeVar, cast + +from graphql.pyutils import is_awaitable + +from .assert_matching_values import assert_matching_values + +__all__ = ["assert_equal_awaitables_or_values"] + +T = TypeVar("T") + + +def assert_equal_awaitables_or_values(*items: T) -> T: + """Check whether the items are the same and either all awaitables or all values.""" + if all(is_awaitable(item) for item in items): + awaitable_items = cast("Tuple[Awaitable]", items) + + async def assert_matching_awaitables(): + return assert_matching_values(*(await asyncio.gather(*awaitable_items))) + + return assert_matching_awaitables() + + if all(not is_awaitable(item) for item in items): + return assert_matching_values(*items) + + assert False, "Received an invalid mixture of promises and values." diff --git a/tests/utils/assert_matching_values.py b/tests/utils/assert_matching_values.py new file mode 100644 index 00000000..0cadce37 --- /dev/null +++ b/tests/utils/assert_matching_values.py @@ -0,0 +1,13 @@ +from typing import TypeVar + +__all__ = ["assert_matching_values"] + +T = TypeVar("T") + + +def assert_matching_values(*values: T) -> T: + """Test that all values in the sequence are equal.""" + first_value, *remaining_values = values + for value in remaining_values: + assert value == first_value + return first_value diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py new file mode 100644 index 00000000..3e60fbcb --- /dev/null +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -0,0 +1,54 @@ +import pytest + +from . 
import assert_equal_awaitables_or_values + + +def describe_assert_equal_awaitables_or_values(): + def throws_when_given_unequal_values(): + with pytest.raises(AssertionError): + assert_equal_awaitables_or_values({}, {}, {"test": "test"}) + + def does_not_throw_when_given_equal_values(): + test_value = {"test": "test"} + assert ( + assert_equal_awaitables_or_values(test_value, test_value, test_value) + == test_value + ) + + @pytest.mark.asyncio + async def does_not_throw_when_given_equal_awaitables(): + async def test_value(): + return {"test": "test"} + + assert ( + await assert_equal_awaitables_or_values( + test_value(), test_value(), test_value() + ) + == await test_value() + ) + + @pytest.mark.asyncio + async def throws_when_given_unequal_awaitables(): + async def test_value(value): + return value + + with pytest.raises(AssertionError): + await assert_equal_awaitables_or_values( + test_value({}), test_value({}), test_value({"test": "test"}) + ) + + @pytest.mark.asyncio + async def throws_when_given_mixture_of_equal_values_and_awaitables(): + async def test_value(): + return {"test": "test"} + + value1 = await test_value() + value2 = test_value() + + with pytest.raises( + AssertionError, + match=r"Received an invalid mixture of promises and values\.", + ): + await assert_equal_awaitables_or_values(value1, value2) + + assert await value2 == value1 diff --git a/tests/utils/test_assert_matching_values.py b/tests/utils/test_assert_matching_values.py new file mode 100644 index 00000000..a67191d0 --- /dev/null +++ b/tests/utils/test_assert_matching_values.py @@ -0,0 +1,13 @@ +import pytest + +from . import assert_matching_values + + +def describe_assert_matching_values(): + def throws_when_given_unequal_values(): + with pytest.raises(AssertionError): + assert_matching_values({}, {}, {"test": "test"}) + + def does_not_throw_when_given_equal_values(): + test_value = {"test": "test"} + assert assert_matching_values(test_value, test_value, test_value) == test_value diff --git a/tests/utils/viral_schema.py b/tests/utils/viral_schema.py new file mode 100644 index 00000000..57ebf703 --- /dev/null +++ b/tests/utils/viral_schema.py @@ -0,0 +1,34 @@ +from graphql import GraphQLSchema +from graphql.type import ( + GraphQLField, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLString, +) + +__all__ = ["viral_schema"] + +Mutation = GraphQLObjectType( + "Mutation", + { + "name": GraphQLField(GraphQLNonNull(GraphQLString)), + "geneSequence": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + +Virus = GraphQLObjectType( + "Virus", + { + "name": GraphQLField(GraphQLNonNull(GraphQLString)), + "knownMutations": GraphQLField( + GraphQLNonNull(GraphQLList(GraphQLNonNull(Mutation))) + ), + }, +) + +Query = GraphQLObjectType( + "Query", {"viruses": GraphQLField(GraphQLList(GraphQLNonNull(Virus)))} +) + +viral_schema = GraphQLSchema(Query) diff --git a/tests/utils/viral_sdl.py b/tests/utils/viral_sdl.py new file mode 100644 index 00000000..dd7afc84 --- /dev/null +++ b/tests/utils/viral_sdl.py @@ -0,0 +1,21 @@ +__all__ = ["viral_sdl"] + +viral_sdl = """ +schema { + query: Query +} + +type Query { + viruses: [Virus!] +} + +type Virus { + name: String! + knownMutations: [Mutation!]! +} + +type Mutation { + name: String! + geneSequence: String! 
+} +""".strip() diff --git a/tests/validation/__init__.py b/tests/validation/__init__.py index 5449639d..ad944ff3 100644 --- a/tests/validation/__init__.py +++ b/tests/validation/__init__.py @@ -1,5 +1,5 @@ """Tests for graphql.validation""" -from pytest import register_assert_rewrite +import pytest -register_assert_rewrite("tests.validation.harness") +pytest.register_assert_rewrite("tests.validation.harness") diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 214a91bc..737fb2df 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -1,16 +1,20 @@ -from typing import List, Optional, Type +from __future__ import annotations + +from typing import TYPE_CHECKING, Any -from graphql.error import GraphQLError from graphql.language import parse -from graphql.type import GraphQLSchema from graphql.utilities import build_schema -from graphql.validation import ValidationRule, SDLValidationRule from graphql.validation.validate import validate, validate_sdl +if TYPE_CHECKING: + from graphql.error import GraphQLError + from graphql.type import GraphQLSchema + from graphql.validation import ASTValidationRule + __all__ = [ - "test_schema", - "assert_validation_errors", "assert_sdl_validation_errors", + "assert_validation_errors", + "test_schema", ] test_schema = build_schema( @@ -61,7 +65,7 @@ type Human { name(surname: Boolean): String pets: [Pet] - relatives: [Human] + relatives: [Human]! } enum FurColor { @@ -82,6 +86,11 @@ stringListField: [String] } + input OneOfInput @oneOf { + stringField: String + intField: Int + } + type ComplicatedArgs { # TODO List # TODO Coercion @@ -96,6 +105,7 @@ stringListArgField(stringListArg: [String]): String stringListNonNullArgField(stringListNonNullArg: [String!]): String complexArgField(complexArg: ComplexInput): String + oneOfArgField(oneOfArg: OneOfInput): String multipleReqs(req1: Int!, req2: Int!): String nonNullFieldWithDefault(arg: Int! 
= 0): String multipleOpts(opt1: Int = 0, opt2: Int = 0): String @@ -121,11 +131,11 @@ def assert_validation_errors( - rule: Type[ValidationRule], + rule: type[ASTValidationRule], query_str: str, - errors: List[GraphQLError], + errors: list[GraphQLError | dict[str, Any]], schema: GraphQLSchema = test_schema, -) -> List[GraphQLError]: +) -> list[GraphQLError]: doc = parse(query_str) returned_errors = validate(schema, doc, [rule]) assert returned_errors == errors @@ -133,11 +143,11 @@ def assert_validation_errors( def assert_sdl_validation_errors( - rule: Type[SDLValidationRule], + rule: type[ASTValidationRule], sdl_str: str, - errors: List[GraphQLError], - schema: Optional[GraphQLSchema] = None, -) -> List[GraphQLError]: + errors: list[GraphQLError | dict[str, Any]], + schema: GraphQLSchema | None = None, +) -> list[GraphQLError]: doc = parse(sdl_str) returned_errors = validate_sdl(doc, schema, [rule]) assert returned_errors == errors diff --git a/tests/validation/test_defer_stream_directive_label.py b/tests/validation/test_defer_stream_directive_label.py new file mode 100644 index 00000000..a75acd6f --- /dev/null +++ b/tests/validation/test_defer_stream_directive_label.py @@ -0,0 +1,188 @@ +from functools import partial + +from graphql.validation import DeferStreamDirectiveLabel + +from .harness import assert_validation_errors + +assert_errors = partial(assert_validation_errors, DeferStreamDirectiveLabel) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_defer_stream_directive_labels(): + def defer_fragments_with_no_label(): + assert_valid( + """ + { + dog { + ...dogFragmentA @defer + ...dogFragmentB @defer + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """ + ) + + def defer_fragments_one_with_label_one_without(): + assert_valid( + """ + { + dog { + ...dogFragmentA @defer(label: "fragA") + ...dogFragmentB @defer + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """ + ) + + def defer_fragment_with_variable_label(): + assert_errors( + """ + query($label: String) { + dog { + ...dogFragmentA @defer(label: $label) + ...dogFragmentB @defer(label: "fragA") + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """, + [ + { + "message": "Defer directive label argument" + " must be a static string.", + "locations": [(4, 33)], + }, + ], + ) + + def defer_fragments_with_different_labels(): + assert_valid( + """ + { + dog { + ...dogFragmentA @defer(label: "fragB") + ...dogFragmentB @defer(label: "fragA") + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """ + ) + + def defer_fragments_with_same_labels(): + assert_errors( + """ + { + dog { + ...dogFragmentA @defer(label: "fragA") + ...dogFragmentB @defer(label: "fragA") + } + } + fragment dogFragmentA on Dog { + name + } + fragment dogFragmentB on Dog { + nickname + } + """, + [ + { + "message": "Defer/Stream directive label argument must be unique.", + "locations": [(4, 33), (5, 33)], + }, + ], + ) + + def defer_and_stream_with_no_label(): + assert_valid( + """ + { + dog { + ...dogFragment @defer + } + pets @stream(initialCount: 0) @stream { + name + } + } + fragment dogFragment on Dog { + name + } + """ + ) + + def stream_with_variable_label(): + assert_errors( + """ + query ($label: String!) 
{ + dog { + ...dogFragment @defer + } + pets @stream(initialCount: 0) @stream(label: $label) { + name + } + } + fragment dogFragment on Dog { + name + } + """, + [ + { + "message": "Stream directive label argument" + " must be a static string.", + "locations": [(6, 45)], + }, + ], + ) + + def defer_and_stream_with_the_same_labels(): + assert_errors( + """ + { + dog { + ...dogFragment @defer(label: "MyLabel") + } + pets @stream(initialCount: 0) @stream(label: "MyLabel") { + name + } + } + fragment dogFragment on Dog { + name + } + """, + [ + { + "message": "Defer/Stream directive label argument must be unique.", + "locations": [(4, 32), (6, 45)], + }, + ], + ) + + def no_defer_or_stream_directive_with_variable_and_duplicate_label(): + assert_valid( + """ + query($label: String) { + dog @skip(label: $label) + dog @skip(label: $label) + } + """ + ) diff --git a/tests/validation/test_defer_stream_directive_on_root_field.py b/tests/validation/test_defer_stream_directive_on_root_field.py new file mode 100644 index 00000000..0997f140 --- /dev/null +++ b/tests/validation/test_defer_stream_directive_on_root_field.py @@ -0,0 +1,283 @@ +from functools import partial + +from graphql.utilities import build_schema +from graphql.validation import DeferStreamDirectiveOnRootField + +from .harness import assert_validation_errors + +schema = build_schema( + """ + type Message { + body: String + sender: String + } + + type SubscriptionRoot { + subscriptionField: Message + subscriptionListField: [Message] + } + + type MutationRoot { + mutationField: Message + mutationListField: [Message] + } + + type QueryRoot { + message: Message + messages: [Message] + } + + schema { + query: QueryRoot + mutation: MutationRoot + subscription: SubscriptionRoot + } + """ +) + +assert_errors = partial( + assert_validation_errors, DeferStreamDirectiveOnRootField, schema=schema +) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_defer_stream_on_root_field(): + def defer_fragments_spread_on_root_field(): + assert_valid( + """ + { + ...rootQueryFragment @defer + } + fragment rootQueryFragment on QueryRoot { + message { + body + } + } + """ + ) + + def defer_inline_fragment_spread_on_root_query_field(): + assert_valid( + """ + { + ... @defer { + message { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_root_mutation_field(): + assert_errors( + """ + mutation { + ...rootFragment @defer + } + fragment rootFragment on MutationRoot { + mutationField { + body + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(3, 31)], + }, + ], + ) + + def defer_inline_fragment_spread_on_root_mutation_field(): + assert_errors( + """ + mutation { + ... @defer { + mutationField { + body + } + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(3, 19)], + }, + ], + ) + + def defer_fragment_spread_on_nested_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ... 
@defer { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_root_subscription_field(): + assert_errors( + """ + subscription { + ...rootFragment @defer + } + fragment rootFragment on SubscriptionRoot { + subscriptionField { + body + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(3, 31)], + }, + ], + ) + + def defer_inline_fragment_spread_on_root_subscription_field(): + assert_errors( + """ + subscription { + ... @defer { + subscriptionField { + body + } + } + } + """, + [ + { + "message": "Defer directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(3, 19)], + }, + ], + ) + + def defer_fragment_spread_on_nested_subscription_field(): + assert_valid( + """ + subscription { + subscriptionField { + ...nestedFragment @defer + } + } + fragment nestedFragment on Message { + body + } + """ + ) + + def stream_field_on_root_query_field(): + assert_valid( + """ + { + messages @stream { + name + } + } + """ + ) + + def stream_field_on_fragment_on_root_query_field(): + assert_valid( + """ + { + ...rootFragment + } + fragment rootFragment on QueryType { + messages @stream { + name + } + } + """ + ) + + def stream_field_on_root_mutation_field(): + assert_errors( + """ + mutation { + mutationListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(3, 33)], + }, + ], + ) + + def stream_field_on_fragment_on_root_mutation_field(): + assert_errors( + """ + mutation { + ...rootFragment + } + fragment rootFragment on MutationRoot { + mutationListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " mutation type 'MutationRoot'.", + "locations": [(6, 33)], + }, + ], + ) + + def stream_field_on_root_subscription_field(): + assert_errors( + """ + subscription { + subscriptionListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(3, 37)], + }, + ], + ) + + def stream_field_on_fragment_on_root_subscription_field(): + assert_errors( + """ + subscription { + ...rootFragment + } + fragment rootFragment on SubscriptionRoot { + subscriptionListField @stream { + name + } + } + """, + [ + { + "message": "Stream directive cannot be used on root" + " subscription type 'SubscriptionRoot'.", + "locations": [(6, 37)], + }, + ], + ) diff --git a/tests/validation/test_defer_stream_directive_on_valid_operations.py b/tests/validation/test_defer_stream_directive_on_valid_operations.py new file mode 100644 index 00000000..70207650 --- /dev/null +++ b/tests/validation/test_defer_stream_directive_on_valid_operations.py @@ -0,0 +1,395 @@ +from functools import partial + +from graphql.utilities import build_schema +from graphql.validation import DeferStreamDirectiveOnValidOperationsRule + +from .harness import assert_validation_errors + +schema = build_schema( + """ + type Message { + body: String + sender: String + } + + type SubscriptionRoot { + subscriptionField: Message + subscriptionListField: [Message] + } + + type MutationRoot { + mutationField: Message + mutationListField: [Message] + } + + type QueryRoot { + message: Message + messages: [Message] + } + + schema { + query: QueryRoot + mutation: MutationRoot + subscription: SubscriptionRoot + } + """ +) + +assert_errors = partial( + assert_validation_errors, 
DeferStreamDirectiveOnValidOperationsRule, schema=schema +) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_defer_stream_directive_on_valid_operations(): + def defer_fragment_spread_nested_in_query_operation(): + assert_valid( + """ + { + message { + ...myFragment @defer + } + } + fragment myFragment on Message { + message { + body + } + } + """ + ) + + def defer_inline_fragment_spread_in_query_operation(): + assert_valid( + """ + { + ... @defer { + message { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """ + ) + + def defer_inline_fragment_spread_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ... @defer { + body + } + } + } + """ + ) + + def defer_fragment_spread_on_subscription_field(): + assert_errors( + """ + subscription { + subscriptionField { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def defer_fragment_spread_with_boolean_true_if_argument(): + assert_errors( + """ + subscription { + subscriptionField { + ...myFragment @defer(if: true) + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def defer_fragment_spread_with_boolean_false_if_argument(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @defer(if: false) + } + } + fragment myFragment on Message { + body + } + """ + ) + + def defer_fragment_spread_on_query_in_multi_operation_document(): + assert_valid( + """ + subscription MySubscription { + subscriptionField { + ...myFragment + } + } + query MyQuery { + message { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """ + ) + + def defer_fragment_spread_on_subscription_in_multi_operation_document(): + assert_errors( + """ + subscription MySubscription { + subscriptionField { + ...myFragment @defer + } + } + query MyQuery { + message { + ...myFragment @defer + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." + " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def defer_fragment_spread_with_invalid_if_argument(): + assert_errors( + """ + subscription MySubscription { + subscriptionField { + ...myFragment @defer(if: "Oops") + } + } + fragment myFragment on Message { + body + } + """, + [ + { + "message": "Defer directive not supported" + " on subscription operations." 
+ " Disable `@defer` by setting the `if` argument to `false`.", + "locations": [(4, 31)], + }, + ], + ) + + def stream_on_query_field(): + assert_valid( + """ + { + messages @stream { + name + } + } + """ + ) + + def stream_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + messages @stream + } + } + """ + ) + + def stream_on_fragment_on_mutation_field(): + assert_valid( + """ + mutation { + mutationField { + ...myFragment + } + } + fragment myFragment on Message { + messages @stream + } + """ + ) + + def stream_on_subscription_field(): + assert_errors( + """ + subscription { + subscriptionField { + messages @stream + } + } + """, + [ + { + "message": "Stream directive not supported" + " on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(4, 26)], + }, + ], + ) + + def stream_on_fragment_on_subscription_field(): + assert_errors( + """ + subscription { + subscriptionField { + ...myFragment + } + } + fragment myFragment on Message { + messages @stream + } + """, + [ + { + "message": "Stream directive not supported" + " on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(8, 24)], + }, + ], + ) + + def stream_on_fragment_on_query_in_multi_operation_document(): + assert_valid( + """ + subscription MySubscription { + subscriptionField { + message + } + } + query MyQuery { + message { + ...myFragment + } + } + fragment myFragment on Message { + messages @stream + } + """ + ) + + def stream_on_subscription_in_multi_operation_document(): + assert_errors( + """ + query MyQuery { + message { + ...myFragment + } + } + subscription MySubscription { + subscriptionField { + message { + ...myFragment + } + } + } + fragment myFragment on Message { + messages @stream + } + """, + [ + { + "message": "Stream directive not supported" + " on subscription operations." + " Disable `@stream` by setting the `if` argument to `false`.", + "locations": [(15, 24)], + }, + ], + ) + + def stream_with_boolean_false_if_argument(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @stream(if:false) + } + } + """ + ) + + def stream_with_two_arguments(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @stream(foo:false,if:false) + } + } + """ + ) + + def stream_with_variable_argument(): + assert_valid( + """ + subscription ($stream: boolean!) 
{ + subscriptionField { + ...myFragment @stream(if:$stream) + } + } + """ + ) + + def other_directive_on_subscription_field(): + assert_valid( + """ + subscription { + subscriptionField { + ...myFragment @foo + } + } + """ + ) diff --git a/tests/validation/test_fields_on_correct_type.py b/tests/validation/test_fields_on_correct_type.py index 949e2c28..d0d6d5f3 100644 --- a/tests/validation/test_fields_on_correct_type.py +++ b/tests/validation/test_fields_on_correct_type.py @@ -3,7 +3,7 @@ from graphql.language import parse from graphql.type import GraphQLSchema from graphql.utilities import build_schema -from graphql.validation import validate, FieldsOnCorrectTypeRule +from graphql.validation import FieldsOnCorrectTypeRule, validate from .harness import assert_validation_errors diff --git a/tests/validation/test_known_argument_names.py b/tests/validation/test_known_argument_names.py index 3291b7c9..74099ca6 100644 --- a/tests/validation/test_known_argument_names.py +++ b/tests/validation/test_known_argument_names.py @@ -6,7 +6,7 @@ KnownArgumentNamesOnDirectivesRule, ) -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors assert_errors = partial(assert_validation_errors, KnownArgumentNamesRule) diff --git a/tests/validation/test_known_directives.py b/tests/validation/test_known_directives.py index 3c837ba0..c99921ac 100644 --- a/tests/validation/test_known_directives.py +++ b/tests/validation/test_known_directives.py @@ -3,7 +3,7 @@ from graphql.utilities import build_schema from graphql.validation import KnownDirectivesRule -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors schema_with_directives = build_schema( """ diff --git a/tests/validation/test_known_type_names.py b/tests/validation/test_known_type_names.py index 4b4683ae..b4124a75 100644 --- a/tests/validation/test_known_type_names.py +++ b/tests/validation/test_known_type_names.py @@ -3,7 +3,7 @@ from graphql.utilities import build_schema from graphql.validation import KnownTypeNamesRule -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors assert_errors = partial(assert_validation_errors, KnownTypeNamesRule) diff --git a/tests/validation/test_no_deprecated.py b/tests/validation/test_no_deprecated.py index c4ac992a..1f9bd163 100644 --- a/tests/validation/test_no_deprecated.py +++ b/tests/validation/test_no_deprecated.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from functools import partial -from typing import Callable, List, Tuple +from typing import Callable from graphql.utilities import build_schema from graphql.validation import NoDeprecatedCustomRule @@ -9,7 +11,7 @@ def build_assertions( sdl_str: str, -) -> Tuple[Callable[[str], None], Callable[[str, List], None]]: +) -> tuple[Callable[[str], None], Callable[[str, list], None]]: schema = build_schema(sdl_str) assert_errors = partial( assert_validation_errors, NoDeprecatedCustomRule, schema=schema diff --git a/tests/validation/test_overlapping_fields_can_be_merged.py b/tests/validation/test_overlapping_fields_can_be_merged.py index 5f7800f7..8745f67e 100644 --- a/tests/validation/test_overlapping_fields_can_be_merged.py +++ b/tests/validation/test_overlapping_fields_can_be_merged.py @@ -84,6 +84,133 @@ def different_skip_or_include_directives_accepted(): """ ) + def 
same_stream_directives_supported(): + assert_valid( + """ + fragment differentDirectivesWithDifferentAliases on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "streamLabel", initialCount: 1) + } + """ + ) + + def different_stream_directive_label(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "anotherLabel", initialCount: 1) + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_initial_count(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "streamLabel", initialCount: 2) + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_first_missing_args(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream + name @stream(label: "streamLabel", initialCount: 1) + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_second_missing_args(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def different_stream_directive_extra_argument(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream(label: "streamLabel", initialCount: 1) + name @stream(label: "streamLabel", initialCount: 1, extraArg: true) + }""", + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def mix_of_stream_and_no_stream(): + assert_errors( + """ + fragment conflictingArgs on Dog { + name @stream + name + } + """, + [ + { + "message": "Fields 'name' conflict because they have differing" + " stream directives. 
Use different aliases on the fields" + " to fetch both if this was intentional.", + "locations": [(3, 15), (4, 15)], + } + ], + ) + + def same_stream_directive_both_missing_args(): + assert_valid( + """ + fragment conflictingArgs on Dog { + name @stream + name @stream + } + """ + ) + def same_aliases_with_different_field_targets(): assert_errors( """ @@ -576,7 +703,6 @@ def ignores_unknown_fragments(): ) def describe_return_types_must_be_unambiguous(): - schema = build_schema( """ interface SomeBox { diff --git a/tests/validation/test_provided_required_arguments.py b/tests/validation/test_provided_required_arguments.py index 86bb5233..0e9607a9 100644 --- a/tests/validation/test_provided_required_arguments.py +++ b/tests/validation/test_provided_required_arguments.py @@ -6,7 +6,7 @@ ProvidedRequiredArgumentsOnDirectivesRule, ) -from .harness import assert_validation_errors, assert_sdl_validation_errors +from .harness import assert_sdl_validation_errors, assert_validation_errors assert_errors = partial(assert_validation_errors, ProvidedRequiredArgumentsRule) diff --git a/tests/validation/test_stream_directive_on_list_field.py b/tests/validation/test_stream_directive_on_list_field.py new file mode 100644 index 00000000..5b9b5b8c --- /dev/null +++ b/tests/validation/test_stream_directive_on_list_field.py @@ -0,0 +1,82 @@ +from functools import partial + +from graphql.validation import StreamDirectiveOnListField + +from .harness import assert_validation_errors + +assert_errors = partial(assert_validation_errors, StreamDirectiveOnListField) + +assert_valid = partial(assert_errors, errors=[]) + + +def describe_stream_directive_on_list_field(): + def stream_on_list_field(): + assert_valid( + """ + fragment objectFieldSelection on Human { + pets @stream(initialCount: 0) { + name + } + } + """ + ) + + def stream_on_non_null_list_field(): + assert_valid( + """ + fragment objectFieldSelection on Human { + relatives @stream(initialCount: 0) { + name + } + } + """ + ) + + def does_not_validate_other_directives_on_list_fields(): + assert_valid( + """ + fragment objectFieldSelection on Human { + pets @include(if: true) { + name + } + } + """ + ) + + def does_not_validate_other_directives_on_non_list_fields(): + assert_valid( + """ + fragment objectFieldSelection on Human { + pets { + name @include(if: true) + } + } + """ + ) + + def does_not_validate_misplaced_stream_directives(): + assert_valid( + """ + fragment objectFieldSelection on Human { + ... 
@stream(initialCount: 0) { + name + } + } + """ + ) + + def reports_errors_when_stream_is_used_on_non_list_field(): + assert_errors( + """ + fragment objectFieldSelection on Human { + name @stream(initialCount: 0) + } + """, + [ + { + "message": "Stream directive cannot be used" + " on non-list field 'name' on type 'Human'.", + "locations": [(3, 20)], + }, + ], + ) diff --git a/tests/validation/test_unique_argument_names.py b/tests/validation/test_unique_argument_names.py index ef82f67e..64aa2b95 100644 --- a/tests/validation/test_unique_argument_names.py +++ b/tests/validation/test_unique_argument_names.py @@ -1,4 +1,5 @@ from functools import partial + from graphql.validation import UniqueArgumentNamesRule from .harness import assert_validation_errors diff --git a/tests/validation/test_unique_directives_per_location.py b/tests/validation/test_unique_directives_per_location.py index 29842087..4c21842c 100644 --- a/tests/validation/test_unique_directives_per_location.py +++ b/tests/validation/test_unique_directives_per_location.py @@ -4,7 +4,7 @@ from graphql.utilities import extend_schema from graphql.validation import UniqueDirectivesPerLocationRule -from .harness import assert_validation_errors, assert_sdl_validation_errors, test_schema +from .harness import assert_sdl_validation_errors, assert_validation_errors, test_schema extension_sdl = """ directive @directive on FIELD | FRAGMENT_DEFINITION diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 0f7d80e6..78efbce9 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -1,4 +1,4 @@ -from pytest import raises +import pytest from graphql.error import GraphQLError from graphql.language import parse @@ -9,42 +9,6 @@ def describe_validate_supports_full_validation(): - def rejects_invalid_documents(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate(test_schema, None) # type: ignore - assert str(exc_info.value) == "Must provide document." - - def rejects_invalid_type_info(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), type_info={} # type: ignore - ) - assert str(exc_info.value) == "Not a TypeInfo object: {}." - - def rejects_invalid_rules(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), rules=[None] # type: ignore - ) - assert ( - str(exc_info.value) == "Rules must be specified as a collection" - " of ASTValidationRule subclasses." - ) - - def rejects_invalid_max_errors(): - with raises(TypeError) as exc_info: - # noinspection PyTypeChecker - assert validate( - test_schema, parse("query { name }"), max_errors=2.5 # type: ignore - ) - assert ( - str(exc_info.value) - == "The maximum number of errors must be passed as an int." - ) - def validates_queries(): doc = parse( """ @@ -82,7 +46,7 @@ def detects_unknown_fields(): def deprecated_validates_using_a_custom_type_info(): # This TypeInfo will never return a valid field. - type_info = TypeInfo(test_schema, None, lambda *args: None) + type_info = TypeInfo(test_schema, None, lambda *_args: None) doc = parse( """ @@ -107,8 +71,7 @@ def deprecated_validates_using_a_custom_type_info(): "Cannot query field 'human' on type 'QueryRoot'. Did you mean 'human'?", "Cannot query field 'meowsVolume' on type 'Cat'." " Did you mean 'meowsVolume'?", - "Cannot query field 'barkVolume' on type 'Dog'." 
- " Did you mean 'barkVolume'?", + "Cannot query field 'barkVolume' on type 'Dog'. Did you mean 'barkVolume'?", ] def validates_using_a_custom_rule(): @@ -200,5 +163,5 @@ class CustomRule(ValidationRule): def enter_field(self, *_args): raise RuntimeError("Error from custom rule!") - with raises(RuntimeError, match="^Error from custom rule!$"): + with pytest.raises(RuntimeError, match="^Error from custom rule!$"): validate(test_schema, doc, [CustomRule], max_errors=1) diff --git a/tests/validation/test_values_of_correct_type.py b/tests/validation/test_values_of_correct_type.py index a41cd9f6..7cf20648 100644 --- a/tests/validation/test_values_of_correct_type.py +++ b/tests/validation/test_values_of_correct_type.py @@ -5,8 +5,8 @@ GraphQLArgument, GraphQLField, GraphQLObjectType, - GraphQLSchema, GraphQLScalarType, + GraphQLSchema, GraphQLString, ) from graphql.validation import ValuesOfCorrectTypeRule @@ -931,6 +931,29 @@ def full_object_with_fields_in_different_order(): """ ) + def describe_valid_one_of_input_object_value(): + def exactly_one_field(): + assert_valid( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: "abc" }) + } + } + """ + ) + + def exactly_one_non_nullable_variable(): + assert_valid( + """ + query ($string: String!) { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: $string }) + } + } + """ + ) + def describe_invalid_input_object_value(): def partial_object_missing_required(): assert_errors( @@ -1047,7 +1070,7 @@ def parse_value(value): def reports_error_for_custom_scalar_that_returns_undefined(): custom_scalar = GraphQLScalarType( - "CustomScalar", parse_value=lambda value: Undefined + "CustomScalar", parse_value=lambda _value: Undefined ) schema = GraphQLSchema( @@ -1097,6 +1120,77 @@ def allows_custom_scalar_to_accept_complex_literals(): schema=schema, ) + def describe_invalid_one_of_input_object_value(): + def invalid_field_type(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: 2 }) + } + } + """, + [ + { + "message": "String cannot represent a non string value: 2", + "locations": [(4, 60)], + }, + ], + ) + + def exactly_one_null_field(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: null }) + } + } + """, + [ + { + "message": "Field 'OneOfInput.stringField' must be non-null.", + "locations": [(4, 45)], + }, + ], + ) + + def exactly_one_nullable_variable(): + assert_errors( + """ + query ($string: String) { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: $string }) + } + } + """, + [ + { + "message": "Variable 'string' must be non-nullable to be used" + " for OneOf Input Object 'OneOfInput'.", + "locations": [(4, 45)], + }, + ], + ) + + def more_than_one_field(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: "abc", intField: 123 }) + } + } + """, + [ + { + "message": "OneOf Input Object 'OneOfInput'" + " must specify exactly one key.", + "locations": [(4, 45)], + }, + ], + ) + def describe_directive_arguments(): def with_directives_of_valid_types(): assert_valid( diff --git a/tox.ini b/tox.ini index f9c3a5e2..d7dc47bc 100644 --- a/tox.ini +++ b/tox.ini @@ -1,59 +1,55 @@ [tox] -envlist = py3{6,7,8,9,10}, black, flake8, mypy, docs, manifest +envlist = py3{7,8,9,10,11,12,13}, pypy3{9,10}, ruff, mypy, docs isolated_build = true [gh-actions] python = - 3.6: py36 + 3: py313 3.7: py37 3.8: py38 3.9: py39 3.10: py310 + 3.11: py311 + 3.12: py312 + 3.13: py313 + pypy3: pypy39 + pypy3.9: pypy39 + pypy3.10: 
pypy310 -[testenv:black] -basepython = python3.9 -deps = black==22.8.0 +[testenv:ruff] +basepython = python3.12 +deps = ruff>=0.11,<0.12 commands = - black src tests setup.py -t py39 --check - -[testenv:flake8] -basepython = python3.9 -deps = flake8>=5,<6 -commands = - flake8 src tests setup.py + ruff check src tests + ruff format --check src tests [testenv:mypy] -basepython = python3.9 +basepython = python3.12 deps = - mypy==0.971 - pytest>=6.2,<7 + mypy>=1.15,<2 + pytest>=8.3,<9 commands = mypy src tests [testenv:docs] -basepython = python3.9 +basepython = python3.12 deps = - sphinx>=4.3,<5 - sphinx_rtd_theme>=1,<2 + sphinx>=8,<9 + sphinx_rtd_theme>=3,<4 commands = sphinx-build -b html -nEW docs docs/_build/html -[testenv:manifest] -basepython = python3.9 -deps = check-manifest>=0.48,<1 -commands = - check-manifest -v - [testenv] deps = - py37,py38,py39,py310: pytest>=7.1,<8 - py36: pytest>=6.2,<7 - pytest-asyncio>=0.16,<1 - pytest-benchmark>=3.4,<4 - pytest-cov>=3,<4 - pytest-describe>=2,<3 - pytest-timeout>=2,<3 - py37: typing-extensions>=4.3,<5 - py36: typing-extensions>=4.1,<5 + pytest>=7.4,<9 + pytest-asyncio>=0.21.1,<1 + pytest-benchmark>=4,<6 + pytest-cov>=4.1,<7 + pytest-describe>=2.2,<3 + pytest-timeout>=2.3,<3 + py3{7,8,9},pypy39: typing-extensions>=4.7.1,<5 commands = - pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} + # to also run the time-consuming tests: tox -e py312 -- --run-slow + # to run the benchmarks: tox -e py312 -- -k benchmarks --benchmark-enable + py3{7,8,9,10,11,13},pypy3{9,10}: pytest tests {posargs} + py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100}