From 8e7d76bbce3eb8755bf4f3beba8b9d8b026c6424 Mon Sep 17 00:00:00 2001 From: Mel van Londen Date: Sat, 17 Aug 2019 17:07:53 -0400 Subject: [PATCH 001/141] Graphene v3 following v3 graphql-core (#1048) * v3.0 - remove Python 2.x from build (#983) * Change travis to only compile for p3.6+ * Changed tox to only run Python 3.6+ * Changed library classifiers to reflect support in Python 3.6+ * Changed version to 3.0.0 development In [15]: get_version((3, 0, 0, "alpha", 0)) Out[15]: '3.0.dev20190601212304' * Reorganize Tests (#985) We no longer need a dedicated folder for Python3.6+ tests We no longer need to check six.PY3 in tests * Upgrade black to 19.3b0 (#987) * Remove six dependency (#986) * No one is using func_name * Remove six simple usages * Remove six requirement * Remove `six.with_metaclass` calls * pytest-asyncio should be a regular dependency now with Py3 move * Change dependency to graphql-core-next (#988) * Changed dependencies to core-next * Converted Scalars * ResolveInfo name change * Ignore .venv * Make Schema compatible with GraphQL-core-next * Ignore more venv names and mypy and pytest caches * Remove print statements for debugging in schema test * core-next now provides out_type and out_name * Adapt date and time scalar types to core-next * Ignore the non-standard result.invalid flag * Results are named tuples in core-next (immutable) * Enum values are returned as dict in core-next * Fix mutation tests with promises * Make all 345 tests pass with graphql-core-next * Remove the compat module which was only needed for older Py version * Remove object as base class (not needed in Py 3) * We can assume that dicts are ordered in Py 3.6+ * Make use of the fact that dicts are iterable * Use consistent style of importing from pytest * Restore compatibility with graphql-relay-py v3 Add adapters for the PageInfo and Connection args. 
* Avoid various deprecation warnings * Use graphql-core 3 instead of graphql-core-next * Update dependencies, reformat changes with black * Update graphene/relay/connection.py Co-Authored-By: Jonathan Kim * Run black on setup.py * Remove trailing whitespace --- .gitignore | 14 +- .pre-commit-config.yaml | 4 +- .travis.yml | 2 - .../snap_test_objectidentification.py | 42 +- graphene/__init__.py | 3 +- graphene/pyutils/compat.py | 21 - graphene/pyutils/signature.py | 850 ------------------ graphene/relay/connection.py | 65 +- graphene/relay/mutation.py | 7 +- graphene/relay/node.py | 5 +- graphene/relay/tests/test_connection.py | 20 +- graphene/relay/tests/test_connection_query.py | 134 +-- graphene/relay/tests/test_global_id.py | 2 +- graphene/relay/tests/test_mutation.py | 42 +- graphene/relay/tests/test_node.py | 95 +- graphene/relay/tests/test_node_custom.py | 95 +- graphene/test/__init__.py | 12 +- graphene/tests/issues/test_313.py | 2 +- graphene/tests/issues/test_356.py | 9 +- graphene/types/__init__.py | 2 +- graphene/types/argument.py | 3 +- graphene/types/base.py | 8 +- graphene/types/context.py | 2 +- graphene/types/datetime.py | 22 +- graphene/types/decimal.py | 4 +- graphene/types/definitions.py | 2 +- graphene/types/enum.py | 13 +- graphene/types/field.py | 4 +- graphene/types/generic.py | 22 +- graphene/types/inputobjecttype.py | 6 +- graphene/types/interface.py | 4 +- graphene/types/json.py | 4 +- graphene/types/mutation.py | 6 +- graphene/types/objecttype.py | 4 +- graphene/types/scalars.py | 25 +- graphene/types/schema.py | 556 +++++++++--- graphene/types/tests/test_abstracttype.py | 26 +- graphene/types/tests/test_argument.py | 6 +- graphene/types/tests/test_datetime.py | 41 +- graphene/types/tests/test_definition.py | 32 +- graphene/types/tests/test_enum.py | 44 +- graphene/types/tests/test_field.py | 8 +- graphene/types/tests/test_inputobjecttype.py | 13 +- graphene/types/tests/test_interface.py | 14 +- graphene/types/tests/test_mutation.py | 4 
+- graphene/types/tests/test_objecttype.py | 42 +- graphene/types/tests/test_query.py | 39 +- graphene/types/tests/test_resolver.py | 2 +- graphene/types/tests/test_schema.py | 34 +- graphene/types/tests/test_structures.py | 8 +- .../{test_typemap.py => test_type_map.py} | 117 +-- graphene/types/tests/test_union.py | 4 +- graphene/types/typemap.py | 337 ------- graphene/types/utils.py | 7 +- graphene/types/uuid.py | 7 +- graphene/utils/annotate.py | 35 - graphene/utils/crunch.py | 2 +- graphene/utils/deduplicator.py | 7 +- graphene/utils/orderedtype.py | 2 +- graphene/utils/props.py | 2 +- graphene/utils/subclass_with_meta.py | 6 +- graphene/utils/tests/test_annotate.py | 37 - graphene/utils/tests/test_crunch.py | 19 +- graphene/utils/tests/test_deduplicator.py | 4 +- graphene/utils/tests/test_deprecated.py | 4 +- graphene/utils/tests/test_trim_docstring.py | 4 +- graphene/utils/thenables.py | 31 +- graphene/utils/thenables_asyncio.py | 5 - setup.py | 19 +- tests_asyncio/test_relay_connection.py | 27 +- tests_asyncio/test_relay_mutation.py | 23 +- tests_py36/test_objecttype.py | 15 - tox.ini | 15 +- 73 files changed, 1117 insertions(+), 2036 deletions(-) delete mode 100644 graphene/pyutils/compat.py delete mode 100644 graphene/pyutils/signature.py rename graphene/types/tests/{test_typemap.py => test_type_map.py} (69%) delete mode 100644 graphene/types/typemap.py delete mode 100644 graphene/utils/annotate.py delete mode 100644 graphene/utils/tests/test_annotate.py delete mode 100644 graphene/utils/thenables_asyncio.py delete mode 100644 tests_py36/test_objecttype.py diff --git a/.gitignore b/.gitignore index 77ac057a5..9148845fa 100644 --- a/.gitignore +++ b/.gitignore @@ -10,9 +10,6 @@ __pycache__/ # Distribution / packaging .Python -env/ -venv/ -.venv/ build/ develop-eggs/ dist/ @@ -47,7 +44,8 @@ htmlcov/ .pytest_cache nosetests.xml coverage.xml -*,cover +*.cover +.pytest_cache/ # Translations *.mo @@ -62,6 +60,14 @@ docs/_build/ # PyBuilder target/ +# VirtualEnv 
+.env +.venv +env/ +venv/ + +# Typing +.mypy_cache/ /tests/django.sqlite diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 93ab2e6d9..7aa720015 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,11 +18,11 @@ repos: hooks: - id: pyupgrade - repo: https://github.com/ambv/black - rev: 18.9b0 + rev: 19.3b0 hooks: - id: black language_version: python3 - repo: https://github.com/PyCQA/flake8 - rev: 3.7.7 + rev: 3.7.8 hooks: - id: flake8 diff --git a/.travis.yml b/.travis.yml index 40d0415ed..a5d15f2d5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,8 +2,6 @@ language: python dist: xenial python: - - "2.7" - - "3.5" - "3.6" - "3.7" diff --git a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py index 2d13cba3a..cb57709ae 100644 --- a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py +++ b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py @@ -30,21 +30,22 @@ snapshots[ "test_str_schema 1" -] = """schema { - query: Query - mutation: Mutation -} - +] = '''"""A faction in the Star Wars saga""" type Faction implements Node { + """The ID of the object""" id: ID! + + """The name of the faction.""" name: String - ships(before: String, after: String, first: Int, last: Int): ShipConnection + + """The ships used by the faction.""" + ships(before: String = null, after: String = null, first: Int = null, last: Int = null): ShipConnection } input IntroduceShipInput { shipName: String! factionId: String! - clientMutationId: String + clientMutationId: String = null } type IntroduceShipPayload { @@ -57,35 +58,60 @@ introduceShip(input: IntroduceShipInput!): IntroduceShipPayload } +"""An object with an ID""" interface Node { + """The ID of the object""" id: ID! } +""" +The Relay compliant `PageInfo` type, containing data necessary to paginate this connection. 
+""" type PageInfo { + """When paginating forwards, are there more items?""" hasNextPage: Boolean! + + """When paginating backwards, are there more items?""" hasPreviousPage: Boolean! + + """When paginating backwards, the cursor to continue.""" startCursor: String + + """When paginating forwards, the cursor to continue.""" endCursor: String } type Query { rebels: Faction empire: Faction + + """The ID of the object""" node(id: ID!): Node } +"""A ship in the Star Wars saga""" type Ship implements Node { + """The ID of the object""" id: ID! + + """The name of the ship.""" name: String } type ShipConnection { + """Pagination data for this connection.""" pageInfo: PageInfo! + + """Contains the nodes in this connection.""" edges: [ShipEdge]! } +"""A Relay edge containing a `Ship` and its cursor.""" type ShipEdge { + """The item at the end of the edge""" node: Ship + + """A cursor for use in pagination""" cursor: String! } -""" +''' diff --git a/graphene/__init__.py b/graphene/__init__.py index 9cbbc38f7..d7d7ef888 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -43,7 +43,8 @@ from .utils.module_loading import lazy_import -VERSION = (2, 1, 8, "final", 0) +VERSION = (3, 0, 0, "alpha", 0) + __version__ = get_version(VERSION) diff --git a/graphene/pyutils/compat.py b/graphene/pyutils/compat.py deleted file mode 100644 index 3d3ce2fe7..000000000 --- a/graphene/pyutils/compat.py +++ /dev/null @@ -1,21 +0,0 @@ -from __future__ import absolute_import - -import six - -from graphql.pyutils.compat import Enum - -try: - from inspect import signature -except ImportError: - from .signature import signature - -if six.PY2: - - def func_name(func): - return func.func_name - - -else: - - def func_name(func): - return func.__name__ diff --git a/graphene/pyutils/signature.py b/graphene/pyutils/signature.py deleted file mode 100644 index 7757d9d01..000000000 --- a/graphene/pyutils/signature.py +++ /dev/null @@ -1,850 +0,0 @@ -# Copyright 2001-2013 Python Software Foundation; 
All Rights Reserved -"""Function signature objects for callables -Back port of Python 3.3's function signature tools from the inspect module, -modified to be compatible with Python 2.7 and 3.2+. -""" -from __future__ import absolute_import, division, print_function - -import functools -import itertools -import re -import types -from collections import OrderedDict - -__version__ = "0.4" - -__all__ = ["BoundArguments", "Parameter", "Signature", "signature"] - - -_WrapperDescriptor = type(type.__call__) -_MethodWrapper = type(all.__call__) - -_NonUserDefinedCallables = ( - _WrapperDescriptor, - _MethodWrapper, - types.BuiltinFunctionType, -) - - -def formatannotation(annotation, base_module=None): - if isinstance(annotation, type): - if annotation.__module__ in ("builtins", "__builtin__", base_module): - return annotation.__name__ - return annotation.__module__ + "." + annotation.__name__ - return repr(annotation) - - -def _get_user_defined_method(cls, method_name, *nested): - try: - if cls is type: - return - meth = getattr(cls, method_name) - for name in nested: - meth = getattr(meth, name, meth) - except AttributeError: - return - else: - if not isinstance(meth, _NonUserDefinedCallables): - # Once '__signature__' will be added to 'C'-level - # callables, this check won't be necessary - return meth - - -def signature(obj): - """Get a signature object for the passed callable.""" - - if not callable(obj): - raise TypeError("{!r} is not a callable object".format(obj)) - - if isinstance(obj, types.MethodType): - sig = signature(obj.__func__) - if obj.__self__ is None: - # Unbound method: the first parameter becomes positional-only - if sig.parameters: - first = sig.parameters.values()[0].replace(kind=_POSITIONAL_ONLY) - return sig.replace( - parameters=(first,) + tuple(sig.parameters.values())[1:] - ) - else: - return sig - else: - # In this case we skip the first parameter of the underlying - # function (usually `self` or `cls`). 
- return sig.replace(parameters=tuple(sig.parameters.values())[1:]) - - try: - sig = obj.__signature__ - except AttributeError: - pass - else: - if sig is not None: - return sig - - try: - # Was this function wrapped by a decorator? - wrapped = obj.__wrapped__ - except AttributeError: - pass - else: - return signature(wrapped) - - if isinstance(obj, types.FunctionType): - return Signature.from_function(obj) - - if isinstance(obj, functools.partial): - sig = signature(obj.func) - - new_params = OrderedDict(sig.parameters.items()) - - partial_args = obj.args or () - partial_keywords = obj.keywords or {} - try: - ba = sig.bind_partial(*partial_args, **partial_keywords) - except TypeError as ex: - msg = "partial object {!r} has incorrect arguments".format(obj) - raise ValueError(msg) - - for arg_name, arg_value in ba.arguments.items(): - param = new_params[arg_name] - if arg_name in partial_keywords: - # We set a new default value, because the following code - # is correct: - # - # >>> def foo(a): print(a) - # >>> print(partial(partial(foo, a=10), a=20)()) - # 20 - # >>> print(partial(partial(foo, a=10), a=20)(a=30)) - # 30 - # - # So, with 'partial' objects, passing a keyword argument is - # like setting a new default value for the corresponding - # parameter - # - # We also mark this parameter with '_partial_kwarg' - # flag. Later, in '_bind', the 'default' value of this - # parameter will be added to 'kwargs', to simulate - # the 'functools.partial' real call. 
- new_params[arg_name] = param.replace( - default=arg_value, _partial_kwarg=True - ) - - elif ( - param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) - and not param._partial_kwarg - ): - new_params.pop(arg_name) - - return sig.replace(parameters=new_params.values()) - - sig = None - if isinstance(obj, type): - # obj is a class or a metaclass - - # First, let's see if it has an overloaded __call__ defined - # in its metaclass - call = _get_user_defined_method(type(obj), "__call__") - if call is not None: - sig = signature(call) - else: - # Now we check if the 'obj' class has a '__new__' method - new = _get_user_defined_method(obj, "__new__") - if new is not None: - sig = signature(new) - else: - # Finally, we should have at least __init__ implemented - init = _get_user_defined_method(obj, "__init__") - if init is not None: - sig = signature(init) - elif not isinstance(obj, _NonUserDefinedCallables): - # An object with __call__ - # We also check that the 'obj' is not an instance of - # _WrapperDescriptor or _MethodWrapper to avoid - # infinite recursion (and even potential segfault) - call = _get_user_defined_method(type(obj), "__call__", "im_func") - if call is not None: - sig = signature(call) - - if sig is not None: - # For classes and objects we skip the first parameter of their - # __call__, __new__, or __init__ methods - return sig.replace(parameters=tuple(sig.parameters.values())[1:]) - - if isinstance(obj, types.BuiltinFunctionType): - # Raise a nicer error message for builtins - msg = "no signature found for builtin function {!r}".format(obj) - raise ValueError(msg) - - raise ValueError("callable {!r} is not supported by signature".format(obj)) - - -class _void(object): - """A private marker - used in Parameter & Signature""" - - -class _empty(object): - pass - - -class _ParameterKind(int): - def __new__(self, *args, **kwargs): - obj = int.__new__(self, *args) - obj._name = kwargs["name"] - return obj - - def __str__(self): - return self._name - - def 
__repr__(self): - return "<_ParameterKind: {!r}>".format(self._name) - - -_POSITIONAL_ONLY = _ParameterKind(0, name="POSITIONAL_ONLY") -_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name="POSITIONAL_OR_KEYWORD") -_VAR_POSITIONAL = _ParameterKind(2, name="VAR_POSITIONAL") -_KEYWORD_ONLY = _ParameterKind(3, name="KEYWORD_ONLY") -_VAR_KEYWORD = _ParameterKind(4, name="VAR_KEYWORD") - - -class Parameter(object): - """Represents a parameter in a function signature. - Has the following public attributes: - * name : str - The name of the parameter as a string. - * default : object - The default value for the parameter if specified. If the - parameter has no default value, this attribute is not set. - * annotation - The annotation for the parameter if specified. If the - parameter has no annotation, this attribute is not set. - * kind : str - Describes how argument values are bound to the parameter. - Possible values: `Parameter.POSITIONAL_ONLY`, - `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, - `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. 
- """ - - __slots__ = ("_name", "_kind", "_default", "_annotation", "_partial_kwarg") - - POSITIONAL_ONLY = _POSITIONAL_ONLY - POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD - VAR_POSITIONAL = _VAR_POSITIONAL - KEYWORD_ONLY = _KEYWORD_ONLY - VAR_KEYWORD = _VAR_KEYWORD - - empty = _empty - - def __init__( - self, name, kind, default=_empty, annotation=_empty, _partial_kwarg=False - ): - - if kind not in ( - _POSITIONAL_ONLY, - _POSITIONAL_OR_KEYWORD, - _VAR_POSITIONAL, - _KEYWORD_ONLY, - _VAR_KEYWORD, - ): - raise ValueError("invalid value for 'Parameter.kind' attribute") - self._kind = kind - - if default is not _empty: - if kind in (_VAR_POSITIONAL, _VAR_KEYWORD): - msg = "{} parameters cannot have default values".format(kind) - raise ValueError(msg) - self._default = default - self._annotation = annotation - - if name is None: - if kind != _POSITIONAL_ONLY: - raise ValueError( - "None is not a valid name for a " "non-positional-only parameter" - ) - self._name = name - else: - name = str(name) - if kind != _POSITIONAL_ONLY and not re.match(r"[a-z_]\w*$", name, re.I): - msg = "{!r} is not a valid parameter name".format(name) - raise ValueError(msg) - self._name = name - - self._partial_kwarg = _partial_kwarg - - @property - def name(self): - return self._name - - @property - def default(self): - return self._default - - @property - def annotation(self): - return self._annotation - - @property - def kind(self): - return self._kind - - def replace( - self, - name=_void, - kind=_void, - annotation=_void, - default=_void, - _partial_kwarg=_void, - ): - """Creates a customized copy of the Parameter.""" - - if name is _void: - name = self._name - - if kind is _void: - kind = self._kind - - if annotation is _void: - annotation = self._annotation - - if default is _void: - default = self._default - - if _partial_kwarg is _void: - _partial_kwarg = self._partial_kwarg - - return type(self)( - name, - kind, - default=default, - annotation=annotation, - 
_partial_kwarg=_partial_kwarg, - ) - - def __str__(self): - kind = self.kind - - formatted = self._name - if kind == _POSITIONAL_ONLY: - if formatted is None: - formatted = "" - formatted = "<{}>".format(formatted) - - # Add annotation and default value - if self._annotation is not _empty: - formatted = "{}:{}".format(formatted, formatannotation(self._annotation)) - - if self._default is not _empty: - formatted = "{}={}".format(formatted, repr(self._default)) - - if kind == _VAR_POSITIONAL: - formatted = "*" + formatted - elif kind == _VAR_KEYWORD: - formatted = "**" + formatted - - return formatted - - def __repr__(self): - return "<{} at {:#x} {!r}>".format(self.__class__.__name__, id(self), self.name) - - def __hash__(self): - msg = "unhashable type: '{}'".format(self.__class__.__name__) - raise TypeError(msg) - - def __eq__(self, other): - return ( - issubclass(other.__class__, Parameter) - and self._name == other._name - and self._kind == other._kind - and self._default == other._default - and self._annotation == other._annotation - ) - - def __ne__(self, other): - return not self.__eq__(other) - - -class BoundArguments(object): - """Result of `Signature.bind` call. Holds the mapping of arguments - to the function's parameters. - Has the following public attributes: - * arguments : OrderedDict - An ordered mutable mapping of parameters' names to arguments' values. - Does not contain arguments' default values. - * signature : Signature - The Signature object that created this instance. - * args : tuple - Tuple of positional arguments values. - * kwargs : dict - Dict of keyword arguments values. 
- """ - - def __init__(self, signature, arguments): - self.arguments = arguments - self._signature = signature - - @property - def signature(self): - return self._signature - - @property - def args(self): - args = [] - for param_name, param in self._signature.parameters.items(): - if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or param._partial_kwarg: - # Keyword arguments mapped by 'functools.partial' - # (Parameter._partial_kwarg is True) are mapped - # in 'BoundArguments.kwargs', along with VAR_KEYWORD & - # KEYWORD_ONLY - break - - try: - arg = self.arguments[param_name] - except KeyError: - # We're done here. Other arguments - # will be mapped in 'BoundArguments.kwargs' - break - else: - if param.kind == _VAR_POSITIONAL: - # *args - args.extend(arg) - else: - # plain argument - args.append(arg) - - return tuple(args) - - @property - def kwargs(self): - kwargs = {} - kwargs_started = False - for param_name, param in self._signature.parameters.items(): - if not kwargs_started: - if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or param._partial_kwarg: - kwargs_started = True - else: - if param_name not in self.arguments: - kwargs_started = True - continue - - if not kwargs_started: - continue - - try: - arg = self.arguments[param_name] - except KeyError: - pass - else: - if param.kind == _VAR_KEYWORD: - # **kwargs - kwargs.update(arg) - else: - # plain keyword argument - kwargs[param_name] = arg - - return kwargs - - def __hash__(self): - msg = "unhashable type: '{}'".format(self.__class__.__name__) - raise TypeError(msg) - - def __eq__(self, other): - return ( - issubclass(other.__class__, BoundArguments) - and self.signature == other.signature - and self.arguments == other.arguments - ) - - def __ne__(self, other): - return not self.__eq__(other) - - -class Signature(object): - """A Signature object represents the overall signature of a function. 
- It stores a Parameter object for each parameter accepted by the - function, as well as information specific to the function itself. - A Signature object has the following public attributes and methods: - * parameters : OrderedDict - An ordered mapping of parameters' names to the corresponding - Parameter objects (keyword-only arguments are in the same order - as listed in `code.co_varnames`). - * return_annotation : object - The annotation for the return type of the function if specified. - If the function has no annotation for its return type, this - attribute is not set. - * bind(*args, **kwargs) -> BoundArguments - Creates a mapping from positional and keyword arguments to - parameters. - * bind_partial(*args, **kwargs) -> BoundArguments - Creates a partial mapping from positional and keyword arguments - to parameters (simulating 'functools.partial' behavior.) - """ - - __slots__ = ("_return_annotation", "_parameters") - - _parameter_cls = Parameter - _bound_arguments_cls = BoundArguments - - empty = _empty - - def __init__( - self, parameters=None, return_annotation=_empty, __validate_parameters__=True - ): - """Constructs Signature from the given list of Parameter - objects and 'return_annotation'. All arguments are optional. 
- """ - - if parameters is None: - params = OrderedDict() - else: - if __validate_parameters__: - params = OrderedDict() - top_kind = _POSITIONAL_ONLY - - for idx, param in enumerate(parameters): - kind = param.kind - if kind < top_kind: - msg = "wrong parameter order: {0} before {1}" - msg = msg.format(top_kind, param.kind) - raise ValueError(msg) - else: - top_kind = kind - - name = param.name - if name is None: - name = str(idx) - param = param.replace(name=name) - - if name in params: - msg = "duplicate parameter name: {!r}".format(name) - raise ValueError(msg) - params[name] = param - else: - params = OrderedDict(((param.name, param) for param in parameters)) - - self._parameters = params - self._return_annotation = return_annotation - - @classmethod - def from_function(cls, func): - """Constructs Signature for the given python function""" - - if not isinstance(func, types.FunctionType): - raise TypeError("{!r} is not a Python function".format(func)) - - Parameter = cls._parameter_cls - - # Parameter information. - func_code = func.__code__ - pos_count = func_code.co_argcount - arg_names = func_code.co_varnames - positional = tuple(arg_names[:pos_count]) - keyword_only_count = getattr(func_code, "co_kwonlyargcount", 0) - keyword_only = arg_names[pos_count : (pos_count + keyword_only_count)] - annotations = getattr(func, "__annotations__", {}) - defaults = func.__defaults__ - kwdefaults = getattr(func, "__kwdefaults__", None) - - if defaults: - pos_default_count = len(defaults) - else: - pos_default_count = 0 - - parameters = [] - - # Non-keyword-only parameters w/o defaults. - non_default_count = pos_count - pos_default_count - for name in positional[:non_default_count]: - annotation = annotations.get(name, _empty) - parameters.append( - Parameter(name, annotation=annotation, kind=_POSITIONAL_OR_KEYWORD) - ) - - # ... w/ defaults. 
- for offset, name in enumerate(positional[non_default_count:]): - annotation = annotations.get(name, _empty) - parameters.append( - Parameter( - name, - annotation=annotation, - kind=_POSITIONAL_OR_KEYWORD, - default=defaults[offset], - ) - ) - - # *args - if func_code.co_flags & 0x04: - name = arg_names[pos_count + keyword_only_count] - annotation = annotations.get(name, _empty) - parameters.append( - Parameter(name, annotation=annotation, kind=_VAR_POSITIONAL) - ) - - # Keyword-only parameters. - for name in keyword_only: - default = _empty - if kwdefaults is not None: - default = kwdefaults.get(name, _empty) - - annotation = annotations.get(name, _empty) - parameters.append( - Parameter( - name, annotation=annotation, kind=_KEYWORD_ONLY, default=default - ) - ) - # **kwargs - if func_code.co_flags & 0x08: - index = pos_count + keyword_only_count - if func_code.co_flags & 0x04: - index += 1 - - name = arg_names[index] - annotation = annotations.get(name, _empty) - parameters.append(Parameter(name, annotation=annotation, kind=_VAR_KEYWORD)) - - return cls( - parameters, - return_annotation=annotations.get("return", _empty), - __validate_parameters__=False, - ) - - @property - def parameters(self): - try: - return types.MappingProxyType(self._parameters) - except AttributeError: - return OrderedDict(self._parameters.items()) - - @property - def return_annotation(self): - return self._return_annotation - - def replace(self, parameters=_void, return_annotation=_void): - """Creates a customized copy of the Signature. - Pass 'parameters' and/or 'return_annotation' arguments - to override them in the new copy. 
- """ - - if parameters is _void: - parameters = self.parameters.values() - - if return_annotation is _void: - return_annotation = self._return_annotation - - return type(self)(parameters, return_annotation=return_annotation) - - def __hash__(self): - msg = "unhashable type: '{}'".format(self.__class__.__name__) - raise TypeError(msg) - - def __eq__(self, other): - if ( - not issubclass(type(other), Signature) - or self.return_annotation != other.return_annotation - or len(self.parameters) != len(other.parameters) - ): - return False - - other_positions = { - param: idx for idx, param in enumerate(other.parameters.keys()) - } - - for idx, (param_name, param) in enumerate(self.parameters.items()): - if param.kind == _KEYWORD_ONLY: - try: - other_param = other.parameters[param_name] - except KeyError: - return False - else: - if param != other_param: - return False - else: - try: - other_idx = other_positions[param_name] - except KeyError: - return False - else: - if idx != other_idx or param != other.parameters[param_name]: - return False - - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def _bind(self, args, kwargs, partial=False): - """Private method. Don't use directly.""" - - arguments = OrderedDict() - - parameters = iter(self.parameters.values()) - parameters_ex = () - arg_vals = iter(args) - - if partial: - # Support for binding arguments to 'functools.partial' objects. - # See 'functools.partial' case in 'signature()' implementation - # for details. - for param_name, param in self.parameters.items(): - if param._partial_kwarg and param_name not in kwargs: - # Simulating 'functools.partial' behavior - kwargs[param_name] = param.default - - while True: - # Let's iterate through the positional arguments and corresponding - # parameters - try: - arg_val = next(arg_vals) - except StopIteration: - # No more positional arguments - try: - param = next(parameters) - except StopIteration: - # No more parameters. That's it. 
Just need to check that - # we have no `kwargs` after this while loop - break - else: - if param.kind == _VAR_POSITIONAL: - # That's OK, just empty *args. Let's start parsing - # kwargs - break - elif param.name in kwargs: - if param.kind == _POSITIONAL_ONLY: - msg = ( - "{arg!r} parameter is positional only, " - "but was passed as a keyword" - ) - msg = msg.format(arg=param.name) - raise TypeError(msg) - parameters_ex = (param,) - break - elif param.kind == _VAR_KEYWORD or param.default is not _empty: - # That's fine too - we have a default value for this - # parameter. So, lets start parsing `kwargs`, starting - # with the current parameter - parameters_ex = (param,) - break - else: - if partial: - parameters_ex = (param,) - break - else: - msg = "{arg!r} parameter lacking default value" - msg = msg.format(arg=param.name) - raise TypeError(msg) - else: - # We have a positional argument to process - try: - param = next(parameters) - except StopIteration: - raise TypeError("too many positional arguments") - else: - if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY): - # Looks like we have no parameter for this positional - # argument - raise TypeError("too many positional arguments") - - if param.kind == _VAR_POSITIONAL: - # We have an '*args'-like argument, let's fill it with - # all positional arguments we have left and move on to - # the next phase - values = [arg_val] - values.extend(arg_vals) - arguments[param.name] = tuple(values) - break - - if param.name in kwargs: - raise TypeError( - "multiple values for argument " - "{arg!r}".format(arg=param.name) - ) - - arguments[param.name] = arg_val - - # Now, we iterate through the remaining parameters to process - # keyword arguments - kwargs_param = None - for param in itertools.chain(parameters_ex, parameters): - if param.kind == _POSITIONAL_ONLY: - # This should never happen in case of a properly built - # Signature object (but let's have this check here - # to ensure correct behaviour just in case) - raise 
TypeError( - "{arg!r} parameter is positional only, " - "but was passed as a keyword".format(arg=param.name) - ) - - if param.kind == _VAR_KEYWORD: - # Memorize that we have a '**kwargs'-like parameter - kwargs_param = param - continue - - param_name = param.name - try: - arg_val = kwargs.pop(param_name) - except KeyError: - # We have no value for this parameter. It's fine though, - # if it has a default value, or it is an '*args'-like - # parameter, left alone by the processing of positional - # arguments. - if ( - not partial - and param.kind != _VAR_POSITIONAL - and param.default is _empty - ): - raise TypeError( - "{arg!r} parameter lacking default value".format(arg=param_name) - ) - - else: - arguments[param_name] = arg_val - - if kwargs: - if kwargs_param is not None: - # Process our '**kwargs'-like parameter - arguments[kwargs_param.name] = kwargs - else: - raise TypeError("too many keyword arguments") - - return self._bound_arguments_cls(self, arguments) - - def bind(self, *args, **kwargs): - """Get a BoundArguments object, that maps the passed `args` - and `kwargs` to the function's signature. Raises `TypeError` - if the passed arguments can not be bound. - """ - return self._bind(args, kwargs) - - def bind_partial(self, *args, **kwargs): - """Get a BoundArguments object, that partially maps the - passed `args` and `kwargs` to the function's signature. - Raises `TypeError` if the passed arguments can not be bound. 
- """ - return self._bind(args, kwargs, partial=True) - - def __str__(self): - result = [] - render_kw_only_separator = True - for idx, param in enumerate(self.parameters.values()): - formatted = str(param) - - kind = param.kind - if kind == _VAR_POSITIONAL: - # OK, we have an '*args'-like parameter, so we won't need - # a '*' to separate keyword-only arguments - render_kw_only_separator = False - elif kind == _KEYWORD_ONLY and render_kw_only_separator: - # We have a keyword-only parameter to render and we haven't - # rendered an '*args'-like parameter before, so add a '*' - # separator to the parameters list ("foo(arg1, *, arg2)" case) - result.append("*") - # This condition should be only triggered once, so - # reset the flag - render_kw_only_separator = False - - result.append(formatted) - - rendered = "({})".format(", ".join(result)) - - if self.return_annotation is not _empty: - anno = formatannotation(self.return_annotation) - rendered += " -> {}".format(anno) - - return rendered diff --git a/graphene/relay/connection.py b/graphene/relay/connection.py index 047f2b4de..8581a4b5e 100644 --- a/graphene/relay/connection.py +++ b/graphene/relay/connection.py @@ -1,8 +1,8 @@ import re -from collections import Iterable, OrderedDict +from collections.abc import Iterable from functools import partial -from graphql_relay import connection_from_list +from graphql_relay import connection_from_array from ..types import Boolean, Enum, Int, Interface, List, NonNull, Scalar, String, Union from ..types.field import Field @@ -41,6 +41,17 @@ class Meta: ) +# noinspection PyPep8Naming +def page_info_adapter(startCursor, endCursor, hasPreviousPage, hasNextPage): + """Adapter for creating PageInfo instances""" + return PageInfo( + start_cursor=startCursor, + end_cursor=endCursor, + has_previous_page=hasPreviousPage, + has_next_page=hasNextPage, + ) + + class ConnectionOptions(ObjectTypeOptions): node = None @@ -66,7 +77,7 @@ def __init_subclass_with_meta__(cls, node=None, 
name=None, **options): edge_class = getattr(cls, "Edge", None) _node = node - class EdgeBase(object): + class EdgeBase: node = Field(_node, description="The item at the end of the edge") cursor = String(required=True, description="A cursor for use in pagination") @@ -86,31 +97,29 @@ class EdgeMeta: options["name"] = name _meta.node = node - _meta.fields = OrderedDict( - [ - ( - "page_info", - Field( - PageInfo, - name="pageInfo", - required=True, - description="Pagination data for this connection.", - ), - ), - ( - "edges", - Field( - NonNull(List(edge)), - description="Contains the nodes in this connection.", - ), - ), - ] - ) + _meta.fields = { + "page_info": Field( + PageInfo, + name="pageInfo", + required=True, + description="Pagination data for this connection.", + ), + "edges": Field( + NonNull(List(edge)), + description="Contains the nodes in this connection.", + ), + } return super(Connection, cls).__init_subclass_with_meta__( _meta=_meta, **options ) +# noinspection PyPep8Naming +def connection_adapter(cls, edges, pageInfo): + """Adapter for creating Connection instances""" + return cls(edges=edges, page_info=pageInfo) + + class IterableConnectionField(Field): def __init__(self, type, *args, **kwargs): kwargs.setdefault("before", String()) @@ -133,7 +142,7 @@ def type(self): ) assert issubclass(connection_type, Connection), ( - '{} type have to be a subclass of Connection. Received "{}".' + '{} type has to be a subclass of Connection. Received "{}".' ).format(self.__class__.__name__, connection_type) return type @@ -143,15 +152,15 @@ def resolve_connection(cls, connection_type, args, resolved): return resolved assert isinstance(resolved, Iterable), ( - "Resolved value from the connection field have to be iterable or instance of {}. " + "Resolved value from the connection field has to be an iterable or instance of {}. 
" 'Received "{}"' ).format(connection_type, resolved) - connection = connection_from_list( + connection = connection_from_array( resolved, args, - connection_type=connection_type, + connection_type=partial(connection_adapter, connection_type), edge_type=connection_type.Edge, - pageinfo_type=PageInfo, + page_info_type=page_info_adapter, ) connection.iterable = resolved return connection diff --git a/graphene/relay/mutation.py b/graphene/relay/mutation.py index ee758e78c..fce0c5982 100644 --- a/graphene/relay/mutation.py +++ b/graphene/relay/mutation.py @@ -1,5 +1,4 @@ import re -from collections import OrderedDict from ..types import Field, InputObjectType, String from ..types.mutation import Mutation @@ -30,12 +29,10 @@ def __init_subclass_with_meta__( cls.Input = type( "{}Input".format(base_name), bases, - OrderedDict( - input_fields, client_mutation_id=String(name="clientMutationId") - ), + dict(input_fields, client_mutation_id=String(name="clientMutationId")), ) - arguments = OrderedDict( + arguments = dict( input=cls.Input(required=True) # 'client_mutation_id': String(name='clientMutationId') ) diff --git a/graphene/relay/node.py b/graphene/relay/node.py index d9c4c0f6c..54423bbba 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -1,4 +1,3 @@ -from collections import OrderedDict from functools import partial from inspect import isclass @@ -72,9 +71,7 @@ class Meta: @classmethod def __init_subclass_with_meta__(cls, **options): _meta = InterfaceOptions(cls) - _meta.fields = OrderedDict( - id=GlobalID(cls, description="The ID of the object.") - ) + _meta.fields = {"id": GlobalID(cls, description="The ID of the object")} super(AbstractNode, cls).__init_subclass_with_meta__(_meta=_meta, **options) diff --git a/graphene/relay/tests/test_connection.py b/graphene/relay/tests/test_connection.py index 6686f9640..4015f4b43 100644 --- a/graphene/relay/tests/test_connection.py +++ b/graphene/relay/tests/test_connection.py @@ -1,4 +1,4 @@ -import pytest 
+from pytest import raises from ...types import Argument, Field, Int, List, NonNull, ObjectType, Schema, String from ..connection import Connection, ConnectionField, PageInfo @@ -24,7 +24,7 @@ class Edge: assert MyObjectConnection._meta.name == "MyObjectConnection" fields = MyObjectConnection._meta.fields - assert list(fields.keys()) == ["page_info", "edges", "extra"] + assert list(fields) == ["page_info", "edges", "extra"] edge_field = fields["edges"] pageinfo_field = fields["page_info"] @@ -39,7 +39,7 @@ class Edge: def test_connection_inherit_abstracttype(): - class BaseConnection(object): + class BaseConnection: extra = String() class MyObjectConnection(BaseConnection, Connection): @@ -48,13 +48,13 @@ class Meta: assert MyObjectConnection._meta.name == "MyObjectConnection" fields = MyObjectConnection._meta.fields - assert list(fields.keys()) == ["page_info", "edges", "extra"] + assert list(fields) == ["page_info", "edges", "extra"] def test_connection_name(): custom_name = "MyObjectCustomNameConnection" - class BaseConnection(object): + class BaseConnection: extra = String() class MyObjectConnection(BaseConnection, Connection): @@ -76,7 +76,7 @@ class Edge: Edge = MyObjectConnection.Edge assert Edge._meta.name == "MyObjectEdge" edge_fields = Edge._meta.fields - assert list(edge_fields.keys()) == ["node", "cursor", "other"] + assert list(edge_fields) == ["node", "cursor", "other"] assert isinstance(edge_fields["node"], Field) assert edge_fields["node"].type == MyObject @@ -86,7 +86,7 @@ class Edge: def test_edge_with_bases(): - class BaseEdge(object): + class BaseEdge: extra = String() class MyObjectConnection(Connection): @@ -99,7 +99,7 @@ class Edge(BaseEdge): Edge = MyObjectConnection.Edge assert Edge._meta.name == "MyObjectEdge" edge_fields = Edge._meta.fields - assert list(edge_fields.keys()) == ["node", "cursor", "extra", "other"] + assert list(edge_fields) == ["node", "cursor", "extra", "other"] assert isinstance(edge_fields["node"], Field) assert 
edge_fields["node"].type == MyObject @@ -122,7 +122,7 @@ class Meta: def test_pageinfo(): assert PageInfo._meta.name == "PageInfo" fields = PageInfo._meta.fields - assert list(fields.keys()) == [ + assert list(fields) == [ "has_next_page", "has_previous_page", "start_cursor", @@ -146,7 +146,7 @@ class Meta: def test_connectionfield_node_deprecated(): field = ConnectionField(MyObject) - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: field.type assert "ConnectionFields now need a explicit ConnectionType for Nodes." in str( diff --git a/graphene/relay/tests/test_connection_query.py b/graphene/relay/tests/test_connection_query.py index be6ee8c74..e109067ba 100644 --- a/graphene/relay/tests/test_connection_query.py +++ b/graphene/relay/tests/test_connection_query.py @@ -1,7 +1,6 @@ -from collections import OrderedDict +from pytest import mark from graphql_relay.utils import base64 -from promise import Promise from ...types import ObjectType, Schema, String from ..connection import Connection, ConnectionField, PageInfo @@ -25,15 +24,15 @@ class Meta: class Query(ObjectType): letters = ConnectionField(LetterConnection) connection_letters = ConnectionField(LetterConnection) - promise_letters = ConnectionField(LetterConnection) + async_letters = ConnectionField(LetterConnection) node = Node.Field() def resolve_letters(self, info, **args): return list(letters.values()) - def resolve_promise_letters(self, info, **args): - return Promise.resolve(list(letters.values())) + async def resolve_async_letters(self, info, **args): + return list(letters.values()) def resolve_connection_letters(self, info, **args): return LetterConnection( @@ -46,9 +45,7 @@ def resolve_connection_letters(self, info, **args): schema = Schema(Query) -letters = OrderedDict() -for i, letter in enumerate(letter_chars): - letters[letter] = Letter(id=i, letter=letter) +letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter_chars)} def 
edges(selected_letters): @@ -66,11 +63,11 @@ def cursor_for(ltr): return base64("arrayconnection:%s" % letter.id) -def execute(args=""): +async def execute(args=""): if args: args = "(" + args + ")" - return schema.execute( + return await schema.execute_async( """ { letters%s { @@ -94,8 +91,8 @@ def execute(args=""): ) -def check(args, letters, has_previous_page=False, has_next_page=False): - result = execute(args) +async def check(args, letters, has_previous_page=False, has_next_page=False): + result = await execute(args) expected_edges = edges(letters) expected_page_info = { "hasPreviousPage": has_previous_page, @@ -110,96 +107,118 @@ def check(args, letters, has_previous_page=False, has_next_page=False): } -def test_returns_all_elements_without_filters(): - check("", "ABCDE") +@mark.asyncio +async def test_returns_all_elements_without_filters(): + await check("", "ABCDE") -def test_respects_a_smaller_first(): - check("first: 2", "AB", has_next_page=True) +@mark.asyncio +async def test_respects_a_smaller_first(): + await check("first: 2", "AB", has_next_page=True) -def test_respects_an_overly_large_first(): - check("first: 10", "ABCDE") +@mark.asyncio +async def test_respects_an_overly_large_first(): + await check("first: 10", "ABCDE") -def test_respects_a_smaller_last(): - check("last: 2", "DE", has_previous_page=True) +@mark.asyncio +async def test_respects_a_smaller_last(): + await check("last: 2", "DE", has_previous_page=True) -def test_respects_an_overly_large_last(): - check("last: 10", "ABCDE") +@mark.asyncio +async def test_respects_an_overly_large_last(): + await check("last: 10", "ABCDE") -def test_respects_first_and_after(): - check('first: 2, after: "{}"'.format(cursor_for("B")), "CD", has_next_page=True) +@mark.asyncio +async def test_respects_first_and_after(): + await check( + 'first: 2, after: "{}"'.format(cursor_for("B")), "CD", has_next_page=True + ) -def test_respects_first_and_after_with_long_first(): - check('first: 10, after: 
"{}"'.format(cursor_for("B")), "CDE") +@mark.asyncio +async def test_respects_first_and_after_with_long_first(): + await check('first: 10, after: "{}"'.format(cursor_for("B")), "CDE") -def test_respects_last_and_before(): - check('last: 2, before: "{}"'.format(cursor_for("D")), "BC", has_previous_page=True) +@mark.asyncio +async def test_respects_last_and_before(): + await check( + 'last: 2, before: "{}"'.format(cursor_for("D")), "BC", has_previous_page=True + ) -def test_respects_last_and_before_with_long_last(): - check('last: 10, before: "{}"'.format(cursor_for("D")), "ABC") +@mark.asyncio +async def test_respects_last_and_before_with_long_last(): + await check('last: 10, before: "{}"'.format(cursor_for("D")), "ABC") -def test_respects_first_and_after_and_before_too_few(): - check( +@mark.asyncio +async def test_respects_first_and_after_and_before_too_few(): + await check( 'first: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), "BC", has_next_page=True, ) -def test_respects_first_and_after_and_before_too_many(): - check( +@mark.asyncio +async def test_respects_first_and_after_and_before_too_many(): + await check( 'first: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), "BCD", ) -def test_respects_first_and_after_and_before_exactly_right(): - check( +@mark.asyncio +async def test_respects_first_and_after_and_before_exactly_right(): + await check( 'first: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), "BCD", ) -def test_respects_last_and_after_and_before_too_few(): - check( +@mark.asyncio +async def test_respects_last_and_after_and_before_too_few(): + await check( 'last: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), "CD", has_previous_page=True, ) -def test_respects_last_and_after_and_before_too_many(): - check( +@mark.asyncio +async def test_respects_last_and_after_and_before_too_many(): + await check( 'last: 4, after: "{}", before: "{}"'.format(cursor_for("A"), 
cursor_for("E")), "BCD", ) -def test_respects_last_and_after_and_before_exactly_right(): - check( +@mark.asyncio +async def test_respects_last_and_after_and_before_exactly_right(): + await check( 'last: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), "BCD", ) -def test_returns_no_elements_if_first_is_0(): - check("first: 0", "", has_next_page=True) +@mark.asyncio +async def test_returns_no_elements_if_first_is_0(): + await check("first: 0", "", has_next_page=True) -def test_returns_all_elements_if_cursors_are_invalid(): - check('before: "invalid" after: "invalid"', "ABCDE") +@mark.asyncio +async def test_returns_all_elements_if_cursors_are_invalid(): + await check('before: "invalid" after: "invalid"', "ABCDE") -def test_returns_all_elements_if_cursors_are_on_the_outside(): - check( +@mark.asyncio +async def test_returns_all_elements_if_cursors_are_on_the_outside(): + await check( 'before: "{}" after: "{}"'.format( base64("arrayconnection:%s" % 6), base64("arrayconnection:%s" % -1) ), @@ -207,8 +226,9 @@ def test_returns_all_elements_if_cursors_are_on_the_outside(): ) -def test_returns_no_elements_if_cursors_cross(): - check( +@mark.asyncio +async def test_returns_no_elements_if_cursors_cross(): + await check( 'before: "{}" after: "{}"'.format( base64("arrayconnection:%s" % 2), base64("arrayconnection:%s" % 4) ), @@ -216,8 +236,9 @@ def test_returns_no_elements_if_cursors_cross(): ) -def test_connection_type_nodes(): - result = schema.execute( +@mark.asyncio +async def test_connection_type_nodes(): + result = await schema.execute_async( """ { connectionLetters { @@ -248,11 +269,12 @@ def test_connection_type_nodes(): } -def test_connection_promise(): - result = schema.execute( +@mark.asyncio +async def test_connection_async(): + result = await schema.execute_async( """ { - promiseLetters(first:1) { + asyncLetters(first:1) { edges { node { id @@ -270,7 +292,7 @@ def test_connection_promise(): assert not result.errors assert result.data == { - 
"promiseLetters": { + "asyncLetters": { "edges": [{"node": {"id": "TGV0dGVyOjA=", "letter": "A"}}], "pageInfo": {"hasPreviousPage": False, "hasNextPage": True}, } diff --git a/graphene/relay/tests/test_global_id.py b/graphene/relay/tests/test_global_id.py index 6d328f23d..2fe813008 100644 --- a/graphene/relay/tests/test_global_id.py +++ b/graphene/relay/tests/test_global_id.py @@ -17,7 +17,7 @@ class Meta: name = String() -class Info(object): +class Info: def __init__(self, parent_type): self.parent_type = GrapheneObjectType( graphene_type=parent_type, diff --git a/graphene/relay/tests/test_mutation.py b/graphene/relay/tests/test_mutation.py index b58a3ddfc..5fb1c4687 100644 --- a/graphene/relay/tests/test_mutation.py +++ b/graphene/relay/tests/test_mutation.py @@ -1,5 +1,4 @@ -import pytest -from promise import Promise +from pytest import mark, raises from ...types import ( ID, @@ -15,7 +14,7 @@ from ..mutation import ClientIDMutation -class SharedFields(object): +class SharedFields: shared = String() @@ -37,7 +36,7 @@ def mutate_and_get_payload(self, info, what, client_mutation_id=None): return SaySomething(phrase=str(what)) -class FixedSaySomething(object): +class FixedSaySomething: __slots__ = ("phrase",) def __init__(self, phrase): @@ -55,15 +54,15 @@ def mutate_and_get_payload(self, info, what, client_mutation_id=None): return FixedSaySomething(phrase=str(what)) -class SaySomethingPromise(ClientIDMutation): +class SaySomethingAsync(ClientIDMutation): class Input: what = String() phrase = String() @staticmethod - def mutate_and_get_payload(self, info, what, client_mutation_id=None): - return Promise.resolve(SaySomething(phrase=str(what))) + async def mutate_and_get_payload(self, info, what, client_mutation_id=None): + return SaySomething(phrase=str(what)) # MyEdge = MyNode.Connection.Edge @@ -81,11 +80,11 @@ class Input(SharedFields): @staticmethod def mutate_and_get_payload( - self, info, shared="", additional_field="", client_mutation_id=None + self, info, 
shared, additional_field, client_mutation_id=None ): edge_type = MyEdge return OtherMutation( - name=shared + additional_field, + name=(shared or "") + (additional_field or ""), my_node_edge=edge_type(cursor="1", node=MyNode(name="name")), ) @@ -97,7 +96,7 @@ class RootQuery(ObjectType): class Mutation(ObjectType): say = SaySomething.Field() say_fixed = SaySomethingFixed.Field() - say_promise = SaySomethingPromise.Field() + say_async = SaySomethingAsync.Field() other = OtherMutation.Field() @@ -105,7 +104,7 @@ class Mutation(ObjectType): def test_no_mutate_and_get_payload(): - with pytest.raises(AssertionError) as excinfo: + with raises(AssertionError) as excinfo: class MyMutation(ClientIDMutation): pass @@ -118,12 +117,12 @@ class MyMutation(ClientIDMutation): def test_mutation(): fields = SaySomething._meta.fields - assert list(fields.keys()) == ["phrase", "client_mutation_id"] + assert list(fields) == ["phrase", "client_mutation_id"] assert SaySomething._meta.name == "SaySomethingPayload" assert isinstance(fields["phrase"], Field) field = SaySomething.Field() assert field.type == SaySomething - assert list(field.args.keys()) == ["input"] + assert list(field.args) == ["input"] assert isinstance(field.args["input"], Argument) assert isinstance(field.args["input"].type, NonNull) assert field.args["input"].type.of_type == SaySomething.Input @@ -136,7 +135,7 @@ def test_mutation_input(): Input = SaySomething.Input assert issubclass(Input, InputObjectType) fields = Input._meta.fields - assert list(fields.keys()) == ["what", "client_mutation_id"] + assert list(fields) == ["what", "client_mutation_id"] assert isinstance(fields["what"], InputField) assert fields["what"].type == String assert isinstance(fields["client_mutation_id"], InputField) @@ -145,11 +144,11 @@ def test_mutation_input(): def test_subclassed_mutation(): fields = OtherMutation._meta.fields - assert list(fields.keys()) == ["name", "my_node_edge", "client_mutation_id"] + assert list(fields) == ["name", 
"my_node_edge", "client_mutation_id"] assert isinstance(fields["name"], Field) field = OtherMutation.Field() assert field.type == OtherMutation - assert list(field.args.keys()) == ["input"] + assert list(field.args) == ["input"] assert isinstance(field.args["input"], Argument) assert isinstance(field.args["input"].type, NonNull) assert field.args["input"].type.of_type == OtherMutation.Input @@ -159,7 +158,7 @@ def test_subclassed_mutation_input(): Input = OtherMutation.Input assert issubclass(Input, InputObjectType) fields = Input._meta.fields - assert list(fields.keys()) == ["shared", "additional_field", "client_mutation_id"] + assert list(fields) == ["shared", "additional_field", "client_mutation_id"] assert isinstance(fields["shared"], InputField) assert fields["shared"].type == String assert isinstance(fields["additional_field"], InputField) @@ -185,12 +184,13 @@ def test_node_query_fixed(): ) -def test_node_query_promise(): - executed = schema.execute( - 'mutation a { sayPromise(input: {what:"hello", clientMutationId:"1"}) { phrase } }' +@mark.asyncio +async def test_node_query_async(): + executed = await schema.execute_async( + 'mutation a { sayAsync(input: {what:"hello", clientMutationId:"1"}) { phrase } }' ) assert not executed.errors - assert executed.data == {"sayPromise": {"phrase": "hello"}} + assert executed.data == {"sayAsync": {"phrase": "hello"}} def test_edge_query(): diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index fbce1d547..c43ee1edc 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -1,12 +1,12 @@ -from collections import OrderedDict - from graphql_relay import to_global_id +from graphql.pyutils import dedent + from ...types import ObjectType, Schema, String from ..node import Node, is_node -class SharedNodeFields(object): +class SharedNodeFields: shared = String() something_else = String() @@ -70,17 +70,13 @@ def test_subclassed_node_query(): % 
to_global_id("MyOtherNode", 1) ) assert not executed.errors - assert executed.data == OrderedDict( - { - "node": OrderedDict( - [ - ("shared", "1"), - ("extraField", "extra field info."), - ("somethingElse", "----"), - ] - ) + assert executed.data == { + "node": { + "shared": "1", + "extraField": "extra field info.", + "somethingElse": "----", } - ) + } def test_node_requesting_non_node(): @@ -124,7 +120,7 @@ def test_node_field_only_type_wrong(): % Node.to_global_id("MyOtherNode", 1) ) assert len(executed.errors) == 1 - assert str(executed.errors[0]) == "Must receive a MyNode id." + assert str(executed.errors[0]).startswith("Must receive a MyNode id.") assert executed.data == {"onlyNode": None} @@ -143,39 +139,48 @@ def test_node_field_only_lazy_type_wrong(): % Node.to_global_id("MyOtherNode", 1) ) assert len(executed.errors) == 1 - assert str(executed.errors[0]) == "Must receive a MyNode id." + assert str(executed.errors[0]).startswith("Must receive a MyNode id.") assert executed.data == {"onlyNodeLazy": None} def test_str_schema(): - assert ( - str(schema) - == """ -schema { - query: RootQuery -} - -type MyNode implements Node { - id: ID! - name: String -} - -type MyOtherNode implements Node { - id: ID! - shared: String - somethingElse: String - extraField: String -} - -interface Node { - id: ID! -} - -type RootQuery { - first: String - node(id: ID!): Node - onlyNode(id: ID!): MyNode - onlyNodeLazy(id: ID!): MyNode -} -""".lstrip() + assert str(schema) == dedent( + ''' + schema { + query: RootQuery + } + + type MyNode implements Node { + """The ID of the object""" + id: ID! + name: String + } + + type MyOtherNode implements Node { + """The ID of the object""" + id: ID! + shared: String + somethingElse: String + extraField: String + } + + """An object with an ID""" + interface Node { + """The ID of the object""" + id: ID! 
+ } + + type RootQuery { + first: String + + """The ID of the object""" + node(id: ID!): Node + + """The ID of the object""" + onlyNode(id: ID!): MyNode + + """The ID of the object""" + onlyNodeLazy(id: ID!): MyNode + } + ''' ) diff --git a/graphene/relay/tests/test_node_custom.py b/graphene/relay/tests/test_node_custom.py index 07e50a1bc..773be48f3 100644 --- a/graphene/relay/tests/test_node_custom.py +++ b/graphene/relay/tests/test_node_custom.py @@ -1,4 +1,5 @@ -from graphql import graphql +from graphql import graphql_sync +from graphql.pyutils import dedent from ...types import Interface, ObjectType, Schema from ...types.scalars import Int, String @@ -15,7 +16,7 @@ def to_global_id(type, id): @staticmethod def get_node_from_global_id(info, id, only_type=None): - assert info.schema == schema + assert info.schema is graphql_schema if id in user_data: return user_data.get(id) else: @@ -23,14 +24,14 @@ def get_node_from_global_id(info, id, only_type=None): class BasePhoto(Interface): - width = Int() + width = Int(description="The width of the photo in pixels") class User(ObjectType): class Meta: interfaces = [CustomNode] - name = String() + name = String(description="The full name of the user") class Photo(ObjectType): @@ -48,37 +49,47 @@ class RootQuery(ObjectType): schema = Schema(query=RootQuery, types=[User, Photo]) +graphql_schema = schema.graphql_schema def test_str_schema_correct(): - assert ( - str(schema) - == """schema { - query: RootQuery -} - -interface BasePhoto { - width: Int -} - -interface Node { - id: ID! -} - -type Photo implements Node, BasePhoto { - id: ID! - width: Int -} - -type RootQuery { - node(id: ID!): Node -} - -type User implements Node { - id: ID! - name: String -} -""" + assert str(schema) == dedent( + ''' + schema { + query: RootQuery + } + + interface BasePhoto { + """The width of the photo in pixels""" + width: Int + } + + interface Node { + """The ID of the object""" + id: ID! 
+ } + + type Photo implements Node & BasePhoto { + """The ID of the object""" + id: ID! + + """The width of the photo in pixels""" + width: Int + } + + type RootQuery { + """The ID of the object""" + node(id: ID!): Node + } + + type User implements Node { + """The ID of the object""" + id: ID! + + """The full name of the user""" + name: String + } + ''' ) @@ -91,7 +102,7 @@ def test_gets_the_correct_id_for_users(): } """ expected = {"node": {"id": "1"}} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -105,7 +116,7 @@ def test_gets_the_correct_id_for_photos(): } """ expected = {"node": {"id": "4"}} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -122,7 +133,7 @@ def test_gets_the_correct_name_for_users(): } """ expected = {"node": {"id": "1", "name": "John Doe"}} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -139,7 +150,7 @@ def test_gets_the_correct_width_for_photos(): } """ expected = {"node": {"id": "4", "width": 400}} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -154,7 +165,7 @@ def test_gets_the_correct_typename_for_users(): } """ expected = {"node": {"id": "1", "__typename": "User"}} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -169,7 +180,7 @@ def test_gets_the_correct_typename_for_photos(): } """ expected = {"node": {"id": "4", "__typename": "Photo"}} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -186,7 +197,7 @@ def test_ignores_photo_fragments_on_user(): } """ expected = {"node": {"id": "1"}} - result = 
graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -200,7 +211,7 @@ def test_returns_null_for_bad_ids(): } """ expected = {"node": None} - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -239,7 +250,7 @@ def test_have_correct_node_interface(): ], } } - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected @@ -291,6 +302,6 @@ def test_has_correct_node_root_field(): } } } - result = graphql(schema, query) + result = graphql_sync(graphql_schema, query) assert not result.errors assert result.data == expected diff --git a/graphene/test/__init__.py b/graphene/test/__init__.py index dab3b51bc..8591dc066 100644 --- a/graphene/test/__init__.py +++ b/graphene/test/__init__.py @@ -1,5 +1,4 @@ from promise import Promise, is_thenable -import six from graphql.error import format_error as format_graphql_error from graphql.error import GraphQLError @@ -9,24 +8,19 @@ def default_format_error(error): if isinstance(error, GraphQLError): return format_graphql_error(error) - - return {"message": six.text_type(error)} + return {"message": str(error)} def format_execution_result(execution_result, format_error): if execution_result: response = {} - if execution_result.errors: response["errors"] = [format_error(e) for e in execution_result.errors] - - if not execution_result.invalid: - response["data"] = execution_result.data - + response["data"] = execution_result.data return response -class Client(object): +class Client: def __init__(self, schema, format_error=None, **execute_options): assert isinstance(schema, Schema) self.schema = schema diff --git a/graphene/tests/issues/test_313.py b/graphene/tests/issues/test_313.py index 34dfef1ab..8082677a1 100644 --- a/graphene/tests/issues/test_313.py +++ b/graphene/tests/issues/test_313.py @@ -21,7 
+21,7 @@ class Meta: class CreatePost(graphene.Mutation): - class Input: + class Arguments: text = graphene.String(required=True) result = graphene.Field(CreatePostResult) diff --git a/graphene/tests/issues/test_356.py b/graphene/tests/issues/test_356.py index 4571aad8e..0e7daa094 100644 --- a/graphene/tests/issues/test_356.py +++ b/graphene/tests/issues/test_356.py @@ -1,6 +1,6 @@ # https://github.com/graphql-python/graphene/issues/356 -import pytest +from pytest import raises import graphene from graphene import relay @@ -23,10 +23,11 @@ def test_issue(): class Query(graphene.ObjectType): things = relay.ConnectionField(MyUnion) - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: graphene.Schema(query=Query) assert str(exc_info.value) == ( - "IterableConnectionField type have to be a subclass of Connection. " - 'Received "MyUnion".' + "Query fields cannot be resolved:" + " IterableConnectionField type has to be a subclass of Connection." + ' Received "MyUnion".' 
) diff --git a/graphene/types/__init__.py b/graphene/types/__init__.py index 965916059..292db235b 100644 --- a/graphene/types/__init__.py +++ b/graphene/types/__init__.py @@ -1,5 +1,5 @@ # flake8: noqa -from graphql import ResolveInfo +from graphql import GraphQLResolveInfo as ResolveInfo from .objecttype import ObjectType from .interface import Interface diff --git a/graphene/types/argument.py b/graphene/types/argument.py index cb72322ce..f0ba4c1c7 100644 --- a/graphene/types/argument.py +++ b/graphene/types/argument.py @@ -1,4 +1,3 @@ -from collections import OrderedDict from itertools import chain from .dynamic import Dynamic @@ -81,7 +80,7 @@ def to_arguments(args, extra_args=None): else: extra_args = [] iter_arguments = chain(args.items(), extra_args) - arguments = OrderedDict() + arguments = {} for default_name, arg in iter_arguments: if isinstance(arg, Dynamic): arg = arg.get_type() diff --git a/graphene/types/base.py b/graphene/types/base.py index f43feb478..79907b4d9 100644 --- a/graphene/types/base.py +++ b/graphene/types/base.py @@ -1,12 +1,10 @@ +from typing import Type + from ..utils.subclass_with_meta import SubclassWithMeta from ..utils.trim_docstring import trim_docstring -import six - -if six.PY3: - from typing import Type -class BaseOptions(object): +class BaseOptions: name = None # type: str description = None # type: str diff --git a/graphene/types/context.py b/graphene/types/context.py index 70b1a5b26..fa405179a 100644 --- a/graphene/types/context.py +++ b/graphene/types/context.py @@ -1,4 +1,4 @@ -class Context(object): +class Context: """ Context can be used to make a convenient container for attributes to provide for execution for resolvers of a GraphQL operation like a query. 
diff --git a/graphene/types/datetime.py b/graphene/types/datetime.py index 3519d76db..c533d23e4 100644 --- a/graphene/types/datetime.py +++ b/graphene/types/datetime.py @@ -3,8 +3,8 @@ import datetime from aniso8601 import parse_date, parse_datetime, parse_time -from graphql.language import ast -from six import string_types +from graphql.error import INVALID +from graphql.language import StringValueNode from .scalars import Scalar @@ -27,7 +27,7 @@ def serialize(date): @classmethod def parse_literal(cls, node): - if isinstance(node, ast.StringValue): + if isinstance(node, StringValueNode): return cls.parse_value(node.value) @staticmethod @@ -35,10 +35,10 @@ def parse_value(value): try: if isinstance(value, datetime.date): return value - elif isinstance(value, string_types): + elif isinstance(value, str): return parse_date(value) except ValueError: - return None + return INVALID class DateTime(Scalar): @@ -57,7 +57,7 @@ def serialize(dt): @classmethod def parse_literal(cls, node): - if isinstance(node, ast.StringValue): + if isinstance(node, StringValueNode): return cls.parse_value(node.value) @staticmethod @@ -65,10 +65,10 @@ def parse_value(value): try: if isinstance(value, datetime.datetime): return value - elif isinstance(value, string_types): + elif isinstance(value, str): return parse_datetime(value) except ValueError: - return None + return INVALID class Time(Scalar): @@ -87,7 +87,7 @@ def serialize(time): @classmethod def parse_literal(cls, node): - if isinstance(node, ast.StringValue): + if isinstance(node, StringValueNode): return cls.parse_value(node.value) @classmethod @@ -95,7 +95,7 @@ def parse_value(cls, value): try: if isinstance(value, datetime.time): return value - elif isinstance(value, string_types): + elif isinstance(value, str): return parse_time(value) except ValueError: - return None + return INVALID diff --git a/graphene/types/decimal.py b/graphene/types/decimal.py index 2f99134d0..10a2609a9 100644 --- a/graphene/types/decimal.py +++ 
b/graphene/types/decimal.py @@ -2,7 +2,7 @@ from decimal import Decimal as _Decimal -from graphql.language import ast +from graphql.language.ast import StringValueNode from .scalars import Scalar @@ -23,7 +23,7 @@ def serialize(dec): @classmethod def parse_literal(cls, node): - if isinstance(node, ast.StringValue): + if isinstance(node, StringValueNode): return cls.parse_value(node.value) @staticmethod diff --git a/graphene/types/definitions.py b/graphene/types/definitions.py index a914008c5..009169201 100644 --- a/graphene/types/definitions.py +++ b/graphene/types/definitions.py @@ -8,7 +8,7 @@ ) -class GrapheneGraphQLType(object): +class GrapheneGraphQLType: """ A class for extending the base GraphQLType with the related graphene_type diff --git a/graphene/types/enum.py b/graphene/types/enum.py index 8c95efbe6..1d290a20e 100644 --- a/graphene/types/enum.py +++ b/graphene/types/enum.py @@ -1,10 +1,7 @@ -from collections import OrderedDict - -import six +from enum import Enum as PyEnum from graphene.utils.subclass_with_meta import SubclassWithMeta_Meta -from ..pyutils.compat import Enum as PyEnum from .base import BaseOptions, BaseType from .unmountedtype import UnmountedType @@ -25,13 +22,13 @@ class EnumOptions(BaseOptions): class EnumMeta(SubclassWithMeta_Meta): def __new__(cls, name, bases, classdict, **options): - enum_members = OrderedDict(classdict, __eq__=eq_enum) + enum_members = dict(classdict, __eq__=eq_enum) # We remove the Meta attribute from the class to not collide # with the enum values. 
enum_members.pop("Meta", None) enum = PyEnum(cls.__name__, enum_members) return SubclassWithMeta_Meta.__new__( - cls, name, bases, OrderedDict(classdict, __enum__=enum), **options + cls, name, bases, dict(classdict, __enum__=enum), **options ) def get(cls, value): @@ -41,7 +38,7 @@ def __getitem__(cls, value): return cls._meta.enum[value] def __prepare__(name, bases, **kwargs): # noqa: N805 - return OrderedDict() + return {} def __call__(cls, *args, **kwargs): # noqa: N805 if cls is Enum: @@ -66,7 +63,7 @@ def from_enum(cls, enum, description=None, deprecation_reason=None): # noqa: N8 return type(meta_class.enum.__name__, (Enum,), {"Meta": meta_class}) -class Enum(six.with_metaclass(EnumMeta, UnmountedType, BaseType)): +class Enum(UnmountedType, BaseType, metaclass=EnumMeta): """ Enum type definition diff --git a/graphene/types/field.py b/graphene/types/field.py index 7f63a8535..d55b0347a 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -1,5 +1,5 @@ import inspect -from collections import Mapping, OrderedDict +from collections.abc import Mapping from functools import partial from .argument import Argument, to_arguments @@ -100,7 +100,7 @@ def __init__( self.name = name self._type = type - self.args = to_arguments(args or OrderedDict(), extra_args) + self.args = to_arguments(args or {}, extra_args) if source: resolver = partial(source_resolver, source) self.resolver = resolver diff --git a/graphene/types/generic.py b/graphene/types/generic.py index e5470dd97..5d1a6c4b6 100644 --- a/graphene/types/generic.py +++ b/graphene/types/generic.py @@ -1,12 +1,12 @@ from __future__ import unicode_literals from graphql.language.ast import ( - BooleanValue, - FloatValue, - IntValue, - ListValue, - ObjectValue, - StringValue, + BooleanValueNode, + FloatValueNode, + IntValueNode, + ListValueNode, + ObjectValueNode, + StringValueNode, ) from graphene.types.scalars import MAX_INT, MIN_INT @@ -30,17 +30,17 @@ def identity(value): @staticmethod def 
parse_literal(ast): - if isinstance(ast, (StringValue, BooleanValue)): + if isinstance(ast, (StringValueNode, BooleanValueNode)): return ast.value - elif isinstance(ast, IntValue): + elif isinstance(ast, IntValueNode): num = int(ast.value) if MIN_INT <= num <= MAX_INT: return num - elif isinstance(ast, FloatValue): + elif isinstance(ast, FloatValueNode): return float(ast.value) - elif isinstance(ast, ListValue): + elif isinstance(ast, ListValueNode): return [GenericScalar.parse_literal(value) for value in ast.values] - elif isinstance(ast, ObjectValue): + elif isinstance(ast, ObjectValueNode): return { field.name.value: GenericScalar.parse_literal(field.value) for field in ast.fields diff --git a/graphene/types/inputobjecttype.py b/graphene/types/inputobjecttype.py index d13689313..98f0148de 100644 --- a/graphene/types/inputobjecttype.py +++ b/graphene/types/inputobjecttype.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from .base import BaseOptions, BaseType from .inputfield import InputField from .unmountedtype import UnmountedType @@ -22,7 +20,7 @@ class Meta: def __init__(self, *args, **kwargs): dict.__init__(self, *args, **kwargs) - for key in self._meta.fields.keys(): + for key in self._meta.fields: setattr(self, key, self.get(key, None)) def __init_subclass__(cls, *args, **kwargs): @@ -70,7 +68,7 @@ def __init_subclass_with_meta__(cls, container=None, _meta=None, **options): if not _meta: _meta = InputObjectTypeOptions(cls) - fields = OrderedDict() + fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=InputField)) diff --git a/graphene/types/interface.py b/graphene/types/interface.py index 59184a1f0..def0d040f 100644 --- a/graphene/types/interface.py +++ b/graphene/types/interface.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from .base import BaseOptions, BaseType from .field import Field from .utils import yank_fields_from_attrs @@ -51,7 +49,7 @@ def __init_subclass_with_meta__(cls, 
_meta=None, **options): if not _meta: _meta = InterfaceOptions(cls) - fields = OrderedDict() + fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) diff --git a/graphene/types/json.py b/graphene/types/json.py index 6b87ae6e1..4bb5061ce 100644 --- a/graphene/types/json.py +++ b/graphene/types/json.py @@ -2,7 +2,7 @@ import json -from graphql.language import ast +from graphql.language.ast import StringValueNode from .scalars import Scalar @@ -21,7 +21,7 @@ def serialize(dt): @staticmethod def parse_literal(node): - if isinstance(node, ast.StringValue): + if isinstance(node, StringValueNode): return json.loads(node.value) @staticmethod diff --git a/graphene/types/mutation.py b/graphene/types/mutation.py index c96162e44..0710d66f4 100644 --- a/graphene/types/mutation.py +++ b/graphene/types/mutation.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from ..utils.deprecated import warn_deprecation from ..utils.get_unbound_function import get_unbound_function from ..utils.props import props @@ -90,7 +88,7 @@ def __init_subclass_with_meta__( if not output: # If output is defined, we don't need to get the fields - fields = OrderedDict() + fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) output = cls @@ -103,7 +101,7 @@ def __init_subclass_with_meta__( warn_deprecation( ( "Please use {name}.Arguments instead of {name}.Input." 
- "Input is now only used in ClientMutationID.\n" + " Input is now only used in ClientMutationID.\n" "Read more:" " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" ).format(name=cls.__name__) diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index 88c9f5bc7..1fa4cf353 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from .base import BaseOptions, BaseType from .field import Field from .interface import Interface @@ -100,7 +98,7 @@ def __init_subclass_with_meta__( if not _meta: _meta = ObjectTypeOptions(cls) - fields = OrderedDict() + fields = {} for interface in interfaces: assert issubclass(interface, Interface), ( diff --git a/graphene/types/scalars.py b/graphene/types/scalars.py index c5f437872..245fa570b 100644 --- a/graphene/types/scalars.py +++ b/graphene/types/scalars.py @@ -1,12 +1,15 @@ -import six -from graphql.language.ast import BooleanValue, FloatValue, IntValue, StringValue +from typing import Any + +from graphql.language.ast import ( + BooleanValueNode, + FloatValueNode, + IntValueNode, + StringValueNode, +) from .base import BaseOptions, BaseType from .unmountedtype import UnmountedType -if six.PY3: - from typing import Any - class ScalarOptions(BaseOptions): pass @@ -73,7 +76,7 @@ def coerce_int(value): @staticmethod def parse_literal(ast): - if isinstance(ast, IntValue): + if isinstance(ast, IntValueNode): num = int(ast.value) if MIN_INT <= num <= MAX_INT: return num @@ -99,7 +102,7 @@ def coerce_float(value): @staticmethod def parse_literal(ast): - if isinstance(ast, (FloatValue, IntValue)): + if isinstance(ast, (FloatValueNode, IntValueNode)): return float(ast.value) @@ -114,14 +117,14 @@ class String(Scalar): def coerce_string(value): if isinstance(value, bool): return u"true" if value else u"false" - return six.text_type(value) + return str(value) serialize = coerce_string parse_value = coerce_string 
@staticmethod def parse_literal(ast): - if isinstance(ast, StringValue): + if isinstance(ast, StringValueNode): return ast.value @@ -135,7 +138,7 @@ class Boolean(Scalar): @staticmethod def parse_literal(ast): - if isinstance(ast, BooleanValue): + if isinstance(ast, BooleanValueNode): return ast.value @@ -153,5 +156,5 @@ class ID(Scalar): @staticmethod def parse_literal(ast): - if isinstance(ast, (StringValue, IntValue)): + if isinstance(ast, (StringValueNode, IntValueNode)): return ast.value diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 07c25763c..bf8c469a4 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -1,37 +1,416 @@ import inspect +from functools import partial -from graphql import GraphQLObjectType, GraphQLSchema, graphql, is_type -from graphql.type.directives import ( - GraphQLDirective, - GraphQLIncludeDirective, - GraphQLSkipDirective, +from graphql import ( + default_type_resolver, + get_introspection_query, + graphql, + graphql_sync, + introspection_types, + is_type, + print_schema, + GraphQLArgument, + GraphQLBoolean, + GraphQLEnumValue, + GraphQLField, + GraphQLFloat, + GraphQLID, + GraphQLInputField, + GraphQLInt, + GraphQLList, + GraphQLNonNull, + GraphQLObjectType, + GraphQLSchema, + GraphQLString, + INVALID, ) -from graphql.type.introspection import IntrospectionSchema -from graphql.utils.introspection_query import introspection_query -from graphql.utils.schema_printer import print_schema -from .definitions import GrapheneGraphQLType +from ..utils.str_converters import to_camel_case +from ..utils.get_unbound_function import get_unbound_function +from .definitions import ( + GrapheneEnumType, + GrapheneGraphQLType, + GrapheneInputObjectType, + GrapheneInterfaceType, + GrapheneObjectType, + GrapheneScalarType, + GrapheneUnionType, +) +from .dynamic import Dynamic +from .enum import Enum +from .field import Field +from .inputobjecttype import InputObjectType +from .interface import Interface from 
.objecttype import ObjectType -from .typemap import TypeMap, is_graphene_type +from .resolver import get_default_resolver +from .scalars import ID, Boolean, Float, Int, Scalar, String +from .structures import List, NonNull +from .union import Union +from .utils import get_field_as + +introspection_query = get_introspection_query() +IntrospectionSchema = introspection_types["__Schema"] -def assert_valid_root_type(_type): - if _type is None: +def assert_valid_root_type(type_): + if type_ is None: return - is_graphene_objecttype = inspect.isclass(_type) and issubclass(_type, ObjectType) - is_graphql_objecttype = isinstance(_type, GraphQLObjectType) + is_graphene_objecttype = inspect.isclass(type_) and issubclass(type_, ObjectType) + is_graphql_objecttype = isinstance(type_, GraphQLObjectType) assert is_graphene_objecttype or is_graphql_objecttype, ( "Type {} is not a valid ObjectType." - ).format(_type) + ).format(type_) -class Schema(GraphQLSchema): - """ - Graphene Schema can execute operations (query, mutation, subscription) against the defined - types. +def is_graphene_type(type_): + if isinstance(type_, (List, NonNull)): + return True + if inspect.isclass(type_) and issubclass( + type_, (ObjectType, InputObjectType, Scalar, Interface, Union, Enum) + ): + return True + + +def resolve_type(resolve_type_func, map_, type_name, root, info, _type): + type_ = resolve_type_func(root, info) + + if not type_: + return_type = map_[type_name] + return default_type_resolver(root, info, return_type) + + if inspect.isclass(type_) and issubclass(type_, ObjectType): + graphql_type = map_.get(type_._meta.name) + assert graphql_type, "Can't find type {} in schema".format(type_._meta.name) + assert graphql_type.graphene_type == type_, ( + "The type {} does not match with the associated graphene type {}." 
+ ).format(type_, graphql_type.graphene_type) + return graphql_type + + return type_ + + +def is_type_of_from_possible_types(possible_types, root, _info): + return isinstance(root, possible_types) + + +class GrapheneGraphQLSchema(GraphQLSchema): + """A GraphQLSchema that can deal with Graphene types as well.""" + + def __init__( + self, + query=None, + mutation=None, + subscription=None, + types=None, + directives=None, + auto_camelcase=True, + ): + assert_valid_root_type(query) + assert_valid_root_type(mutation) + assert_valid_root_type(subscription) + + self.auto_camelcase = auto_camelcase + super().__init__(query, mutation, subscription, types, directives) + + if query: + self.query_type = self.get_type( + query.name if isinstance(query, GraphQLObjectType) else query._meta.name + ) + if mutation: + self.mutation_type = self.get_type( + mutation.name + if isinstance(mutation, GraphQLObjectType) + else mutation._meta.name + ) + if subscription: + self.subscription_type = self.get_type( + subscription.name + if isinstance(subscription, GraphQLObjectType) + else subscription._meta.name + ) + + def get_graphql_type(self, _type): + if not _type: + return _type + if is_type(_type): + return _type + if is_graphene_type(_type): + graphql_type = self.get_type(_type._meta.name) + assert graphql_type, "Type {} not found in this schema.".format( + _type._meta.name + ) + assert graphql_type.graphene_type == _type + return graphql_type + raise Exception("{} is not a valid GraphQL type.".format(_type)) + + # noinspection PyMethodOverriding + def type_map_reducer(self, map_, type_): + if not type_: + return map_ + if inspect.isfunction(type_): + type_ = type_() + if is_graphene_type(type_): + return self.graphene_reducer(map_, type_) + return super().type_map_reducer(map_, type_) - For advanced purposes, the schema can be used to lookup type definitions and answer questions - about the types through introspection. 
+ def graphene_reducer(self, map_, type_): + if isinstance(type_, (List, NonNull)): + return self.type_map_reducer(map_, type_.of_type) + if type_._meta.name in map_: + _type = map_[type_._meta.name] + if isinstance(_type, GrapheneGraphQLType): + assert _type.graphene_type == type_, ( + "Found different types with the same name in the schema: {}, {}." + ).format(_type.graphene_type, type_) + return map_ + + if issubclass(type_, ObjectType): + internal_type = self.construct_objecttype(map_, type_) + elif issubclass(type_, InputObjectType): + internal_type = self.construct_inputobjecttype(map_, type_) + elif issubclass(type_, Interface): + internal_type = self.construct_interface(map_, type_) + elif issubclass(type_, Scalar): + internal_type = self.construct_scalar(type_) + elif issubclass(type_, Enum): + internal_type = self.construct_enum(type_) + elif issubclass(type_, Union): + internal_type = self.construct_union(map_, type_) + else: + raise Exception("Expected Graphene type, but received: {}.".format(type_)) + + return super().type_map_reducer(map_, internal_type) + + @staticmethod + def construct_scalar(type_): + # We have a mapping to the original GraphQL types + # so there are no collisions. 
+ _scalars = { + String: GraphQLString, + Int: GraphQLInt, + Float: GraphQLFloat, + Boolean: GraphQLBoolean, + ID: GraphQLID, + } + if type_ in _scalars: + return _scalars[type_] + + return GrapheneScalarType( + graphene_type=type_, + name=type_._meta.name, + description=type_._meta.description, + serialize=getattr(type_, "serialize", None), + parse_value=getattr(type_, "parse_value", None), + parse_literal=getattr(type_, "parse_literal", None), + ) + + @staticmethod + def construct_enum(type_): + values = {} + for name, value in type_._meta.enum.__members__.items(): + description = getattr(value, "description", None) + deprecation_reason = getattr(value, "deprecation_reason", None) + if not description and callable(type_._meta.description): + description = type_._meta.description(value) + + if not deprecation_reason and callable(type_._meta.deprecation_reason): + deprecation_reason = type_._meta.deprecation_reason(value) + + values[name] = GraphQLEnumValue( + value=value.value, + description=description, + deprecation_reason=deprecation_reason, + ) + + type_description = ( + type_._meta.description(None) + if callable(type_._meta.description) + else type_._meta.description + ) + + return GrapheneEnumType( + graphene_type=type_, + values=values, + name=type_._meta.name, + description=type_description, + ) + + def construct_objecttype(self, map_, type_): + if type_._meta.name in map_: + _type = map_[type_._meta.name] + if isinstance(_type, GrapheneGraphQLType): + assert _type.graphene_type == type_, ( + "Found different types with the same name in the schema: {}, {}." 
+ ).format(_type.graphene_type, type_) + return _type + + def interfaces(): + interfaces = [] + for interface in type_._meta.interfaces: + self.graphene_reducer(map_, interface) + internal_type = map_[interface._meta.name] + assert internal_type.graphene_type == interface + interfaces.append(internal_type) + return interfaces + + if type_._meta.possible_types: + is_type_of = partial( + is_type_of_from_possible_types, type_._meta.possible_types + ) + else: + is_type_of = type_.is_type_of + + return GrapheneObjectType( + graphene_type=type_, + name=type_._meta.name, + description=type_._meta.description, + fields=partial(self.construct_fields_for_type, map_, type_), + is_type_of=is_type_of, + interfaces=interfaces, + ) + + def construct_interface(self, map_, type_): + if type_._meta.name in map_: + _type = map_[type_._meta.name] + if isinstance(_type, GrapheneInterfaceType): + assert _type.graphene_type == type_, ( + "Found different types with the same name in the schema: {}, {}." + ).format(_type.graphene_type, type_) + return _type + + _resolve_type = None + if type_.resolve_type: + _resolve_type = partial( + resolve_type, type_.resolve_type, map_, type_._meta.name + ) + return GrapheneInterfaceType( + graphene_type=type_, + name=type_._meta.name, + description=type_._meta.description, + fields=partial(self.construct_fields_for_type, map_, type_), + resolve_type=_resolve_type, + ) + + def construct_inputobjecttype(self, map_, type_): + return GrapheneInputObjectType( + graphene_type=type_, + name=type_._meta.name, + description=type_._meta.description, + out_type=type_._meta.container, + fields=partial( + self.construct_fields_for_type, map_, type_, is_input_type=True + ), + ) + + def construct_union(self, map_, type_): + _resolve_type = None + if type_.resolve_type: + _resolve_type = partial( + resolve_type, type_.resolve_type, map_, type_._meta.name + ) + + def types(): + union_types = [] + for objecttype in type_._meta.types: + self.graphene_reducer(map_, 
objecttype) + internal_type = map_[objecttype._meta.name] + assert internal_type.graphene_type == objecttype + union_types.append(internal_type) + return union_types + + return GrapheneUnionType( + graphene_type=type_, + name=type_._meta.name, + description=type_._meta.description, + types=types, + resolve_type=_resolve_type, + ) + + def get_name(self, name): + if self.auto_camelcase: + return to_camel_case(name) + return name + + def construct_fields_for_type(self, map_, type_, is_input_type=False): + fields = {} + for name, field in type_._meta.fields.items(): + if isinstance(field, Dynamic): + field = get_field_as(field.get_type(self), _as=Field) + if not field: + continue + map_ = self.type_map_reducer(map_, field.type) + field_type = self.get_field_type(map_, field.type) + if is_input_type: + _field = GraphQLInputField( + field_type, + default_value=field.default_value, + out_name=name, + description=field.description, + ) + else: + args = {} + for arg_name, arg in field.args.items(): + map_ = self.type_map_reducer(map_, arg.type) + arg_type = self.get_field_type(map_, arg.type) + processed_arg_name = arg.name or self.get_name(arg_name) + args[processed_arg_name] = GraphQLArgument( + arg_type, + out_name=arg_name, + description=arg.description, + default_value=INVALID + if isinstance(arg.type, NonNull) + else arg.default_value, + ) + _field = GraphQLField( + field_type, + args=args, + resolve=field.get_resolver( + self.get_resolver_for_type(type_, name, field.default_value) + ), + deprecation_reason=field.deprecation_reason, + description=field.description, + ) + field_name = field.name or self.get_name(name) + fields[field_name] = _field + return fields + + def get_resolver_for_type(self, type_, name, default_value): + if not issubclass(type_, ObjectType): + return + resolver = getattr(type_, "resolve_{}".format(name), None) + if not resolver: + # If we don't find the resolver in the ObjectType class, then try to + # find it in each of the interfaces + 
interface_resolver = None + for interface in type_._meta.interfaces: + if name not in interface._meta.fields: + continue + interface_resolver = getattr(interface, "resolve_{}".format(name), None) + if interface_resolver: + break + resolver = interface_resolver + + # Only if is not decorated with classmethod + if resolver: + return get_unbound_function(resolver) + + default_resolver = type_._meta.default_resolver or get_default_resolver() + return partial(default_resolver, name, default_value) + + def get_field_type(self, map_, type_): + if isinstance(type_, List): + return GraphQLList(self.get_field_type(map_, type_.of_type)) + if isinstance(type_, NonNull): + return GraphQLNonNull(self.get_field_type(map_, type_.of_type)) + return map_.get(type_._meta.name) + + +class Schema: + """Schema Definition. + + A Graphene Schema can execute operations (query, mutation, subscription) against the defined + types. For advanced purposes, the schema can be used to lookup type definitions and answer + questions about the types through introspection. Args: query (ObjectType): Root query *ObjectType*. Describes entry point for fields to *read* @@ -40,8 +419,8 @@ class Schema(GraphQLSchema): fields to *create, update or delete* data in your API. subscription (ObjectType, optional): Root subscription *ObjectType*. Describes entry point for fields to receive continuous updates. - directives (List[GraphQLDirective], optional): List of custom directives to include in - GraphQL schema. Defaults to only include directives definved by GraphQL spec (@include + directives (List[GraphQLDirective], optional): List of custom directives to include in the + GraphQL schema. Defaults to only include directives defined by GraphQL spec (@include and @skip) [GraphQLIncludeDirective, GraphQLSkipDirective]. types (List[GraphQLType], optional): List of any types to include in schema that may not be introspected through root types. 
@@ -54,37 +433,24 @@ def __init__( query=None, mutation=None, subscription=None, - directives=None, types=None, + directives=None, auto_camelcase=True, ): - assert_valid_root_type(query) - assert_valid_root_type(mutation) - assert_valid_root_type(subscription) - self._query = query - self._mutation = mutation - self._subscription = subscription - self.types = types - self.auto_camelcase = auto_camelcase - if directives is None: - directives = [GraphQLIncludeDirective, GraphQLSkipDirective] - - assert all( - isinstance(d, GraphQLDirective) for d in directives - ), "Schema directives must be List[GraphQLDirective] if provided but got: {}.".format( - directives + self.query = query + self.mutation = mutation + self.subscription = subscription + self.graphql_schema = GrapheneGraphQLSchema( + query, + mutation, + subscription, + types, + directives, + auto_camelcase=auto_camelcase, ) - self._directives = directives - self.build_typemap() - - def get_query_type(self): - return self.get_graphql_type(self._query) - - def get_mutation_type(self): - return self.get_graphql_type(self._mutation) - def get_subscription_type(self): - return self.get_graphql_type(self._subscription) + def __str__(self): + return print_schema(self.graphql_schema) def __getattr__(self, type_name): """ @@ -93,77 +459,67 @@ def __getattr__(self, type_name): Example: using schema.Query for accessing the "Query" type in the Schema """ - _type = super(Schema, self).get_type(type_name) + _type = self.graphql_schema.get_type(type_name) if _type is None: raise AttributeError('Type "{}" not found in the Schema'.format(type_name)) if isinstance(_type, GrapheneGraphQLType): return _type.graphene_type return _type - def get_graphql_type(self, _type): - if not _type: - return _type - if is_type(_type): - return _type - if is_graphene_type(_type): - graphql_type = self.get_type(_type._meta.name) - assert graphql_type, "Type {} not found in this schema.".format( - _type._meta.name - ) - assert 
graphql_type.graphene_type == _type - return graphql_type - raise Exception("{} is not a valid GraphQL type.".format(_type)) + def lazy(self, _type): + return lambda: self.get_type(_type) def execute(self, *args, **kwargs): - """ - Use the `graphql` function from `graphql-core` to provide the result for a query string. - Most of the time this method will be called by one of the Graphene :ref:`Integrations` - via a web request. + """Execute a GraphQL query on the schema. + + Use the `graphql_sync` function from `graphql-core` to provide the result + for a query string. Most of the time this method will be called by one of the Graphene + :ref:`Integrations` via a web request. Args: - request_string (str or Document): GraphQL request (query, mutation or subscription) in - string or parsed AST form from `graphql-core`. - root (Any, optional): Value to use as the parent value object when resolving root - types. - context (Any, optional): Value to be made avaiable to all resolvers via + request_string (str or Document): GraphQL request (query, mutation or subscription) + as string or parsed AST form from `graphql-core`. + root_value (Any, optional): Value to use as the parent value object when resolving + root types. + context_value (Any, optional): Value to be made avaiable to all resolvers via `info.context`. Can be used to share authorization, dataloaders or other information needed to resolve an operation. - variables (dict, optional): If variables are used in the request string, they can be - provided in dictionary form mapping the variable name to the variable value. - operation_name (str, optional): If mutiple operations are provided in the + variable_values (dict, optional): If variables are used in the request string, they can + be provided in dictionary form mapping the variable name to the variable value. 
+ operation_name (str, optional): If multiple operations are provided in the request_string, an operation name must be provided for the result to be provided. middleware (List[SupportsGraphQLMiddleware]): Supply request level middleware as defined in `graphql-core`. - backend (GraphQLCoreBackend, optional): Override the default GraphQLCoreBackend. - **execute_options (Any): Depends on backend selected. Default backend has several - options such as: validate, allow_subscriptions, return_promise, executor. Returns: :obj:`ExecutionResult` containing any data and errors for the operation. """ - return graphql(self, *args, **kwargs) + kwargs = normalize_execute_kwargs(kwargs) + return graphql_sync(self.graphql_schema, *args, **kwargs) - def introspect(self): - instrospection = self.execute(introspection_query) - if instrospection.errors: - raise instrospection.errors[0] - return instrospection.data + async def execute_async(self, *args, **kwargs): + """Execute a GraphQL query on the schema asynchronously. - def __str__(self): - return print_schema(self) + Same as `execute`, but uses `graphql` instead of `graphql_sync`. 
+ """ + kwargs = normalize_execute_kwargs(kwargs) + return await graphql(self.graphql_schema, *args, **kwargs) - def lazy(self, _type): - return lambda: self.get_type(_type) + def introspect(self): + introspection = self.execute(introspection_query) + if introspection.errors: + raise introspection.errors[0] + return introspection.data - def build_typemap(self): - initial_types = [ - self._query, - self._mutation, - self._subscription, - IntrospectionSchema, - ] - if self.types: - initial_types += self.types - self._type_map = TypeMap( - initial_types, auto_camelcase=self.auto_camelcase, schema=self - ) + +def normalize_execute_kwargs(kwargs): + """Replace alias names in keyword arguments for graphql()""" + if "root" in kwargs and "root_value" not in kwargs: + kwargs["root_value"] = kwargs.pop("root") + if "context" in kwargs and "context_value" not in kwargs: + kwargs["context_value"] = kwargs.pop("context") + if "variables" in kwargs and "variable_values" not in kwargs: + kwargs["variable_values"] = kwargs.pop("variables") + if "operation" in kwargs and "operation_name" not in kwargs: + kwargs["operation_name"] = kwargs.pop("operation") + return kwargs diff --git a/graphene/types/tests/test_abstracttype.py b/graphene/types/tests/test_abstracttype.py index 414703834..a50c87571 100644 --- a/graphene/types/tests/test_abstracttype.py +++ b/graphene/types/tests/test_abstracttype.py @@ -1,4 +1,5 @@ -from .. 
import abstracttype +from pytest import deprecated_call + from ..abstracttype import AbstractType from ..field import Field from ..objecttype import ObjectType @@ -14,24 +15,25 @@ def get_type(self): return MyType -def test_abstract_objecttype_warn_deprecation(mocker): - mocker.patch.object(abstracttype, "warn_deprecation") - - class MyAbstractType(AbstractType): - field1 = MyScalar() +def test_abstract_objecttype_warn_deprecation(): + with deprecated_call(): - assert abstracttype.warn_deprecation.called + # noinspection PyUnusedLocal + class MyAbstractType(AbstractType): + field1 = MyScalar() def test_generate_objecttype_inherit_abstracttype(): - class MyAbstractType(AbstractType): - field1 = MyScalar() + with deprecated_call(): + + class MyAbstractType(AbstractType): + field1 = MyScalar() - class MyObjectType(ObjectType, MyAbstractType): - field2 = MyScalar() + class MyObjectType(ObjectType, MyAbstractType): + field2 = MyScalar() assert MyObjectType._meta.description is None assert MyObjectType._meta.interfaces == () assert MyObjectType._meta.name == "MyObjectType" - assert list(MyObjectType._meta.fields.keys()) == ["field1", "field2"] + assert list(MyObjectType._meta.fields) == ["field1", "field2"] assert list(map(type, MyObjectType._meta.fields.values())) == [Field, Field] diff --git a/graphene/types/tests/test_argument.py b/graphene/types/tests/test_argument.py index b8f49a47b..db4d6c242 100644 --- a/graphene/types/tests/test_argument.py +++ b/graphene/types/tests/test_argument.py @@ -1,6 +1,6 @@ from functools import partial -import pytest +from pytest import raises from ..argument import Argument, to_arguments from ..field import Field @@ -43,7 +43,7 @@ def test_to_arguments(): def test_to_arguments_raises_if_field(): args = {"arg_string": Field(String)} - with pytest.raises(ValueError) as exc_info: + with raises(ValueError) as exc_info: to_arguments(args) assert str(exc_info.value) == ( @@ -55,7 +55,7 @@ def test_to_arguments_raises_if_field(): def 
test_to_arguments_raises_if_inputfield(): args = {"arg_string": InputField(String)} - with pytest.raises(ValueError) as exc_info: + with raises(ValueError) as exc_info: to_arguments(args) assert str(exc_info.value) == ( diff --git a/graphene/types/tests/test_datetime.py b/graphene/types/tests/test_datetime.py index bb6f212c9..bfd56c6c0 100644 --- a/graphene/types/tests/test_datetime.py +++ b/graphene/types/tests/test_datetime.py @@ -2,7 +2,8 @@ import pytz from graphql import GraphQLError -import pytest + +from pytest import fixture, mark from ..datetime import Date, DateTime, Time from ..objecttype import ObjectType @@ -27,13 +28,13 @@ def resolve_time(self, info, _at=None): schema = Schema(query=Query) -@pytest.fixture +@fixture def sample_datetime(): utc_datetime = datetime.datetime(2019, 5, 25, 5, 30, 15, 10, pytz.utc) return utc_datetime -@pytest.fixture +@fixture def sample_time(sample_datetime): time = datetime.time( sample_datetime.hour, @@ -45,7 +46,7 @@ def sample_time(sample_datetime): return time -@pytest.fixture +@fixture def sample_date(sample_datetime): date = sample_datetime.date() return date @@ -76,12 +77,16 @@ def test_time_query(sample_time): def test_bad_datetime_query(): - not_a_date = "Some string that's not a date" + not_a_date = "Some string that's not a datetime" result = schema.execute("""{ datetime(in: "%s") }""" % not_a_date) - assert len(result.errors) == 1 - assert isinstance(result.errors[0], GraphQLError) + assert result.errors and len(result.errors) == 1 + error = result.errors[0] + assert isinstance(error, GraphQLError) + assert error.message == ( + 'Expected type DateTime, found "Some string that\'s not a datetime".' 
+ ) assert result.data is None @@ -90,18 +95,24 @@ def test_bad_date_query(): result = schema.execute("""{ date(in: "%s") }""" % not_a_date) - assert len(result.errors) == 1 - assert isinstance(result.errors[0], GraphQLError) + error = result.errors[0] + assert isinstance(error, GraphQLError) + assert error.message == ( + 'Expected type Date, found "Some string that\'s not a date".' + ) assert result.data is None def test_bad_time_query(): - not_a_date = "Some string that's not a date" + not_a_date = "Some string that's not a time" result = schema.execute("""{ time(at: "%s") }""" % not_a_date) - assert len(result.errors) == 1 - assert isinstance(result.errors[0], GraphQLError) + error = result.errors[0] + assert isinstance(error, GraphQLError) + assert error.message == ( + 'Expected type Time, found "Some string that\'s not a time".' + ) assert result.data is None @@ -163,7 +174,7 @@ def test_time_query_variable(sample_time): assert result.data == {"time": isoformat} -@pytest.mark.xfail( +@mark.xfail( reason="creating the error message fails when un-parsable object is not JSON serializable." 
) def test_bad_variables(sample_date, sample_datetime, sample_time): @@ -174,11 +185,11 @@ def _test_bad_variables(type_, input_): ), variables={"input": input_}, ) - assert len(result.errors) == 1 # when `input` is not JSON serializable formatting the error message in # `graphql.utils.is_valid_value` line 79 fails with a TypeError + assert isinstance(result.errors, list) + assert len(result.errors) == 1 assert isinstance(result.errors[0], GraphQLError) - print(result.errors[0]) assert result.data is None not_a_date = dict() diff --git a/graphene/types/tests/test_definition.py b/graphene/types/tests/test_definition.py index 549847d5a..b3b480af0 100644 --- a/graphene/types/tests/test_definition.py +++ b/graphene/types/tests/test_definition.py @@ -69,7 +69,8 @@ class MyInputObjectType(InputObjectType): def test_defines_a_query_only_schema(): blog_schema = Schema(Query) - assert blog_schema.get_query_type().graphene_type == Query + assert blog_schema.query == Query + assert blog_schema.graphql_schema.query_type.graphene_type == Query article_field = Query._meta.fields["article"] assert article_field.type == Article @@ -95,7 +96,8 @@ def test_defines_a_query_only_schema(): def test_defines_a_mutation_schema(): blog_schema = Schema(Query, mutation=Mutation) - assert blog_schema.get_mutation_type().graphene_type == Mutation + assert blog_schema.mutation == Mutation + assert blog_schema.graphql_schema.mutation_type.graphene_type == Mutation write_mutation = Mutation._meta.fields["write_article"] assert write_mutation.type == Article @@ -105,7 +107,8 @@ def test_defines_a_mutation_schema(): def test_defines_a_subscription_schema(): blog_schema = Schema(Query, subscription=Subscription) - assert blog_schema.get_subscription_type().graphene_type == Subscription + assert blog_schema.subscription == Subscription + assert blog_schema.graphql_schema.subscription_type.graphene_type == Subscription subscription = Subscription._meta.fields["article_subscribe"] assert 
subscription.type == Article @@ -126,8 +129,9 @@ class SomeSubscription(Mutation): subscribe_to_something = Field(Article, input=Argument(SomeInputObject)) schema = Schema(query=Query, mutation=SomeMutation, subscription=SomeSubscription) + type_map = schema.graphql_schema.type_map - assert schema.get_type_map()["NestedInputObject"].graphene_type is NestedInputObject + assert type_map["NestedInputObject"].graphene_type is NestedInputObject def test_includes_interfaces_thunk_subtypes_in_the_type_map(): @@ -142,8 +146,9 @@ class Query(ObjectType): iface = Field(lambda: SomeInterface) schema = Schema(query=Query, types=[SomeSubtype]) + type_map = schema.graphql_schema.type_map - assert schema.get_type_map()["SomeSubtype"].graphene_type is SomeSubtype + assert type_map["SomeSubtype"].graphene_type is SomeSubtype def test_includes_types_in_union(): @@ -161,9 +166,10 @@ class Query(ObjectType): union = Field(MyUnion) schema = Schema(query=Query) + type_map = schema.graphql_schema.type_map - assert schema.get_type_map()["OtherType"].graphene_type is OtherType - assert schema.get_type_map()["SomeType"].graphene_type is SomeType + assert type_map["OtherType"].graphene_type is OtherType + assert type_map["SomeType"].graphene_type is SomeType def test_maps_enum(): @@ -181,9 +187,10 @@ class Query(ObjectType): union = Field(MyUnion) schema = Schema(query=Query) + type_map = schema.graphql_schema.type_map - assert schema.get_type_map()["OtherType"].graphene_type is OtherType - assert schema.get_type_map()["SomeType"].graphene_type is SomeType + assert type_map["OtherType"].graphene_type is OtherType + assert type_map["SomeType"].graphene_type is SomeType def test_includes_interfaces_subtypes_in_the_type_map(): @@ -198,8 +205,9 @@ class Query(ObjectType): iface = Field(SomeInterface) schema = Schema(query=Query, types=[SomeSubtype]) + type_map = schema.graphql_schema.type_map - assert schema.get_type_map()["SomeSubtype"].graphene_type is SomeSubtype + assert 
type_map["SomeSubtype"].graphene_type is SomeSubtype def test_stringifies_simple_types(): @@ -281,7 +289,7 @@ def test_stringifies_simple_types(): def test_does_not_mutate_passed_field_definitions(): - class CommonFields(object): + class CommonFields: field1 = String() field2 = String(id=String()) @@ -293,7 +301,7 @@ class TestObject2(CommonFields, ObjectType): assert TestObject1._meta.fields == TestObject2._meta.fields - class CommonFields(object): + class CommonFields: field1 = String() field2 = String() diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 6086f54ce..1c5bdb383 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -1,5 +1,3 @@ -import six - from ..argument import Argument from ..enum import Enum, PyEnum from ..field import Field @@ -82,42 +80,22 @@ def custom_deprecation_reason(value): class Query(ObjectType): foo = Episode() - schema = Schema(query=Query) - - GraphQLPyEpisode = schema._type_map["PyEpisode"].values + schema = Schema(query=Query).graphql_schema - assert schema._type_map["PyEpisode"].description == "StarWars Episodes" - assert ( - GraphQLPyEpisode[0].name == "NEWHOPE" - and GraphQLPyEpisode[0].description == "New Hope Episode" - ) - assert ( - GraphQLPyEpisode[1].name == "EMPIRE" - and GraphQLPyEpisode[1].description == "Other" - ) - assert ( - GraphQLPyEpisode[2].name == "JEDI" - and GraphQLPyEpisode[2].description == "Other" - ) + episode = schema.get_type("PyEpisode") - assert ( - GraphQLPyEpisode[0].name == "NEWHOPE" - and GraphQLPyEpisode[0].deprecation_reason == "meh" - ) - assert ( - GraphQLPyEpisode[1].name == "EMPIRE" - and GraphQLPyEpisode[1].deprecation_reason is None - ) - assert ( - GraphQLPyEpisode[2].name == "JEDI" - and GraphQLPyEpisode[2].deprecation_reason is None - ) + assert episode.description == "StarWars Episodes" + assert [ + (name, value.description, value.deprecation_reason) + for name, value in episode.values.items() + ] == [ + 
("NEWHOPE", "New Hope Episode", "meh"), + ("EMPIRE", "Other", None), + ("JEDI", "Other", None), + ] def test_enum_from_python3_enum_uses_enum_doc(): - if not six.PY3: - return - from enum import Enum as PyEnum class Color(PyEnum): diff --git a/graphene/types/tests/test_field.py b/graphene/types/tests/test_field.py index 13c755fca..70ac09109 100644 --- a/graphene/types/tests/test_field.py +++ b/graphene/types/tests/test_field.py @@ -1,6 +1,6 @@ from functools import partial -import pytest +from pytest import raises from ..argument import Argument from ..field import Field @@ -9,7 +9,7 @@ from .utils import MyLazyType -class MyInstance(object): +class MyInstance: value = "value" value_func = staticmethod(lambda: "value_func") @@ -85,7 +85,7 @@ def test_field_with_string_type(): def test_field_not_source_and_resolver(): MyType = object() - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: Field(MyType, source="value", resolver=lambda: None) assert ( str(exc_info.value) @@ -122,7 +122,7 @@ def test_field_name_as_argument(): def test_field_source_argument_as_kw(): MyType = object() field = Field(MyType, b=NonNull(True), c=Argument(None), a=NonNull(False)) - assert list(field.args.keys()) == ["b", "c", "a"] + assert list(field.args) == ["b", "c", "a"] assert isinstance(field.args["b"], Argument) assert isinstance(field.args["b"].type, NonNull) assert field.args["b"].type.of_type is True diff --git a/graphene/types/tests/test_inputobjecttype.py b/graphene/types/tests/test_inputobjecttype.py index dc557b943..e11823823 100644 --- a/graphene/types/tests/test_inputobjecttype.py +++ b/graphene/types/tests/test_inputobjecttype.py @@ -8,7 +8,7 @@ from ..unmountedtype import UnmountedType -class MyType(object): +class MyType: pass @@ -50,7 +50,7 @@ class MyInputObjectType(InputObjectType): field = MyScalar() asa = InputField(MyType) - assert list(MyInputObjectType._meta.fields.keys()) == ["b", "a", "field", "asa"] + assert 
list(MyInputObjectType._meta.fields) == ["b", "a", "field", "asa"] def test_generate_inputobjecttype_unmountedtype(): @@ -78,13 +78,13 @@ class MyObjectType(ObjectType): def test_generate_inputobjecttype_inherit_abstracttype(): - class MyAbstractType(object): + class MyAbstractType: field1 = MyScalar(MyType) class MyInputObjectType(InputObjectType, MyAbstractType): field2 = MyScalar(MyType) - assert list(MyInputObjectType._meta.fields.keys()) == ["field1", "field2"] + assert list(MyInputObjectType._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInputObjectType._meta.fields.values()] == [ InputField, InputField, @@ -92,13 +92,13 @@ class MyInputObjectType(InputObjectType, MyAbstractType): def test_generate_inputobjecttype_inherit_abstracttype_reversed(): - class MyAbstractType(object): + class MyAbstractType: field1 = MyScalar(MyType) class MyInputObjectType(MyAbstractType, InputObjectType): field2 = MyScalar(MyType) - assert list(MyInputObjectType._meta.fields.keys()) == ["field1", "field2"] + assert list(MyInputObjectType._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInputObjectType._meta.fields.values()] == [ InputField, InputField, @@ -133,5 +133,6 @@ def resolve_is_child(self, info, parent): } """ ) + assert not result.errors assert result.data == {"isChild": True} diff --git a/graphene/types/tests/test_interface.py b/graphene/types/tests/test_interface.py index b524296b6..d551f2384 100644 --- a/graphene/types/tests/test_interface.py +++ b/graphene/types/tests/test_interface.py @@ -3,7 +3,7 @@ from ..unmountedtype import UnmountedType -class MyType(object): +class MyType: pass @@ -45,7 +45,7 @@ class MyInterface(Interface): field = MyScalar() asa = Field(MyType) - assert list(MyInterface._meta.fields.keys()) == ["b", "a", "field", "asa"] + assert list(MyInterface._meta.fields) == ["b", "a", "field", "asa"] def test_generate_interface_unmountedtype(): @@ -57,13 +57,13 @@ class MyInterface(Interface): def 
test_generate_interface_inherit_abstracttype(): - class MyAbstractType(object): + class MyAbstractType: field1 = MyScalar() class MyInterface(Interface, MyAbstractType): field2 = MyScalar() - assert list(MyInterface._meta.fields.keys()) == ["field1", "field2"] + assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] @@ -75,16 +75,16 @@ class MyInterface(MyBaseInterface): field2 = MyScalar() assert MyInterface._meta.name == "MyInterface" - assert list(MyInterface._meta.fields.keys()) == ["field1", "field2"] + assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] def test_generate_interface_inherit_abstracttype_reversed(): - class MyAbstractType(object): + class MyAbstractType: field1 = MyScalar() class MyInterface(MyAbstractType, Interface): field2 = MyScalar() - assert list(MyInterface._meta.fields.keys()) == ["field1", "field2"] + assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] diff --git a/graphene/types/tests/test_mutation.py b/graphene/types/tests/test_mutation.py index 69d9b4cc4..4a7ad3c7c 100644 --- a/graphene/types/tests/test_mutation.py +++ b/graphene/types/tests/test_mutation.py @@ -1,4 +1,4 @@ -import pytest +from pytest import raises from ..argument import Argument from ..dynamic import Dynamic @@ -46,7 +46,7 @@ def mutate(self, info, **args): def test_mutation_raises_exception_if_no_mutate(): - with pytest.raises(AssertionError) as excinfo: + with raises(AssertionError) as excinfo: class MyMutation(Mutation): pass diff --git a/graphene/types/tests/test_objecttype.py b/graphene/types/tests/test_objecttype.py index 2acb578f0..25025e4d1 100644 --- a/graphene/types/tests/test_objecttype.py +++ b/graphene/types/tests/test_objecttype.py @@ -1,4 +1,4 @@ -import pytest +from pytest import raises from 
..field import Field from ..interface import Interface @@ -91,7 +91,7 @@ class MyObjectType(ObjectType): m = MyObjectType(_private_state="custom") assert m._private_state == "custom" - with pytest.raises(TypeError): + with raises(TypeError): MyObjectType(_other_private_state="Wrong") @@ -102,11 +102,11 @@ class MyObjectType(ObjectType): field = MyScalar() asa = Field(MyType) - assert list(MyObjectType._meta.fields.keys()) == ["b", "a", "field", "asa"] + assert list(MyObjectType._meta.fields) == ["b", "a", "field", "asa"] def test_generate_objecttype_inherit_abstracttype(): - class MyAbstractType(object): + class MyAbstractType: field1 = MyScalar() class MyObjectType(ObjectType, MyAbstractType): @@ -115,12 +115,12 @@ class MyObjectType(ObjectType, MyAbstractType): assert MyObjectType._meta.description is None assert MyObjectType._meta.interfaces == () assert MyObjectType._meta.name == "MyObjectType" - assert list(MyObjectType._meta.fields.keys()) == ["field1", "field2"] + assert list(MyObjectType._meta.fields) == ["field1", "field2"] assert list(map(type, MyObjectType._meta.fields.values())) == [Field, Field] def test_generate_objecttype_inherit_abstracttype_reversed(): - class MyAbstractType(object): + class MyAbstractType: field1 = MyScalar() class MyObjectType(MyAbstractType, ObjectType): @@ -129,7 +129,7 @@ class MyObjectType(MyAbstractType, ObjectType): assert MyObjectType._meta.description is None assert MyObjectType._meta.interfaces == () assert MyObjectType._meta.name == "MyObjectType" - assert list(MyObjectType._meta.fields.keys()) == ["field1", "field2"] + assert list(MyObjectType._meta.fields) == ["field1", "field2"] assert list(map(type, MyObjectType._meta.fields.values())) == [Field, Field] @@ -142,15 +142,11 @@ class MyObjectType(ObjectType): def test_parent_container_get_fields(): - assert list(Container._meta.fields.keys()) == ["field1", "field2"] + assert list(Container._meta.fields) == ["field1", "field2"] def 
test_parent_container_interface_get_fields(): - assert list(ContainerWithInterface._meta.fields.keys()) == [ - "ifield", - "field1", - "field2", - ] + assert list(ContainerWithInterface._meta.fields) == ["ifield", "field1", "field2"] def test_objecttype_as_container_only_args(): @@ -177,14 +173,14 @@ def test_objecttype_as_container_all_kwargs(): def test_objecttype_as_container_extra_args(): - with pytest.raises(IndexError) as excinfo: + with raises(IndexError) as excinfo: Container("1", "2", "3") assert "Number of args exceeds number of fields" == str(excinfo.value) def test_objecttype_as_container_invalid_kwargs(): - with pytest.raises(TypeError) as excinfo: + with raises(TypeError) as excinfo: Container(unexisting_field="3") assert "'unexisting_field' is an invalid keyword argument for Container" == str( @@ -218,7 +214,7 @@ class Meta: def test_objecttype_with_possible_types_and_is_type_of_should_raise(): - with pytest.raises(AssertionError) as excinfo: + with raises(AssertionError) as excinfo: class MyObjectType(ObjectType): class Meta: @@ -265,3 +261,17 @@ class Meta: field = MyScalar() assert str(MyObjectType) == "MyObjectType" + + +def test_objecttype_meta_with_annotations(): + class Query(ObjectType): + class Meta: + name: str = "oops" + + hello = String() + + def resolve_hello(self, info): + return "Hello" + + schema = Schema(query=Query) + assert schema is not None diff --git a/graphene/types/tests/test_query.py b/graphene/types/tests/test_query.py index 8681e4628..004d53c8b 100644 --- a/graphene/types/tests/test_query.py +++ b/graphene/types/tests/test_query.py @@ -1,7 +1,13 @@ import json from functools import partial -from graphql import GraphQLError, ResolveInfo, Source, execute, parse +from graphql import ( + GraphQLError, + GraphQLResolveInfo as ResolveInfo, + Source, + execute, + parse, +) from ..context import Context from ..dynamic import Dynamic @@ -28,7 +34,7 @@ class Query(ObjectType): def test_query_source(): - class Root(object): + class 
Root: _hello = "World" def hello(self): @@ -45,10 +51,10 @@ class Query(ObjectType): def test_query_union(): - class one_object(object): + class one_object: pass - class two_object(object): + class two_object: pass class One(ObjectType): @@ -83,10 +89,10 @@ def resolve_unions(self, info): def test_query_interface(): - class one_object(object): + class one_object: pass - class two_object(object): + class two_object: pass class MyInterface(Interface): @@ -175,7 +181,7 @@ class Query(ObjectType): assert len(executed.errors) == 1 assert ( executed.errors[0].message - == GraphQLError('Expected value of type "MyType" but got: str.').message + == GraphQLError("Expected value of type 'MyType' but got: 'hello'.").message ) assert executed.data == {"hello": None} @@ -223,11 +229,11 @@ def resolve_test(self, info, **args): result = test_schema.execute("{ test }", None) assert not result.errors - assert result.data == {"test": "[null,{}]"} + assert result.data == {"test": '[null,{"a_str":null,"a_int":null}]'} result = test_schema.execute('{ test(aStr: "String!") }', "Source!") assert not result.errors - assert result.data == {"test": '["Source!",{"a_str":"String!"}]'} + assert result.data == {"test": '["Source!",{"a_str":"String!","a_int":null}]'} result = test_schema.execute('{ test(aInt: -123, aStr: "String!") }', "Source!") assert not result.errors @@ -252,18 +258,21 @@ def resolve_test(self, info, **args): result = test_schema.execute("{ test }", None) assert not result.errors - assert result.data == {"test": "[null,{}]"} + assert result.data == {"test": '[null,{"a_input":null}]'} result = test_schema.execute('{ test(aInput: {aField: "String!"} ) }', "Source!") assert not result.errors - assert result.data == {"test": '["Source!",{"a_input":{"a_field":"String!"}}]'} + assert result.data == { + "test": '["Source!",{"a_input":{"a_field":"String!","recursive_field":null}}]' + } result = test_schema.execute( '{ test(aInput: {recursiveField: {aField: "String!"}}) }', "Source!" 
) assert not result.errors assert result.data == { - "test": '["Source!",{"a_input":{"recursive_field":{"a_field":"String!"}}}]' + "test": '["Source!",{"a_input":{"a_field":null,"recursive_field":' + '{"a_field":"String!","recursive_field":null}}}]' } @@ -279,8 +288,7 @@ def resolve_other(self, info): return "other" def reversed_middleware(next, *args, **kwargs): - p = next(*args, **kwargs) - return p.then(lambda x: x[::-1]) + return next(*args, **kwargs)[::-1] hello_schema = Schema(Query) @@ -342,10 +350,11 @@ def resolve_all_ints(self, info): return big_list hello_schema = Schema(Query) + graphql_schema = hello_schema.graphql_schema source = Source("{ allInts }") query_ast = parse(source) - big_list_query = partial(execute, hello_schema, query_ast) + big_list_query = partial(execute, graphql_schema, query_ast) result = benchmark(big_list_query) assert not result.errors assert result.data == {"allInts": list(big_list)} diff --git a/graphene/types/tests/test_resolver.py b/graphene/types/tests/test_resolver.py index a03cf187d..dcadb6d8d 100644 --- a/graphene/types/tests/test_resolver.py +++ b/graphene/types/tests/test_resolver.py @@ -13,7 +13,7 @@ demo_dict = {"attr": "value"} -class demo_obj(object): +class demo_obj: attr = "value" diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index d4f2e33e3..29581122e 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -1,4 +1,6 @@ -import pytest +from pytest import raises + +from graphql.pyutils import dedent from ..field import Field from ..objecttype import ObjectType @@ -15,8 +17,8 @@ class Query(ObjectType): def test_schema(): - schema = Schema(Query) - assert schema.get_query_type() == schema.get_graphql_type(Query) + schema = Schema(Query).graphql_schema + assert schema.query_type == schema.get_graphql_type(Query) def test_schema_get_type(): @@ -27,7 +29,7 @@ def test_schema_get_type(): def test_schema_get_type_error(): schema = 
Schema(Query) - with pytest.raises(AttributeError) as exc_info: + with raises(AttributeError) as exc_info: schema.X assert str(exc_info.value) == 'Type "X" not found in the Schema' @@ -35,20 +37,16 @@ def test_schema_get_type_error(): def test_schema_str(): schema = Schema(Query) - assert ( - str(schema) - == """schema { - query: Query -} - -type MyOtherType { - field: String -} - -type Query { - inner: MyOtherType -} -""" + assert str(schema) == dedent( + """ + type MyOtherType { + field: String + } + + type Query { + inner: MyOtherType + } + """ ) diff --git a/graphene/types/tests/test_structures.py b/graphene/types/tests/test_structures.py index 5359278f5..88f3ff1da 100644 --- a/graphene/types/tests/test_structures.py +++ b/graphene/types/tests/test_structures.py @@ -1,6 +1,6 @@ from functools import partial -import pytest +from pytest import raises from ..scalars import String from ..structures import List, NonNull @@ -14,7 +14,7 @@ def test_list(): def test_list_with_unmounted_type(): - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: List(String()) assert ( @@ -82,7 +82,7 @@ def test_nonnull_inherited_works_list(): def test_nonnull_inherited_dont_work_nonnull(): - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: NonNull(NonNull(String)) assert ( @@ -92,7 +92,7 @@ def test_nonnull_inherited_dont_work_nonnull(): def test_nonnull_with_unmounted_type(): - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: NonNull(String()) assert ( diff --git a/graphene/types/tests/test_typemap.py b/graphene/types/tests/test_type_map.py similarity index 69% rename from graphene/types/tests/test_typemap.py rename to graphene/types/tests/test_type_map.py index f713726fc..0ef3af1be 100644 --- a/graphene/types/tests/test_typemap.py +++ b/graphene/types/tests/test_type_map.py @@ -1,10 +1,11 @@ -import pytest +from pytest import raises + from graphql.type import ( GraphQLArgument, 
GraphQLEnumType, GraphQLEnumValue, GraphQLField, - GraphQLInputObjectField, + GraphQLInputField, GraphQLInputObjectType, GraphQLInterfaceType, GraphQLObjectType, @@ -20,7 +21,13 @@ from ..objecttype import ObjectType from ..scalars import Int, String from ..structures import List, NonNull -from ..typemap import TypeMap, resolve_type +from ..schema import GrapheneGraphQLSchema, resolve_type + + +def create_type_map(types, auto_camelcase=True): + query = GraphQLObjectType("Query", {}) + schema = GrapheneGraphQLSchema(query, types=types, auto_camelcase=auto_camelcase) + return schema.type_map def test_enum(): @@ -39,22 +46,18 @@ def deprecation_reason(self): if self == MyEnum.foo: return "Is deprecated" - typemap = TypeMap([MyEnum]) - assert "MyEnum" in typemap - graphql_enum = typemap["MyEnum"] + type_map = create_type_map([MyEnum]) + assert "MyEnum" in type_map + graphql_enum = type_map["MyEnum"] assert isinstance(graphql_enum, GraphQLEnumType) assert graphql_enum.name == "MyEnum" assert graphql_enum.description == "Description" - values = graphql_enum.values - assert values == [ - GraphQLEnumValue( - name="foo", - value=1, - description="Description foo=1", - deprecation_reason="Is deprecated", + assert graphql_enum.values == { + "foo": GraphQLEnumValue( + value=1, description="Description foo=1", deprecation_reason="Is deprecated" ), - GraphQLEnumValue(name="bar", value=2, description="Description bar=2"), - ] + "bar": GraphQLEnumValue(value=2, description="Description bar=2"), + } def test_objecttype(): @@ -70,15 +73,15 @@ class MyObjectType(ObjectType): def resolve_foo(self, bar): return bar - typemap = TypeMap([MyObjectType]) - assert "MyObjectType" in typemap - graphql_type = typemap["MyObjectType"] + type_map = create_type_map([MyObjectType]) + assert "MyObjectType" in type_map + graphql_type = type_map["MyObjectType"] assert isinstance(graphql_type, GraphQLObjectType) assert graphql_type.name == "MyObjectType" assert graphql_type.description == "Description" 
fields = graphql_type.fields - assert list(fields.keys()) == ["foo", "gizmo"] + assert list(fields) == ["foo", "gizmo"] foo_field = fields["foo"] assert isinstance(foo_field, GraphQLField) assert foo_field.description == "Field description" @@ -100,13 +103,13 @@ class MyObjectType(ObjectType): bar = Dynamic(lambda: Field(String)) own = Field(lambda: MyObjectType) - typemap = TypeMap([MyObjectType]) - assert "MyObjectType" in typemap - assert list(MyObjectType._meta.fields.keys()) == ["bar", "own"] - graphql_type = typemap["MyObjectType"] + type_map = create_type_map([MyObjectType]) + assert "MyObjectType" in type_map + assert list(MyObjectType._meta.fields) == ["bar", "own"] + graphql_type = type_map["MyObjectType"] fields = graphql_type.fields - assert list(fields.keys()) == ["bar", "own"] + assert list(fields) == ["bar", "own"] assert fields["bar"].type == GraphQLString assert fields["own"].type == graphql_type @@ -125,21 +128,21 @@ class MyInterface(Interface): def resolve_foo(self, args, info): return args.get("bar") - typemap = TypeMap([MyInterface]) - assert "MyInterface" in typemap - graphql_type = typemap["MyInterface"] + type_map = create_type_map([MyInterface]) + assert "MyInterface" in type_map + graphql_type = type_map["MyInterface"] assert isinstance(graphql_type, GraphQLInterfaceType) assert graphql_type.name == "MyInterface" assert graphql_type.description == "Description" fields = graphql_type.fields - assert list(fields.keys()) == ["foo", "gizmo", "own"] + assert list(fields) == ["foo", "gizmo", "own"] assert fields["own"].type == graphql_type - assert list(fields["gizmo"].args.keys()) == ["firstArg", "oth_arg"] + assert list(fields["gizmo"].args) == ["firstArg", "oth_arg"] foo_field = fields["foo"] assert isinstance(foo_field, GraphQLField) assert foo_field.description == "Field description" - assert not foo_field.resolver # Resolver not attached in interfaces + assert not foo_field.resolve # Resolver not attached in interfaces assert 
foo_field.args == { "bar": GraphQLArgument( GraphQLString, @@ -169,23 +172,23 @@ class MyInputObjectType(InputObjectType): def resolve_foo_bar(self, args, info): return args.get("bar") - typemap = TypeMap([MyInputObjectType]) - assert "MyInputObjectType" in typemap - graphql_type = typemap["MyInputObjectType"] + type_map = create_type_map([MyInputObjectType]) + assert "MyInputObjectType" in type_map + graphql_type = type_map["MyInputObjectType"] assert isinstance(graphql_type, GraphQLInputObjectType) assert graphql_type.name == "MyInputObjectType" assert graphql_type.description == "Description" - other_graphql_type = typemap["OtherObjectType"] - inner_graphql_type = typemap["MyInnerObjectType"] - container = graphql_type.create_container( + other_graphql_type = type_map["OtherObjectType"] + inner_graphql_type = type_map["MyInnerObjectType"] + container = graphql_type.out_type( { "bar": "oh!", - "baz": inner_graphql_type.create_container( + "baz": inner_graphql_type.out_type( { "some_other_field": [ - other_graphql_type.create_container({"thingy": 1}), - other_graphql_type.create_container({"thingy": 2}), + other_graphql_type.out_type({"thingy": 1}), + other_graphql_type.out_type({"thingy": 2}), ] } ), @@ -201,11 +204,11 @@ def resolve_foo_bar(self, args, info): assert container.baz.some_other_field[1].thingy == 2 fields = graphql_type.fields - assert list(fields.keys()) == ["fooBar", "gizmo", "baz", "own"] + assert list(fields) == ["fooBar", "gizmo", "baz", "own"] own_field = fields["own"] assert own_field.type == graphql_type foo_field = fields["fooBar"] - assert isinstance(foo_field, GraphQLInputObjectField) + assert isinstance(foo_field, GraphQLInputField) assert foo_field.description == "Field description" @@ -215,19 +218,19 @@ class MyObjectType(ObjectType): foo_bar = String(bar_foo=String()) - typemap = TypeMap([MyObjectType]) - assert "MyObjectType" in typemap - graphql_type = typemap["MyObjectType"] + type_map = create_type_map([MyObjectType]) + assert 
"MyObjectType" in type_map + graphql_type = type_map["MyObjectType"] assert isinstance(graphql_type, GraphQLObjectType) assert graphql_type.name == "MyObjectType" assert graphql_type.description == "Description" fields = graphql_type.fields - assert list(fields.keys()) == ["fooBar"] + assert list(fields) == ["fooBar"] foo_field = fields["fooBar"] assert isinstance(foo_field, GraphQLField) assert foo_field.args == { - "barFoo": GraphQLArgument(GraphQLString, out_name="bar_foo") + "barFoo": GraphQLArgument(GraphQLString, default_value=None, out_name="bar_foo") } @@ -237,19 +240,21 @@ class MyObjectType(ObjectType): foo_bar = String(bar_foo=String()) - typemap = TypeMap([MyObjectType], auto_camelcase=False) - assert "MyObjectType" in typemap - graphql_type = typemap["MyObjectType"] + type_map = create_type_map([MyObjectType], auto_camelcase=False) + assert "MyObjectType" in type_map + graphql_type = type_map["MyObjectType"] assert isinstance(graphql_type, GraphQLObjectType) assert graphql_type.name == "MyObjectType" assert graphql_type.description == "Description" fields = graphql_type.fields - assert list(fields.keys()) == ["foo_bar"] + assert list(fields) == ["foo_bar"] foo_field = fields["foo_bar"] assert isinstance(foo_field, GraphQLField) assert foo_field.args == { - "bar_foo": GraphQLArgument(GraphQLString, out_name="bar_foo") + "bar_foo": GraphQLArgument( + GraphQLString, default_value=None, out_name="bar_foo" + ) } @@ -262,8 +267,8 @@ class Meta: foo_bar = String() - typemap = TypeMap([MyObjectType]) - graphql_type = typemap["MyObjectType"] + type_map = create_type_map([MyObjectType]) + graphql_type = type_map["MyObjectType"] assert graphql_type.is_type_of assert graphql_type.is_type_of({}, None) is True assert graphql_type.is_type_of(MyObjectType(), None) is False @@ -279,8 +284,8 @@ class MyOtherObjectType(ObjectType): def resolve_type_func(root, info): return MyOtherObjectType - typemap = TypeMap([MyObjectType]) - with pytest.raises(AssertionError) as 
excinfo: - resolve_type(resolve_type_func, typemap, "MyOtherObjectType", {}, {}) + type_map = create_type_map([MyObjectType]) + with raises(AssertionError) as excinfo: + resolve_type(resolve_type_func, type_map, "MyOtherObjectType", {}, {}, None) assert "MyOtherObjectTyp" in str(excinfo.value) diff --git a/graphene/types/tests/test_union.py b/graphene/types/tests/test_union.py index 256c7d95b..4d642d6f5 100644 --- a/graphene/types/tests/test_union.py +++ b/graphene/types/tests/test_union.py @@ -1,4 +1,4 @@ -import pytest +from pytest import raises from ..field import Field from ..objecttype import ObjectType @@ -38,7 +38,7 @@ class Meta: def test_generate_union_with_no_types(): - with pytest.raises(Exception) as exc_info: + with raises(Exception) as exc_info: class MyUnion(Union): pass diff --git a/graphene/types/typemap.py b/graphene/types/typemap.py deleted file mode 100644 index 9edb85181..000000000 --- a/graphene/types/typemap.py +++ /dev/null @@ -1,337 +0,0 @@ -import inspect -from collections import OrderedDict -from functools import partial - -from graphql import ( - GraphQLArgument, - GraphQLBoolean, - GraphQLField, - GraphQLFloat, - GraphQLID, - GraphQLInputObjectField, - GraphQLInt, - GraphQLList, - GraphQLNonNull, - GraphQLString, -) -from graphql.execution.executor import get_default_resolve_type_fn -from graphql.type import GraphQLEnumValue -from graphql.type.typemap import GraphQLTypeMap - -from ..utils.get_unbound_function import get_unbound_function -from ..utils.str_converters import to_camel_case -from .definitions import ( - GrapheneEnumType, - GrapheneGraphQLType, - GrapheneInputObjectType, - GrapheneInterfaceType, - GrapheneObjectType, - GrapheneScalarType, - GrapheneUnionType, -) -from .dynamic import Dynamic -from .enum import Enum -from .field import Field -from .inputobjecttype import InputObjectType -from .interface import Interface -from .objecttype import ObjectType -from .resolver import get_default_resolver -from .scalars import ID, 
Boolean, Float, Int, Scalar, String -from .structures import List, NonNull -from .union import Union -from .utils import get_field_as - - -def is_graphene_type(_type): - if isinstance(_type, (List, NonNull)): - return True - if inspect.isclass(_type) and issubclass( - _type, (ObjectType, InputObjectType, Scalar, Interface, Union, Enum) - ): - return True - - -def resolve_type(resolve_type_func, map, type_name, root, info): - _type = resolve_type_func(root, info) - - if not _type: - return_type = map[type_name] - return get_default_resolve_type_fn(root, info, return_type) - - if inspect.isclass(_type) and issubclass(_type, ObjectType): - graphql_type = map.get(_type._meta.name) - assert graphql_type, "Can't find type {} in schema".format(_type._meta.name) - assert graphql_type.graphene_type == _type, ( - "The type {} does not match with the associated graphene type {}." - ).format(_type, graphql_type.graphene_type) - return graphql_type - - return _type - - -def is_type_of_from_possible_types(possible_types, root, info): - return isinstance(root, possible_types) - - -class TypeMap(GraphQLTypeMap): - def __init__(self, types, auto_camelcase=True, schema=None): - self.auto_camelcase = auto_camelcase - self.schema = schema - super(TypeMap, self).__init__(types) - - def reducer(self, map, type): - if not type: - return map - if inspect.isfunction(type): - type = type() - if is_graphene_type(type): - return self.graphene_reducer(map, type) - return GraphQLTypeMap.reducer(map, type) - - def graphene_reducer(self, map, type): - if isinstance(type, (List, NonNull)): - return self.reducer(map, type.of_type) - if type._meta.name in map: - _type = map[type._meta.name] - if isinstance(_type, GrapheneGraphQLType): - assert _type.graphene_type == type, ( - "Found different types with the same name in the schema: {}, {}." 
- ).format(_type.graphene_type, type) - return map - - if issubclass(type, ObjectType): - internal_type = self.construct_objecttype(map, type) - elif issubclass(type, InputObjectType): - internal_type = self.construct_inputobjecttype(map, type) - elif issubclass(type, Interface): - internal_type = self.construct_interface(map, type) - elif issubclass(type, Scalar): - internal_type = self.construct_scalar(map, type) - elif issubclass(type, Enum): - internal_type = self.construct_enum(map, type) - elif issubclass(type, Union): - internal_type = self.construct_union(map, type) - else: - raise Exception("Expected Graphene type, but received: {}.".format(type)) - - return GraphQLTypeMap.reducer(map, internal_type) - - def construct_scalar(self, map, type): - # We have a mapping to the original GraphQL types - # so there are no collisions. - _scalars = { - String: GraphQLString, - Int: GraphQLInt, - Float: GraphQLFloat, - Boolean: GraphQLBoolean, - ID: GraphQLID, - } - if type in _scalars: - return _scalars[type] - - return GrapheneScalarType( - graphene_type=type, - name=type._meta.name, - description=type._meta.description, - serialize=getattr(type, "serialize", None), - parse_value=getattr(type, "parse_value", None), - parse_literal=getattr(type, "parse_literal", None), - ) - - def construct_enum(self, map, type): - values = OrderedDict() - for name, value in type._meta.enum.__members__.items(): - description = getattr(value, "description", None) - deprecation_reason = getattr(value, "deprecation_reason", None) - if not description and callable(type._meta.description): - description = type._meta.description(value) - - if not deprecation_reason and callable(type._meta.deprecation_reason): - deprecation_reason = type._meta.deprecation_reason(value) - - values[name] = GraphQLEnumValue( - name=name, - value=value.value, - description=description, - deprecation_reason=deprecation_reason, - ) - - type_description = ( - type._meta.description(None) - if 
callable(type._meta.description) - else type._meta.description - ) - - return GrapheneEnumType( - graphene_type=type, - values=values, - name=type._meta.name, - description=type_description, - ) - - def construct_objecttype(self, map, type): - if type._meta.name in map: - _type = map[type._meta.name] - if isinstance(_type, GrapheneGraphQLType): - assert _type.graphene_type == type, ( - "Found different types with the same name in the schema: {}, {}." - ).format(_type.graphene_type, type) - return _type - - def interfaces(): - interfaces = [] - for interface in type._meta.interfaces: - self.graphene_reducer(map, interface) - internal_type = map[interface._meta.name] - assert internal_type.graphene_type == interface - interfaces.append(internal_type) - return interfaces - - if type._meta.possible_types: - is_type_of = partial( - is_type_of_from_possible_types, type._meta.possible_types - ) - else: - is_type_of = type.is_type_of - - return GrapheneObjectType( - graphene_type=type, - name=type._meta.name, - description=type._meta.description, - fields=partial(self.construct_fields_for_type, map, type), - is_type_of=is_type_of, - interfaces=interfaces, - ) - - def construct_interface(self, map, type): - if type._meta.name in map: - _type = map[type._meta.name] - if isinstance(_type, GrapheneInterfaceType): - assert _type.graphene_type == type, ( - "Found different types with the same name in the schema: {}, {}." 
- ).format(_type.graphene_type, type) - return _type - - _resolve_type = None - if type.resolve_type: - _resolve_type = partial( - resolve_type, type.resolve_type, map, type._meta.name - ) - return GrapheneInterfaceType( - graphene_type=type, - name=type._meta.name, - description=type._meta.description, - fields=partial(self.construct_fields_for_type, map, type), - resolve_type=_resolve_type, - ) - - def construct_inputobjecttype(self, map, type): - return GrapheneInputObjectType( - graphene_type=type, - name=type._meta.name, - description=type._meta.description, - container_type=type._meta.container, - fields=partial( - self.construct_fields_for_type, map, type, is_input_type=True - ), - ) - - def construct_union(self, map, type): - _resolve_type = None - if type.resolve_type: - _resolve_type = partial( - resolve_type, type.resolve_type, map, type._meta.name - ) - - def types(): - union_types = [] - for objecttype in type._meta.types: - self.graphene_reducer(map, objecttype) - internal_type = map[objecttype._meta.name] - assert internal_type.graphene_type == objecttype - union_types.append(internal_type) - return union_types - - return GrapheneUnionType( - graphene_type=type, - name=type._meta.name, - description=type._meta.description, - types=types, - resolve_type=_resolve_type, - ) - - def get_name(self, name): - if self.auto_camelcase: - return to_camel_case(name) - return name - - def construct_fields_for_type(self, map, type, is_input_type=False): - fields = OrderedDict() - for name, field in type._meta.fields.items(): - if isinstance(field, Dynamic): - field = get_field_as(field.get_type(self.schema), _as=Field) - if not field: - continue - map = self.reducer(map, field.type) - field_type = self.get_field_type(map, field.type) - if is_input_type: - _field = GraphQLInputObjectField( - field_type, - default_value=field.default_value, - out_name=name, - description=field.description, - ) - else: - args = OrderedDict() - for arg_name, arg in field.args.items(): 
- map = self.reducer(map, arg.type) - arg_type = self.get_field_type(map, arg.type) - processed_arg_name = arg.name or self.get_name(arg_name) - args[processed_arg_name] = GraphQLArgument( - arg_type, - out_name=arg_name, - description=arg.description, - default_value=arg.default_value, - ) - _field = GraphQLField( - field_type, - args=args, - resolver=field.get_resolver( - self.get_resolver_for_type(type, name, field.default_value) - ), - deprecation_reason=field.deprecation_reason, - description=field.description, - ) - field_name = field.name or self.get_name(name) - fields[field_name] = _field - return fields - - def get_resolver_for_type(self, type, name, default_value): - if not issubclass(type, ObjectType): - return - resolver = getattr(type, "resolve_{}".format(name), None) - if not resolver: - # If we don't find the resolver in the ObjectType class, then try to - # find it in each of the interfaces - interface_resolver = None - for interface in type._meta.interfaces: - if name not in interface._meta.fields: - continue - interface_resolver = getattr(interface, "resolve_{}".format(name), None) - if interface_resolver: - break - resolver = interface_resolver - - # Only if is not decorated with classmethod - if resolver: - return get_unbound_function(resolver) - - default_resolver = type._meta.default_resolver or get_default_resolver() - return partial(default_resolver, name, default_value) - - def get_field_type(self, map, type): - if isinstance(type, List): - return GraphQLList(self.get_field_type(map, type.of_type)) - if isinstance(type, NonNull): - return GraphQLNonNull(self.get_field_type(map, type.of_type)) - return map.get(type._meta.name) diff --git a/graphene/types/utils.py b/graphene/types/utils.py index b026355b8..3b195d692 100644 --- a/graphene/types/utils.py +++ b/graphene/types/utils.py @@ -1,9 +1,6 @@ import inspect -from collections import OrderedDict from functools import partial -from six import string_types - from ..utils.module_loading 
import import_string from .mountedtype import MountedType from .unmountedtype import UnmountedType @@ -35,11 +32,11 @@ def yank_fields_from_attrs(attrs, _as=None, sort=True): if sort: fields_with_names = sorted(fields_with_names, key=lambda f: f[1]) - return OrderedDict(fields_with_names) + return dict(fields_with_names) def get_type(_type): - if isinstance(_type, string_types): + if isinstance(_type, str): return import_string(_type) if inspect.isfunction(_type) or isinstance(_type, partial): return _type() diff --git a/graphene/types/uuid.py b/graphene/types/uuid.py index 0628200b7..1f980fc79 100644 --- a/graphene/types/uuid.py +++ b/graphene/types/uuid.py @@ -1,8 +1,7 @@ from __future__ import absolute_import -import six from uuid import UUID as _UUID -from graphql.language import ast +from graphql.language.ast import StringValueNode from .scalars import Scalar @@ -15,7 +14,7 @@ class UUID(Scalar): @staticmethod def serialize(uuid): - if isinstance(uuid, six.string_types): + if isinstance(uuid, str): uuid = _UUID(uuid) assert isinstance(uuid, _UUID), "Expected UUID instance, received {}".format( @@ -25,7 +24,7 @@ def serialize(uuid): @staticmethod def parse_literal(node): - if isinstance(node, ast.StringValue): + if isinstance(node, StringValueNode): return _UUID(node.value) @staticmethod diff --git a/graphene/utils/annotate.py b/graphene/utils/annotate.py deleted file mode 100644 index 43a26ef63..000000000 --- a/graphene/utils/annotate.py +++ /dev/null @@ -1,35 +0,0 @@ -import six - -from ..pyutils.compat import func_name, signature -from .deprecated import warn_deprecation - - -def annotate(_func=None, _trigger_warning=True, **annotations): - if not six.PY2 and _trigger_warning: - warn_deprecation( - "annotate is intended for use in Python 2 only, as you can use type annotations Python 3.\n" - "Read more in https://docs.python.org/3/library/typing.html" - ) - - if not _func: - - def _func(f): - return annotate(f, **annotations) - - return _func - - 
func_signature = signature(_func) - - # We make sure the annotations are valid - for key, value in annotations.items(): - assert key in func_signature.parameters, ( - 'The key {key} is not a function parameter in the function "{func_name}".' - ).format(key=key, func_name=func_name(_func)) - - func_annotations = getattr(_func, "__annotations__", None) - if func_annotations is None: - _func.__annotations__ = annotations - else: - _func.__annotations__.update(annotations) - - return _func diff --git a/graphene/utils/crunch.py b/graphene/utils/crunch.py index 57fcb77fe..b27d3718e 100644 --- a/graphene/utils/crunch.py +++ b/graphene/utils/crunch.py @@ -1,5 +1,5 @@ import json -from collections import Mapping +from collections.abc import Mapping def to_key(value): diff --git a/graphene/utils/deduplicator.py b/graphene/utils/deduplicator.py index 13c1cb163..3fbf139d1 100644 --- a/graphene/utils/deduplicator.py +++ b/graphene/utils/deduplicator.py @@ -1,4 +1,4 @@ -from collections import Mapping, OrderedDict +from collections.abc import Mapping def deflate(node, index=None, path=None): @@ -16,10 +16,9 @@ def deflate(node, index=None, path=None): else: index[cache_key] = True - field_names = node.keys() - result = OrderedDict() + result = {} - for field_name in field_names: + for field_name in node: value = node[field_name] new_path = path + [field_name] diff --git a/graphene/utils/orderedtype.py b/graphene/utils/orderedtype.py index a58f4d08a..fb8783d27 100644 --- a/graphene/utils/orderedtype.py +++ b/graphene/utils/orderedtype.py @@ -2,7 +2,7 @@ @total_ordering -class OrderedType(object): +class OrderedType: creation_counter = 1 def __init__(self, _creation_counter=None): diff --git a/graphene/utils/props.py b/graphene/utils/props.py index 5ef3ba0a0..26c697eca 100644 --- a/graphene/utils/props.py +++ b/graphene/utils/props.py @@ -2,7 +2,7 @@ class _OldClass: pass -class _NewClass(object): +class _NewClass: pass diff --git a/graphene/utils/subclass_with_meta.py 
b/graphene/utils/subclass_with_meta.py index 01fc53757..c6ba2d3fb 100644 --- a/graphene/utils/subclass_with_meta.py +++ b/graphene/utils/subclass_with_meta.py @@ -1,7 +1,5 @@ from inspect import isclass -import six - from ..pyutils.init_subclass import InitSubclassMeta from .props import props @@ -18,7 +16,7 @@ def __repr__(cls): return "<{} meta={}>".format(cls.__name__, repr(cls._meta)) -class SubclassWithMeta(six.with_metaclass(SubclassWithMeta_Meta)): +class SubclassWithMeta(metaclass=SubclassWithMeta_Meta): """This class improves __init_subclass__ to receive automatically the options from meta""" # We will only have the metaclass in Python 2 @@ -45,7 +43,7 @@ def __init_subclass__(cls, **meta_options): assert not options, ( "Abstract types can only contain the abstract attribute. " "Received: abstract, {option_keys}" - ).format(option_keys=", ".join(options.keys())) + ).format(option_keys=", ".join(options)) else: super_class = super(cls, cls) if hasattr(super_class, "__init_subclass_with_meta__"): diff --git a/graphene/utils/tests/test_annotate.py b/graphene/utils/tests/test_annotate.py deleted file mode 100644 index 1b7b4ce14..000000000 --- a/graphene/utils/tests/test_annotate.py +++ /dev/null @@ -1,37 +0,0 @@ -import pytest - -from ..annotate import annotate - - -def func(a, b, *c, **d): - pass - - -annotations = {"a": int, "b": str, "c": list, "d": dict} - - -def func_with_annotations(a, b, *c, **d): - pass - - -func_with_annotations.__annotations__ = annotations - - -def test_annotate_with_no_params(): - annotated_func = annotate(func, _trigger_warning=False) - assert annotated_func.__annotations__ == {} - - -def test_annotate_with_params(): - annotated_func = annotate(_trigger_warning=False, **annotations)(func) - assert annotated_func.__annotations__ == annotations - - -def test_annotate_with_wront_params(): - with pytest.raises(Exception) as exc_info: - annotate(p=int, _trigger_warning=False)(func) - - assert ( - str(exc_info.value) - == 'The key p is 
not a function parameter in the function "func".' - ) diff --git a/graphene/utils/tests/test_crunch.py b/graphene/utils/tests/test_crunch.py index 9646a2572..92d0b1b04 100644 --- a/graphene/utils/tests/test_crunch.py +++ b/graphene/utils/tests/test_crunch.py @@ -1,10 +1,9 @@ -import pytest -from collections import OrderedDict +from pytest import mark from ..crunch import crunch -@pytest.mark.parametrize( +@mark.parametrize( "description,uncrunched,crunched", [ ["number primitive", 0, [0]], @@ -28,28 +27,22 @@ ["single-item object", {"a": None}, [None, {"a": 0}]], [ "multi-item all distinct object", - OrderedDict([("a", None), ("b", 0), ("c", True), ("d", "string")]), + {"a": None, "b": 0, "c": True, "d": "string"}, [None, 0, True, "string", {"a": 0, "b": 1, "c": 2, "d": 3}], ], [ "multi-item repeated object", - OrderedDict([("a", True), ("b", True), ("c", True), ("d", True)]), + {"a": True, "b": True, "c": True, "d": True}, [True, {"a": 0, "b": 0, "c": 0, "d": 0}], ], [ "complex array", - [OrderedDict([("a", True), ("b", [1, 2, 3])]), [1, 2, 3]], + [{"a": True, "b": [1, 2, 3]}, [1, 2, 3]], [True, 1, 2, 3, [1, 2, 3], {"a": 0, "b": 4}, [5, 4]], ], [ "complex object", - OrderedDict( - [ - ("a", True), - ("b", [1, 2, 3]), - ("c", OrderedDict([("a", True), ("b", [1, 2, 3])])), - ] - ), + {"a": True, "b": [1, 2, 3], "c": {"a": True, "b": [1, 2, 3]}}, [True, 1, 2, 3, [1, 2, 3], {"a": 0, "b": 4}, {"a": 0, "b": 4, "c": 5}], ], ], diff --git a/graphene/utils/tests/test_deduplicator.py b/graphene/utils/tests/test_deduplicator.py index 604ae4388..b845caf19 100644 --- a/graphene/utils/tests/test_deduplicator.py +++ b/graphene/utils/tests/test_deduplicator.py @@ -150,8 +150,8 @@ def resolve_events(_, info): result = schema.execute(query) assert not result.errors - result.data = deflate(result.data) - assert result.data == { + data = deflate(result.data) + assert data == { "events": [ { "__typename": "Event", diff --git a/graphene/utils/tests/test_deprecated.py 
b/graphene/utils/tests/test_deprecated.py index 7d407548c..8a14434b6 100644 --- a/graphene/utils/tests/test_deprecated.py +++ b/graphene/utils/tests/test_deprecated.py @@ -1,4 +1,4 @@ -import pytest +from pytest import raises from .. import deprecated from ..deprecated import deprecated as deprecated_decorator @@ -71,5 +71,5 @@ class X: def test_deprecated_other_object(mocker): mocker.patch.object(deprecated, "warn_deprecation") - with pytest.raises(TypeError): + with raises(TypeError): deprecated_decorator({}) diff --git a/graphene/utils/tests/test_trim_docstring.py b/graphene/utils/tests/test_trim_docstring.py index 704d39977..232836d1f 100644 --- a/graphene/utils/tests/test_trim_docstring.py +++ b/graphene/utils/tests/test_trim_docstring.py @@ -2,7 +2,7 @@ def test_trim_docstring(): - class WellDocumentedObject(object): + class WellDocumentedObject: """ This object is very well-documented. It has multiple lines in its description. @@ -16,7 +16,7 @@ class WellDocumentedObject(object): "description.\n\nMultiple paragraphs too" ) - class UndocumentedObject(object): + class UndocumentedObject: pass assert trim_docstring(UndocumentedObject.__doc__) is None diff --git a/graphene/utils/thenables.py b/graphene/utils/thenables.py index a3089595f..96286992e 100644 --- a/graphene/utils/thenables.py +++ b/graphene/utils/thenables.py @@ -1,28 +1,15 @@ """ This file is used mainly as a bridge for thenable abstractions. 
-This includes: -- Promises -- Asyncio Coroutines """ -try: - from promise import Promise, is_thenable # type: ignore -except ImportError: +from inspect import isawaitable - class Promise(object): # type: ignore - pass - def is_thenable(obj): # type: ignore - return False +def await_and_execute(obj, on_resolve): + async def build_resolve_async(): + return on_resolve(await obj) - -try: - from inspect import isawaitable - from .thenables_asyncio import await_and_execute -except ImportError: - - def isawaitable(obj): # type: ignore - return False + return build_resolve_async() def maybe_thenable(obj, on_resolve): @@ -31,12 +18,8 @@ def maybe_thenable(obj, on_resolve): returning the same type of object inputed. If the object is not thenable, it should return on_resolve(obj) """ - if isawaitable(obj) and not isinstance(obj, Promise): + if isawaitable(obj): return await_and_execute(obj, on_resolve) - if is_thenable(obj): - return Promise.resolve(obj).then(on_resolve) - - # If it's not awaitable not a Promise, return - # the function executed over the object + # If it's not awaitable, return the function executed over the object return on_resolve(obj) diff --git a/graphene/utils/thenables_asyncio.py b/graphene/utils/thenables_asyncio.py deleted file mode 100644 index d5f93182e..000000000 --- a/graphene/utils/thenables_asyncio.py +++ /dev/null @@ -1,5 +0,0 @@ -def await_and_execute(obj, on_resolve): - async def build_resolve_async(): - return on_resolve(await obj) - - return build_resolve_async() diff --git a/setup.py b/setup.py index 75d92f794..3e22b7c43 100644 --- a/setup.py +++ b/setup.py @@ -49,6 +49,7 @@ def run_tests(self): "pytest-benchmark", "pytest-cov", "pytest-mock", + "pytest-asyncio", "snapshottest", "coveralls", "promise", @@ -73,27 +74,17 @@ def run_tests(self): "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", 
- "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: Implementation :: PyPy", ], keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["tests", "tests.*", "examples"]), install_requires=[ - "six>=1.10.0,<2", - "graphql-core>=2.1,<3", - "graphql-relay>=2,<3", - "aniso8601>=3,<=7", + "graphql-core>=3.0.0a0,<4", + "graphql-relay>=3.0.0a0,<4", + "aniso8601>=6,<8", ], tests_require=tests_require, - extras_require={ - "test": tests_require, - "django": ["graphene-django"], - "sqlalchemy": ["graphene-sqlalchemy"], - }, + extras_require={"test": tests_require}, cmdclass={"test": PyTest}, ) diff --git a/tests_asyncio/test_relay_connection.py b/tests_asyncio/test_relay_connection.py index ec86fef66..b139f6a39 100644 --- a/tests_asyncio/test_relay_connection.py +++ b/tests_asyncio/test_relay_connection.py @@ -1,7 +1,4 @@ -import pytest - -from collections import OrderedDict -from graphql.execution.executors.asyncio import AsyncioExecutor +from pytest import mark from graphql_relay.utils import base64 @@ -27,14 +24,14 @@ class Meta: class Query(ObjectType): letters = ConnectionField(LetterConnection) connection_letters = ConnectionField(LetterConnection) - promise_letters = ConnectionField(LetterConnection) + async_letters = ConnectionField(LetterConnection) node = Node.Field() def resolve_letters(self, info, **args): return list(letters.values()) - async def resolve_promise_letters(self, info, **args): + async def resolve_async_letters(self, info, **args): return list(letters.values()) def resolve_connection_letters(self, info, **args): @@ -48,9 +45,7 @@ def resolve_connection_letters(self, info, **args): schema = Schema(Query) -letters = OrderedDict() -for i, letter in enumerate(letter_chars): - letters[letter] = Letter(id=i, letter=letter) +letters = {letter: Letter(id=i, letter=letter) for i, 
letter in enumerate(letter_chars)} def edges(selected_letters): @@ -96,12 +91,12 @@ def execute(args=""): ) -@pytest.mark.asyncio -async def test_connection_promise(): - result = await schema.execute( +@mark.asyncio +async def test_connection_async(): + result = await schema.execute_async( """ { - promiseLetters(first:1) { + asyncLetters(first:1) { edges { node { id @@ -114,14 +109,12 @@ async def test_connection_promise(): } } } - """, - executor=AsyncioExecutor(), - return_promise=True, + """ ) assert not result.errors assert result.data == { - "promiseLetters": { + "asyncLetters": { "edges": [{"node": {"id": "TGV0dGVyOjA=", "letter": "A"}}], "pageInfo": {"hasPreviousPage": False, "hasNextPage": True}, } diff --git a/tests_asyncio/test_relay_mutation.py b/tests_asyncio/test_relay_mutation.py index 42ea5fc77..7b083dbf9 100644 --- a/tests_asyncio/test_relay_mutation.py +++ b/tests_asyncio/test_relay_mutation.py @@ -1,5 +1,4 @@ -import pytest -from graphql.execution.executors.asyncio import AsyncioExecutor +from pytest import mark from graphene.types import ID, Field, ObjectType, Schema from graphene.types.scalars import String @@ -43,11 +42,11 @@ class Input(SharedFields): @staticmethod def mutate_and_get_payload( - self, info, shared="", additional_field="", client_mutation_id=None + self, info, shared, additional_field, client_mutation_id=None ): edge_type = MyEdge return OtherMutation( - name=shared + additional_field, + name=(shared or "") + (additional_field or ""), my_node_edge=edge_type(cursor="1", node=MyNode(name="name")), ) @@ -64,23 +63,19 @@ class Mutation(ObjectType): schema = Schema(query=RootQuery, mutation=Mutation) -@pytest.mark.asyncio +@mark.asyncio async def test_node_query_promise(): - executed = await schema.execute( - 'mutation a { sayPromise(input: {what:"hello", clientMutationId:"1"}) { phrase } }', - executor=AsyncioExecutor(), - return_promise=True, + executed = await schema.execute_async( + 'mutation a { sayPromise(input: {what:"hello", 
clientMutationId:"1"}) { phrase } }' ) assert not executed.errors assert executed.data == {"sayPromise": {"phrase": "hello"}} -@pytest.mark.asyncio +@mark.asyncio async def test_edge_query(): - executed = await schema.execute( - 'mutation a { other(input: {clientMutationId:"1"}) { clientMutationId, myNodeEdge { cursor node { name }} } }', - executor=AsyncioExecutor(), - return_promise=True, + executed = await schema.execute_async( + 'mutation a { other(input: {clientMutationId:"1"}) { clientMutationId, myNodeEdge { cursor node { name }} } }' ) assert not executed.errors assert dict(executed.data) == { diff --git a/tests_py36/test_objecttype.py b/tests_py36/test_objecttype.py deleted file mode 100644 index bce38d948..000000000 --- a/tests_py36/test_objecttype.py +++ /dev/null @@ -1,15 +0,0 @@ -from graphene import Schema, ObjectType, String - - -def test_objecttype_meta_with_annotations(): - class Query(ObjectType): - class Meta: - name: str = "oops" - - hello = String() - - def resolve_hello(self, info): - return "Hello" - - schema = Schema(query=Query) - assert schema is not None diff --git a/tox.ini b/tox.ini index a519cb1e7..090cca076 100644 --- a/tox.ini +++ b/tox.ini @@ -1,17 +1,14 @@ [tox] -envlist = flake8,py27,py34,py35,py36,py37,pre-commit,pypy,mypy +envlist = flake8,py36,py37,pre-commit,mypy skipsdist = true [testenv] deps = .[test] - py{35,36,37}: pytest-asyncio setenv = PYTHONPATH = .:{envdir} commands = - py{27,py}: py.test --cov=graphene graphene examples {posargs} - py{35}: py.test --cov=graphene graphene examples tests_asyncio {posargs} - py{36,37}: py.test --cov=graphene graphene examples tests_asyncio tests_py36 {posargs} + py{36,37}: py.test --cov=graphene graphene examples tests_asyncio {posargs} [testenv:pre-commit] basepython=python3.7 @@ -25,14 +22,16 @@ commands = [testenv:mypy] basepython=python3.7 deps = - mypy + mypy>=0.720 commands = mypy graphene [testenv:flake8] -deps = flake8 +basepython=python3.6 +deps = + flake8>=3.7,<4 commands = - 
pip install -e . + pip install --pre -e . flake8 graphene [pytest] From e90aa1b712198475e7d06df92c97fbbc3ae2a488 Mon Sep 17 00:00:00 2001 From: Min ho Kim Date: Thu, 26 Sep 2019 09:57:53 +1000 Subject: [PATCH 002/141] Fix typos (#1066) Fixed typos in docs, string literals, comments, test name --- docs/quickstart.rst | 2 +- docs/types/objecttypes.rst | 4 ++-- examples/starwars_relay/data.py | 2 +- .../starwars_relay/tests/snapshots/snap_test_mutation.py | 2 +- graphene/types/argument.py | 2 +- graphene/types/field.py | 2 +- graphene/types/inputfield.py | 2 +- graphene/types/objecttype.py | 6 +++--- graphene/types/schema.py | 2 +- graphene/types/tests/test_enum.py | 2 +- graphene/types/union.py | 2 +- graphene/types/uuid.py | 2 +- 12 files changed, 15 insertions(+), 15 deletions(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index fa0129715..4ff0dfa2c 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -24,7 +24,7 @@ What is Graphene? Graphene is a library that provides tools to implement a GraphQL API in Python using a *code-first* approach. -Compare Graphene's *code-first* approach to building a GraphQL API with *schema-first* approaches like `Apollo Server`_ (JavaScript) or Ariadne_ (Python). Instead of writing GraphQL **Schema Definition Langauge (SDL)**, we write Python code to describe the data provided by your server. +Compare Graphene's *code-first* approach to building a GraphQL API with *schema-first* approaches like `Apollo Server`_ (JavaScript) or Ariadne_ (Python). Instead of writing GraphQL **Schema Definition Language (SDL)**, we write Python code to describe the data provided by your server. .. _Apollo Server: https://www.apollographql.com/docs/apollo-server/ diff --git a/docs/types/objecttypes.rst b/docs/types/objecttypes.rst index 5f7272a6f..f56cad9b3 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -195,7 +195,7 @@ The two resolvers in this example are effectively the same. # ... 
-If you prefer your code to be more explict, feel free to use ``@staticmethod`` decorators. Otherwise, your code may be cleaner without them! +If you prefer your code to be more explicit, feel free to use ``@staticmethod`` decorators. Otherwise, your code may be cleaner without them! .. _DefaultResolver: @@ -251,7 +251,7 @@ GraphQL Argument defaults If you define an argument for a field that is not required (and in a query execution it is not provided as an argument) it will not be passed to the -resolver function at all. This is so that the developer can differenciate +resolver function at all. This is so that the developer can differentiate between a ``undefined`` value for an argument and an explicit ``null`` value. For example, given this schema: diff --git a/examples/starwars_relay/data.py b/examples/starwars_relay/data.py index 01231b7f1..0ed654c2a 100644 --- a/examples/starwars_relay/data.py +++ b/examples/starwars_relay/data.py @@ -14,7 +14,7 @@ def setup(): # Yeah, technically it's Corellian. But it flew in the service of the rebels, # so for the purposes of this demo it's a rebel ship. 
- falcon = Ship(id="4", name="Millenium Falcon") + falcon = Ship(id="4", name="Millennium Falcon") homeOne = Ship(id="5", name="Home One") diff --git a/examples/starwars_relay/tests/snapshots/snap_test_mutation.py b/examples/starwars_relay/tests/snapshots/snap_test_mutation.py index 4608bdaf5..c35b2aeba 100644 --- a/examples/starwars_relay/tests/snapshots/snap_test_mutation.py +++ b/examples/starwars_relay/tests/snapshots/snap_test_mutation.py @@ -17,7 +17,7 @@ {"node": {"id": "U2hpcDox", "name": "X-Wing"}}, {"node": {"id": "U2hpcDoy", "name": "Y-Wing"}}, {"node": {"id": "U2hpcDoz", "name": "A-Wing"}}, - {"node": {"id": "U2hpcDo0", "name": "Millenium Falcon"}}, + {"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}}, {"node": {"id": "U2hpcDo1", "name": "Home One"}}, {"node": {"id": "U2hpcDo5", "name": "Peter"}}, ] diff --git a/graphene/types/argument.py b/graphene/types/argument.py index f0ba4c1c7..cdc21d4bb 100644 --- a/graphene/types/argument.py +++ b/graphene/types/argument.py @@ -30,7 +30,7 @@ class Argument(MountedType): type (class for a graphene.UnmountedType): must be a class (not an instance) of an unmounted graphene type (ex. scalar or object) which is used for the type of this argument in the GraphQL schema. - required (bool): indicates this argument as not null in the graphql scehma. Same behavior + required (bool): indicates this argument as not null in the graphql schema. Same behavior as graphene.NonNull. Default False. name (str): the name of the GraphQL argument. Defaults to parameter name. description (str): the description of the GraphQL argument in the schema. diff --git a/graphene/types/field.py b/graphene/types/field.py index d55b0347a..56c2ff671 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -50,7 +50,7 @@ class Person(ObjectType): object. Alternative to resolver (cannot set both source and resolver). 
deprecation_reason (optional, str): Setting this value indicates that the field is depreciated and may provide instruction or reason on how for clients to proceed. - required (optional, bool): indicates this field as not null in the graphql scehma. Same behavior as + required (optional, bool): indicates this field as not null in the graphql schema. Same behavior as graphene.NonNull. Default False. name (optional, str): the name of the GraphQL field (must be unique in a type). Defaults to attribute name. diff --git a/graphene/types/inputfield.py b/graphene/types/inputfield.py index ed9b96ab0..b0e0915a4 100644 --- a/graphene/types/inputfield.py +++ b/graphene/types/inputfield.py @@ -38,7 +38,7 @@ class Person(InputObjectType): deprecation_reason (optional, str): Setting this value indicates that the field is depreciated and may provide instruction or reason on how for clients to proceed. description (optional, str): Description of the GraphQL field in the schema. - required (optional, bool): Indicates this input field as not null in the graphql scehma. + required (optional, bool): Indicates this input field as not null in the graphql schema. Raises a validation error if argument not provided. Same behavior as graphene.NonNull. Default False. **extra_args (optional, Dict): Not used. diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index 1fa4cf353..cca23d111 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -22,7 +22,7 @@ class ObjectType(BaseType): have a name, but most importantly describe their fields. The name of the type defined by an _ObjectType_ defaults to the class name. The type - description defaults to the class docstring. This can be overriden by adding attributes + description defaults to the class docstring. This can be overridden by adding attributes to a Meta inner class. The class attributes of an _ObjectType_ are mounted as instances of ``graphene.Field``. 
@@ -66,8 +66,8 @@ class Query(ObjectType): docstring. interfaces (Iterable[graphene.Interface]): GraphQL interfaces to extend with this object. all fields from interface will be included in this object's schema. - possible_types (Iterable[class]): Used to test parent value object via isintance to see if - this type can be used to resolve an ambigous type (interface, union). + possible_types (Iterable[class]): Used to test parent value object via isinstance to see if + this type can be used to resolve an ambiguous type (interface, union). default_resolver (any Callable resolver): Override the default resolver for this type. Defaults to graphene default resolver which returns an attribute or dictionary key with the same name as the field. diff --git a/graphene/types/schema.py b/graphene/types/schema.py index bf8c469a4..3249c6f6c 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -481,7 +481,7 @@ def execute(self, *args, **kwargs): as string or parsed AST form from `graphql-core`. root_value (Any, optional): Value to use as the parent value object when resolving root types. - context_value (Any, optional): Value to be made avaiable to all resolvers via + context_value (Any, optional): Value to be made available to all resolvers via `info.context`. Can be used to share authorization, dataloaders or other information needed to resolve an operation. 
variable_values (dict, optional): If variables are used in the request string, they can diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 1c5bdb383..40cd4afd7 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -172,7 +172,7 @@ class RGB(Enum): assert RGB.BLUE == 3 -def test_enum_can_be_initialzied(): +def test_enum_can_be_initialized(): class RGB(Enum): RED = 1 GREEN = 2 diff --git a/graphene/types/union.py b/graphene/types/union.py index 194ba1da7..5ae54562a 100644 --- a/graphene/types/union.py +++ b/graphene/types/union.py @@ -23,7 +23,7 @@ class Union(UnmountedType, BaseType): The schema in this example can take a search text and return any of the GraphQL object types indicated: Human, Droid or Startship. - Ambigous return types can be resolved on each ObjectType through ``Meta.possible_types`` + Ambiguous return types can be resolved on each ObjectType through ``Meta.possible_types`` attribute or ``is_type_of`` method. Or by implementing ``resolve_type`` class method on the Union. diff --git a/graphene/types/uuid.py b/graphene/types/uuid.py index 1f980fc79..ef09ae6c7 100644 --- a/graphene/types/uuid.py +++ b/graphene/types/uuid.py @@ -8,7 +8,7 @@ class UUID(Scalar): """ - Leverages the internal Python implmeentation of UUID (uuid.UUID) to provide native UUID objects + Leverages the internal Python implementation of UUID (uuid.UUID) to provide native UUID objects in fields, resolvers and input. 
""" From a3b215d8911a6b24e0db5a0b2cbd8468e7512d0c Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Fri, 27 Sep 2019 09:54:19 +0100 Subject: [PATCH 003/141] Remove AbstractType (#1053) --- docs/types/abstracttypes.rst | 43 ----------------------- docs/types/index.rst | 1 - graphene/__init__.py | 3 -- graphene/types/__init__.py | 5 --- graphene/types/abstracttype.py | 11 ------ graphene/types/tests/test_abstracttype.py | 39 -------------------- 6 files changed, 102 deletions(-) delete mode 100644 docs/types/abstracttypes.rst delete mode 100644 graphene/types/abstracttype.py delete mode 100644 graphene/types/tests/test_abstracttype.py diff --git a/docs/types/abstracttypes.rst b/docs/types/abstracttypes.rst deleted file mode 100644 index 093cfd931..000000000 --- a/docs/types/abstracttypes.rst +++ /dev/null @@ -1,43 +0,0 @@ -AbstractTypes -============= - -An AbstractType contains fields that can be shared among -``graphene.ObjectType``, ``graphene.Interface``, -``graphene.InputObjectType`` or other ``graphene.AbstractType``. - -The basics: - -- Each AbstractType is a Python class that inherits from ``graphene.AbstractType``. -- Each attribute of the AbstractType represents a field (a ``graphene.Field`` or - ``graphene.InputField`` depending on where it is mounted) - -Quick example -------------- - -In this example UserFields is an ``AbstractType`` with a name. ``User`` and -``UserInput`` are two types that have their own fields -plus the ones defined in ``UserFields``. - -.. code:: python - - import graphene - - class UserFields(graphene.AbstractType): - name = graphene.String() - - class User(graphene.ObjectType, UserFields): - pass - - class UserInput(graphene.InputObjectType, UserFields): - pass - - -.. 
code:: - - type User { - name: String - } - - inputtype UserInput { - name: String - } diff --git a/docs/types/index.rst b/docs/types/index.rst index d44894af3..d82beb33a 100644 --- a/docs/types/index.rst +++ b/docs/types/index.rst @@ -15,4 +15,3 @@ Types Reference interfaces unions mutations - abstracttypes diff --git a/graphene/__init__.py b/graphene/__init__.py index d7d7ef888..f667e0147 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -1,7 +1,6 @@ from .pyutils.version import get_version from .types import ( - AbstractType, ObjectType, InputObjectType, Interface, @@ -86,6 +85,4 @@ "lazy_import", "Context", "ResolveInfo", - # Deprecated - "AbstractType", ] diff --git a/graphene/types/__init__.py b/graphene/types/__init__.py index 292db235b..680149a35 100644 --- a/graphene/types/__init__.py +++ b/graphene/types/__init__.py @@ -20,9 +20,6 @@ from .union import Union from .context import Context -# Deprecated -from .abstracttype import AbstractType - __all__ = [ "ObjectType", @@ -52,6 +49,4 @@ "Union", "Context", "ResolveInfo", - # Deprecated - "AbstractType", ] diff --git a/graphene/types/abstracttype.py b/graphene/types/abstracttype.py deleted file mode 100644 index 4eeb7f9cb..000000000 --- a/graphene/types/abstracttype.py +++ /dev/null @@ -1,11 +0,0 @@ -from ..utils.deprecated import warn_deprecation -from ..utils.subclass_with_meta import SubclassWithMeta - - -class AbstractType(SubclassWithMeta): - def __init_subclass__(cls, *args, **kwargs): - warn_deprecation( - "Abstract type is deprecated, please use normal object inheritance instead.\n" - "See more: https://github.com/graphql-python/graphene/blob/master/UPGRADE-v2.0.md#deprecations" - ) - super(AbstractType, cls).__init_subclass__(*args, **kwargs) diff --git a/graphene/types/tests/test_abstracttype.py b/graphene/types/tests/test_abstracttype.py deleted file mode 100644 index a50c87571..000000000 --- a/graphene/types/tests/test_abstracttype.py +++ /dev/null @@ -1,39 +0,0 @@ -from pytest 
import deprecated_call - -from ..abstracttype import AbstractType -from ..field import Field -from ..objecttype import ObjectType -from ..unmountedtype import UnmountedType - - -class MyType(ObjectType): - pass - - -class MyScalar(UnmountedType): - def get_type(self): - return MyType - - -def test_abstract_objecttype_warn_deprecation(): - with deprecated_call(): - - # noinspection PyUnusedLocal - class MyAbstractType(AbstractType): - field1 = MyScalar() - - -def test_generate_objecttype_inherit_abstracttype(): - with deprecated_call(): - - class MyAbstractType(AbstractType): - field1 = MyScalar() - - class MyObjectType(ObjectType, MyAbstractType): - field2 = MyScalar() - - assert MyObjectType._meta.description is None - assert MyObjectType._meta.interfaces == () - assert MyObjectType._meta.name == "MyObjectType" - assert list(MyObjectType._meta.fields) == ["field1", "field2"] - assert list(map(type, MyObjectType._meta.fields.values())) == [Field, Field] From 7c7876d37cd697d721a11ed1f9e469eda94c9c0f Mon Sep 17 00:00:00 2001 From: Theodore Diamantidis Date: Fri, 27 Sep 2019 11:54:46 +0300 Subject: [PATCH 004/141] Propagate arguments of relay.NodeField to Field (#1036) * Propagate name, deprecation_reason arguments of relay.NodeField to Field * Allow custom description in Node.Field and move ID description to ID argument * Add test for Node.Field with custom name * Add tests for description, deprecation_reason arguments of NodeField * Pass all kwargs from NodeField to Field --- graphene/relay/node.py | 6 +++--- graphene/relay/tests/test_node.py | 11 +++++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/graphene/relay/node.py b/graphene/relay/node.py index 54423bbba..1a5c5bdb3 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -47,7 +47,7 @@ def get_resolver(self, parent_resolver): class NodeField(Field): - def __init__(self, node, type=False, deprecation_reason=None, name=None, **kwargs): + def __init__(self, node, type=False, 
**kwargs): assert issubclass(node, Node), "NodeField can only operate in Nodes" self.node_type = node self.field_type = type @@ -56,8 +56,8 @@ def __init__(self, node, type=False, deprecation_reason=None, name=None, **kwarg # If we don's specify a type, the field type will be the node # interface type or node, - description="The ID of the object", - id=ID(required=True), + id=ID(required=True, description="The ID of the object"), + **kwargs ) def get_resolver(self, parent_resolver): diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index c43ee1edc..c322b1a3a 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -106,6 +106,17 @@ def test_node_field_custom(): assert node_field.node_type == Node +def test_node_field_args(): + field_args = { + "name": "my_custom_name", + "description": "my_custom_description", + "deprecation_reason": "my_custom_deprecation_reason", + } + node_field = Node.Field(**field_args) + for field_arg, value in field_args.items(): + assert getattr(node_field, field_arg) == value + + def test_node_field_only_type(): executed = schema.execute( '{ onlyNode(id:"%s") { __typename, name } } ' % Node.to_global_id("MyNode", 1) From 3f6f4269462ba0f90c62de1140a9e49e0cda2fa5 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Fri, 18 Oct 2019 10:50:54 +0100 Subject: [PATCH 005/141] Update stale.yml --- .github/stale.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/stale.yml b/.github/stale.yml index dc90e5a1c..c9418f678 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -1,7 +1,7 @@ # Number of days of inactivity before an issue becomes stale -daysUntilStale: 60 +daysUntilStale: 90 # Number of days of inactivity before a stale issue is closed -daysUntilClose: 7 +daysUntilClose: 14 # Issues with these labels will never be considered stale exemptLabels: - pinned From abc2c2a78418364c0f0f5c7c154cbae1ad34997b Mon Sep 17 00:00:00 2001 From: TheMelter Date: 
Thu, 19 Dec 2019 23:02:45 -0800 Subject: [PATCH 006/141] Fix typo in execute.rst (#1115) --- docs/execution/execute.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/execute.rst b/docs/execution/execute.rst index 74300a82b..f0ea88537 100644 --- a/docs/execution/execute.rst +++ b/docs/execution/execute.rst @@ -4,7 +4,7 @@ Executing a query ================= -For executing a query a schema, you can directly call the ``execute`` method on it. +For executing a query against a schema, you can directly call the ``execute`` method on it. .. code:: python From e31b93d1fdda810d70d3050c73c6638b29219d12 Mon Sep 17 00:00:00 2001 From: Yu Mochizuki Date: Thu, 26 Dec 2019 20:27:55 +0900 Subject: [PATCH 007/141] Increase the allowed version of aniso8601 (#1072) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 3e22b7c43..d50aeba15 100644 --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ def run_tests(self): install_requires=[ "graphql-core>=3.0.0a0,<4", "graphql-relay>=3.0.0a0,<4", - "aniso8601>=6,<8", + "aniso8601>=6,<9", ], tests_require=tests_require, extras_require={"test": tests_require}, From c0fbcba97a459e4a28e72d17755f6a1a21cbd74a Mon Sep 17 00:00:00 2001 From: Iman Date: Thu, 26 Dec 2019 23:32:28 +0330 Subject: [PATCH 008/141] Update quickstart.rst (#1090) A miss letter --- docs/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 4ff0dfa2c..2f0d54f99 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -30,7 +30,7 @@ Compare Graphene's *code-first* approach to building a GraphQL API with *schema- .. _Ariadne: https://ariadne.readthedocs.io -Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas tha are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well. 
+Graphene is fully featured with integrations for the most popular web frameworks and ORMs. Graphene produces schemas that are fully compliant with the GraphQL spec and provides tools and patterns for building a Relay-Compliant API as well. An example in Graphene ---------------------- From 482c7fcc65e98ba5f96ea5de3546ea95e7b1cdc7 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 26 Dec 2019 20:02:57 +0000 Subject: [PATCH 009/141] Add file uploading docs (#1084) --- docs/execution/fileuploading.rst | 8 ++++++++ docs/execution/index.rst | 1 + 2 files changed, 9 insertions(+) create mode 100644 docs/execution/fileuploading.rst diff --git a/docs/execution/fileuploading.rst b/docs/execution/fileuploading.rst new file mode 100644 index 000000000..d92174c0d --- /dev/null +++ b/docs/execution/fileuploading.rst @@ -0,0 +1,8 @@ +File uploading +============== + +File uploading is not part of the official GraphQL spec yet and is not natively +implemented in Graphene. + +If your server needs to support file uploading then you can use the libary: `graphene-file-upload `_ which enhances Graphene to add file +uploads and conforms to the unoffical GraphQL `multipart request spec `_. 
diff --git a/docs/execution/index.rst b/docs/execution/index.rst index 00d98ffb6..93a028456 100644 --- a/docs/execution/index.rst +++ b/docs/execution/index.rst @@ -8,3 +8,4 @@ Execution execute middleware dataloader + fileuploading From 81d61f82c5f0d60ee6aa4135a7f83f9c38ebf186 Mon Sep 17 00:00:00 2001 From: Tom Paoletti Date: Thu, 26 Dec 2019 12:05:14 -0800 Subject: [PATCH 010/141] Fix objecttypes DefaultResolver example (#1087) (#1088) * Create namedtuple as expected * Access result.data instead of result['data'] * Refer to field with camel-case name --- docs/types/objecttypes.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/types/objecttypes.rst b/docs/types/objecttypes.rst index f56cad9b3..7919941aa 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -212,7 +212,7 @@ If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a from graphene import ObjectType, String, Field, Schema - PersonValueObject = namedtuple('Person', 'first_name', 'last_name') + PersonValueObject = namedtuple('Person', ['first_name', 'last_name']) class Person(ObjectType): first_name = String() @@ -238,10 +238,10 @@ If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a } ''') # With default resolvers we can resolve attributes from an object.. - assert result['data']['me'] == {"firstName": "Luke", "lastName": "Skywalker"} + assert result.data['me'] == {"firstName": "Luke", "lastName": "Skywalker"} # With default resolvers, we can also resolve keys from a dictionary.. - assert result['data']['my_best_friend'] == {"firstName": "R2", "lastName": "D2"} + assert result.data['myBestFriend'] == {"firstName": "R2", "lastName": "D2"} Advanced ~~~~~~~~ @@ -280,7 +280,7 @@ An error will be thrown: TypeError: resolve_hello() missing 1 required positional argument: 'name' -You can fix this error in serveral ways. Either by combining all keyword arguments +You can fix this error in several ways. 
Either by combining all keyword arguments into a dict: .. code:: python From bd6d8d086dc350ab23f2ba56aee16de12bad53cf Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Tue, 31 Dec 2019 14:08:30 +0000 Subject: [PATCH 011/141] Fix tests (#1119) * Fix tests * Add extra folders to make test command * Update snapshots * Add python 3.8 to test matrix * Add black command to makefile and black dependency to setup.py * Add lint command * Run format * Remove 3.8 from test matrix * Add Python 3.8 to test matrix * Update setup.py --- .travis.yml | 1 + Makefile | 12 +++++++++-- .../snap_test_objectidentification.py | 8 ++++--- graphene/relay/tests/test_node.py | 21 +++++++++++-------- graphene/relay/tests/test_node_custom.py | 6 ++++-- setup.py | 9 +++++--- 6 files changed, 38 insertions(+), 19 deletions(-) diff --git a/.travis.yml b/.travis.yml index a5d15f2d5..e1e551198 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,7 @@ dist: xenial python: - "3.6" - "3.7" + - "3.8" install: - pip install tox tox-travis diff --git a/Makefile b/Makefile index b4e6c928f..df3b41181 100644 --- a/Makefile +++ b/Makefile @@ -5,10 +5,10 @@ help: .PHONY: install-dev ## Install development dependencies install-dev: - pip install -e ".[test]" + pip install -e ".[dev]" test: - py.test graphene + py.test graphene examples tests_asyncio .PHONY: docs ## Generate docs docs: install-dev @@ -17,3 +17,11 @@ docs: install-dev .PHONY: docs-live ## Generate docs with live reloading docs-live: install-dev cd docs && make install && make livehtml + +.PHONY: format +format: + black graphene examples setup.py tests_asyncio + +.PHONY: lint +lint: + flake8 graphene examples setup.py tests_asyncio diff --git a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py index cb57709ae..02e61c393 100644 --- a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py +++ 
b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py @@ -4,6 +4,7 @@ from snapshottest import Snapshot + snapshots = Snapshot() snapshots["test_correctly_fetches_id_name_rebels 1"] = { @@ -84,9 +85,10 @@ type Query { rebels: Faction empire: Faction - - """The ID of the object""" - node(id: ID!): Node + node( + """The ID of the object""" + id: ID! + ): Node } """A ship in the Star Wars saga""" diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index c322b1a3a..62fd31a3e 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -183,15 +183,18 @@ def test_str_schema(): type RootQuery { first: String - - """The ID of the object""" - node(id: ID!): Node - - """The ID of the object""" - onlyNode(id: ID!): MyNode - - """The ID of the object""" - onlyNodeLazy(id: ID!): MyNode + node( + """The ID of the object""" + id: ID! + ): Node + onlyNode( + """The ID of the object""" + id: ID! + ): MyNode + onlyNodeLazy( + """The ID of the object""" + id: ID! + ): MyNode } ''' ) diff --git a/graphene/relay/tests/test_node_custom.py b/graphene/relay/tests/test_node_custom.py index 773be48f3..6f28eb665 100644 --- a/graphene/relay/tests/test_node_custom.py +++ b/graphene/relay/tests/test_node_custom.py @@ -78,8 +78,10 @@ def test_str_schema_correct(): } type RootQuery { - """The ID of the object""" - node(id: ID!): Node + node( + """The ID of the object""" + id: ID! 
+ ): Node } type User implements Node { diff --git a/setup.py b/setup.py index d50aeba15..58ec73456 100644 --- a/setup.py +++ b/setup.py @@ -59,6 +59,8 @@ def run_tests(self): "iso8601", ] +dev_requires = ["black==19.3b0", "flake8==3.7.7"] + tests_require + setup( name="graphene", version=version, @@ -76,15 +78,16 @@ def run_tests(self): "Topic :: Software Development :: Libraries", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", ], keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["tests", "tests.*", "examples"]), install_requires=[ - "graphql-core>=3.0.0a0,<4", - "graphql-relay>=3.0.0a0,<4", + "graphql-core>=3.0.0,<4", + "graphql-relay>=3.0.0,<4", "aniso8601>=6,<9", ], tests_require=tests_require, - extras_require={"test": tests_require}, + extras_require={"test": tests_require, "dev": dev_requires}, cmdclass={"test": PyTest}, ) From f82b8113776123afbec68ee9018891cb856a470e Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 30 Jan 2020 12:18:00 +0000 Subject: [PATCH 012/141] Fix example code (#1120) --- docs/types/objecttypes.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/types/objecttypes.rst b/docs/types/objecttypes.rst index 7919941aa..77ab130b0 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -101,7 +101,7 @@ When we execute a query against that schema. 
query_string = "{ me { fullName } }" result = schema.execute(query_string) - assert result["data"]["me"] == {"fullName": "Luke Skywalker") + assert result.data["me"] == {"fullName": "Luke Skywalker") Then we go through the following steps to resolve this query: @@ -212,7 +212,7 @@ If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a from graphene import ObjectType, String, Field, Schema - PersonValueObject = namedtuple('Person', ['first_name', 'last_name']) + PersonValueObject = namedtuple("Person", ["first_name", "last_name"]) class Person(ObjectType): first_name = String() @@ -224,7 +224,7 @@ If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a def resolve_me(parent, info): # always pass an object for `me` field - return PersonValueObject(first_name='Luke', last_name='Skywalker') + return PersonValueObject(first_name="Luke", last_name="Skywalker") def resolve_my_best_friend(parent, info): # always pass a dictionary for `my_best_fiend_field` @@ -238,10 +238,10 @@ If the :ref:`ResolverParamParent` is a dictionary, the resolver will look for a } ''') # With default resolvers we can resolve attributes from an object.. - assert result.data['me'] == {"firstName": "Luke", "lastName": "Skywalker"} + assert result.data["me"] == {"firstName": "Luke", "lastName": "Skywalker"} # With default resolvers, we can also resolve keys from a dictionary.. 
- assert result.data['myBestFriend'] == {"firstName": "R2", "lastName": "D2"} + assert result.data["myBestFriend"] == {"firstName": "R2", "lastName": "D2"} Advanced ~~~~~~~~ From 55a03ba716ca6be431a42c817d9eca2154f6661c Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 30 Jan 2020 16:17:12 +0000 Subject: [PATCH 013/141] Update readme (#1130) * Add slack link and dev notice to the README * Fix formatting * Update formatting * Add notice to documentation --- README.md | 15 ++++++++------- docs/index.rst | 6 ++++++ 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 9e84f819c..e7bc5a60f 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,18 @@ +# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) + +[💬 Join the community on Slack](https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM) + **We are looking for contributors**! 
Please check the [ROADMAP](https://github.com/graphql-python/graphene/blob/master/ROADMAP.md) to see how you can help ❤️ --- -# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) +**The below readme is the documentation for the `dev` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the [v2 docs](https://docs.graphene-python.org/en/stable/)** + +--- ## Introduction -[Graphene](http://graphene-python.org) is a Python library for building GraphQL schemas/types fast and easily. +[Graphene](http://graphene-python.org) is an opinionated Python library for building GraphQL schemas/types fast and easily. - **Easy to use:** Graphene helps you use GraphQL in Python without effort. - **Relay:** Graphene has builtin support for Relay. @@ -23,7 +29,6 @@ Graphene has multiple integrations with different frameworks: | Django | [graphene-django](https://github.com/graphql-python/graphene-django/) | | SQLAlchemy | [graphene-sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy/) | | Google App Engine | [graphene-gae](https://github.com/graphql-python/graphene-gae/) | -| Peewee | _In progress_ ([Tracking Issue](https://github.com/graphql-python/graphene/issues/289)) | Also, Graphene is fully compatible with the GraphQL spec, working seamlessly with all GraphQL clients, such as [Relay](https://github.com/facebook/relay), [Apollo](https://github.com/apollographql/apollo-client) and [gql](https://github.com/graphql-python/gql). 
@@ -35,10 +40,6 @@ For instaling graphene, just run this command in your shell pip install "graphene>=2.0" ``` -## 2.0 Upgrade Guide - -Please read [UPGRADE-v2.0.md](/UPGRADE-v2.0.md) to learn how to upgrade. - ## Examples Here is one example for you to get started: diff --git a/docs/index.rst b/docs/index.rst index 8db02a6e1..dfaab1d51 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,6 +1,12 @@ Graphene ======== +------------ + +The documentation below is for the ``dev`` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the `v2 docs `_. + +------------ + Contents: .. toctree:: From 9a19447213c53f7ba19f9dad5206a462ab31fcf0 Mon Sep 17 00:00:00 2001 From: Henry Baldursson Date: Sat, 8 Feb 2020 17:21:25 +0000 Subject: [PATCH 014/141] Use unidecode to handle unicode characters in constant names (#1080) --- graphene/utils/str_converters.py | 3 ++- graphene/utils/tests/test_str_converters.py | 4 ++++ setup.py | 1 + 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/graphene/utils/str_converters.py b/graphene/utils/str_converters.py index 216b05476..9ac8461f3 100644 --- a/graphene/utils/str_converters.py +++ b/graphene/utils/str_converters.py @@ -1,4 +1,5 @@ import re +from unidecode import unidecode # Adapted from this response in Stackoverflow @@ -18,4 +19,4 @@ def to_snake_case(name): def to_const(string): - return re.sub(r"[\W|^]+", "_", string).upper() # noqa + return re.sub(r"[\W|^]+", "_", unidecode(string)).upper() diff --git a/graphene/utils/tests/test_str_converters.py b/graphene/utils/tests/test_str_converters.py index 786149d98..d765906c8 100644 --- a/graphene/utils/tests/test_str_converters.py +++ b/graphene/utils/tests/test_str_converters.py @@ -21,3 +21,7 @@ def test_camel_case(): def test_to_const(): assert to_const('snakes $1. 
on a "#plane') == "SNAKES_1_ON_A_PLANE" + + +def test_to_const_unicode(): + assert to_const("Skoða þetta unicode stöff") == "SKODA_THETTA_UNICODE_STOFF" diff --git a/setup.py b/setup.py index 58ec73456..d7077f0c9 100644 --- a/setup.py +++ b/setup.py @@ -86,6 +86,7 @@ def run_tests(self): "graphql-core>=3.0.0,<4", "graphql-relay>=3.0.0,<4", "aniso8601>=6,<9", + "unidecode>=1.1.1,<2", ], tests_require=tests_require, extras_require={"test": tests_require, "dev": dev_requires}, From ad0b3a529cbe006284dfdb1c01d1b68b60c3cd18 Mon Sep 17 00:00:00 2001 From: Jean-Louis Fuchs Date: Sat, 8 Feb 2020 21:24:58 +0100 Subject: [PATCH 015/141] The default_value of InputField should be INVALID (#1111) * The default_value of InputField should be INVALID Since GraphQL 3.0 there is a distinction between None and INVALID (no value). The tests captured the bug and are updated. * Update minimum graphql-core version * Use Undefined instead of INVALID Co-authored-by: Jonathan Kim --- .../tests/snapshots/snap_test_objectidentification.py | 2 +- graphene/relay/tests/test_mutation.py | 4 ++-- graphene/types/inputfield.py | 3 ++- graphene/types/tests/test_query.py | 9 +++------ setup.py | 2 +- tests_asyncio/test_relay_mutation.py | 4 ++-- 6 files changed, 11 insertions(+), 13 deletions(-) diff --git a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py index 02e61c393..e42260f8e 100644 --- a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py +++ b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py @@ -46,7 +46,7 @@ input IntroduceShipInput { shipName: String! factionId: String! 
- clientMutationId: String = null + clientMutationId: String } type IntroduceShipPayload { diff --git a/graphene/relay/tests/test_mutation.py b/graphene/relay/tests/test_mutation.py index 5fb1c4687..e079ab4ea 100644 --- a/graphene/relay/tests/test_mutation.py +++ b/graphene/relay/tests/test_mutation.py @@ -80,11 +80,11 @@ class Input(SharedFields): @staticmethod def mutate_and_get_payload( - self, info, shared, additional_field, client_mutation_id=None + self, info, shared="", additional_field="", client_mutation_id=None ): edge_type = MyEdge return OtherMutation( - name=(shared or "") + (additional_field or ""), + name=shared + additional_field, my_node_edge=edge_type(cursor="1", node=MyNode(name="name")), ) diff --git a/graphene/types/inputfield.py b/graphene/types/inputfield.py index b0e0915a4..bf3538e32 100644 --- a/graphene/types/inputfield.py +++ b/graphene/types/inputfield.py @@ -1,3 +1,4 @@ +from graphql import Undefined from .mountedtype import MountedType from .structures import NonNull from .utils import get_type @@ -48,7 +49,7 @@ def __init__( self, type, name=None, - default_value=None, + default_value=Undefined, deprecation_reason=None, description=None, required=False, diff --git a/graphene/types/tests/test_query.py b/graphene/types/tests/test_query.py index 004d53c8b..fe9f39fce 100644 --- a/graphene/types/tests/test_query.py +++ b/graphene/types/tests/test_query.py @@ -262,17 +262,14 @@ def resolve_test(self, info, **args): result = test_schema.execute('{ test(aInput: {aField: "String!"} ) }', "Source!") assert not result.errors - assert result.data == { - "test": '["Source!",{"a_input":{"a_field":"String!","recursive_field":null}}]' - } + assert result.data == {"test": '["Source!",{"a_input":{"a_field":"String!"}}]'} result = test_schema.execute( '{ test(aInput: {recursiveField: {aField: "String!"}}) }', "Source!" 
) assert not result.errors assert result.data == { - "test": '["Source!",{"a_input":{"a_field":null,"recursive_field":' - '{"a_field":"String!","recursive_field":null}}}]' + "test": '["Source!",{"a_input":{"recursive_field":{"a_field":"String!"}}}]' } @@ -408,7 +405,7 @@ def resolve_all_containers(self, info): def test_big_list_of_containers_multiple_fields_custom_resolvers_query_benchmark( - benchmark + benchmark, ): class Container(ObjectType): x = Int() diff --git a/setup.py b/setup.py index d7077f0c9..977eba5d1 100644 --- a/setup.py +++ b/setup.py @@ -83,7 +83,7 @@ def run_tests(self): keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["tests", "tests.*", "examples"]), install_requires=[ - "graphql-core>=3.0.0,<4", + "graphql-core>=3.0.3,<4", "graphql-relay>=3.0.0,<4", "aniso8601>=6,<9", "unidecode>=1.1.1,<2", diff --git a/tests_asyncio/test_relay_mutation.py b/tests_asyncio/test_relay_mutation.py index 7b083dbf9..4308a6141 100644 --- a/tests_asyncio/test_relay_mutation.py +++ b/tests_asyncio/test_relay_mutation.py @@ -42,11 +42,11 @@ class Input(SharedFields): @staticmethod def mutate_and_get_payload( - self, info, shared, additional_field, client_mutation_id=None + self, info, shared="", additional_field="", client_mutation_id=None ): edge_type = MyEdge return OtherMutation( - name=(shared or "") + (additional_field or ""), + name=shared + additional_field, my_node_edge=edge_type(cursor="1", node=MyNode(name="name")), ) From 23bb52a770e8b696770cdd29b76dbd23c4a5e749 Mon Sep 17 00:00:00 2001 From: James <33908344+allen-munsch@users.noreply.github.com> Date: Mon, 10 Feb 2020 16:16:11 -0600 Subject: [PATCH 016/141] Add a helpful message to when a global_id fails to parse. (#1074) * Add a helpful message to when a global_id fails to parse. 
* Update test_node to have errors on test_node_query_incorrect_id * Black the node.py file * Remove func wrapper used in debugging get_resolver partial * Update node.py * Expand error messages Co-authored-by: Jonathan Kim --- graphene/relay/node.py | 27 +++++++++++++++++++++++---- graphene/relay/tests/test_node.py | 18 +++++++++++++++++- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/graphene/relay/node.py b/graphene/relay/node.py index 1a5c5bdb3..f8927ab76 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -90,9 +90,24 @@ def node_resolver(cls, only_type, root, info, id): def get_node_from_global_id(cls, info, global_id, only_type=None): try: _type, _id = cls.from_global_id(global_id) - graphene_type = info.schema.get_type(_type).graphene_type - except Exception: - return None + except Exception as e: + raise Exception( + ( + 'Unable to parse global ID "{global_id}". ' + 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' + "Exception message: {exception}".format( + global_id=global_id, exception=str(e) + ) + ) + ) + + graphene_type = info.schema.get_type(_type) + if graphene_type is None: + raise Exception( + 'Relay Node "{_type}" not found in schema'.format(_type=_type) + ) + + graphene_type = graphene_type.graphene_type if only_type: assert graphene_type == only_type, ("Must receive a {} id.").format( @@ -101,7 +116,11 @@ def get_node_from_global_id(cls, info, global_id, only_type=None): # We make sure the ObjectType implements the "Node" interface if cls not in graphene_type._meta.interfaces: - return None + raise Exception( + 'ObjectType "{_type}" does not implement the "{cls}" interface.'.format( + _type=_type, cls=cls + ) + ) get_node = getattr(graphene_type, "get_node", None) if get_node: diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index 62fd31a3e..de1802e90 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -1,3 +1,4 @@ 
+import re from graphql_relay import to_global_id from graphql.pyutils import dedent @@ -83,6 +84,20 @@ def test_node_requesting_non_node(): executed = schema.execute( '{ node(id:"%s") { __typename } } ' % Node.to_global_id("RootQuery", 1) ) + assert executed.errors + assert re.match( + r"ObjectType .* does not implement the .* interface.", + executed.errors[0].message, + ) + assert executed.data == {"node": None} + + +def test_node_requesting_unknown_type(): + executed = schema.execute( + '{ node(id:"%s") { __typename } } ' % Node.to_global_id("UnknownType", 1) + ) + assert executed.errors + assert re.match(r"Relay Node .* not found in schema", executed.errors[0].message) assert executed.data == {"node": None} @@ -90,7 +105,8 @@ def test_node_query_incorrect_id(): executed = schema.execute( '{ node(id:"%s") { ... on MyNode { name } } }' % "something:2" ) - assert not executed.errors + assert executed.errors + assert re.match(r"Unable to parse global ID .*", executed.errors[0].message) assert executed.data == {"node": None} From 03bd6984dd19750c8b472b2f15b6ba99feaaab9b Mon Sep 17 00:00:00 2001 From: David Sanders Date: Mon, 10 Feb 2020 14:17:16 -0800 Subject: [PATCH 017/141] fix example middleware class in docs (#1134) --- docs/execution/middleware.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/middleware.rst b/docs/execution/middleware.rst index ad109e440..2a5e20f7a 100644 --- a/docs/execution/middleware.rst +++ b/docs/execution/middleware.rst @@ -29,7 +29,7 @@ This middleware only continues evaluation if the ``field_name`` is not ``'user'` .. 
code:: python class AuthorizationMiddleware(object): - def resolve(next, root, info, **args): + def resolve(self, next, root, info, **args): if info.field_name == 'user': return None return next(root, info, **args) From be97a369f7a08444540fba36d9d01ac4ecbdfc3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=EC=A0=95=EC=9C=A0=EC=84=9D?= Date: Tue, 18 Feb 2020 17:53:48 +0900 Subject: [PATCH 018/141] fix typo in class 'Interface' (#1135) --- graphene/types/interface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/types/interface.py b/graphene/types/interface.py index def0d040f..77086dab0 100644 --- a/graphene/types/interface.py +++ b/graphene/types/interface.py @@ -68,4 +68,4 @@ def resolve_type(cls, instance, info): return type(instance) def __init__(self, *args, **kwargs): - raise Exception("An Interface cannot be intitialized") + raise Exception("An Interface cannot be initialized") From ba5b7dd3d7b94c27359ca5d85c4320eff4ce6012 Mon Sep 17 00:00:00 2001 From: Lem Ko Date: Fri, 21 Feb 2020 19:15:51 +0800 Subject: [PATCH 019/141] Fix example query in quickstart doc (#1139) --- docs/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 2f0d54f99..d2ac83bed 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -127,7 +127,7 @@ Then we can start querying our **Schema** by passing a GraphQL query string to ` query_string = '{ hello }' result = schema.execute(query_string) print(result.data['hello']) - # "Hello stranger" + # "Hello stranger!" 
# or passing the argument in the query query_with_argument = '{ hello(name: "GraphQL") }' From ac98be78363b98def729e129484a06c26324dccd Mon Sep 17 00:00:00 2001 From: Jayden Windle Date: Wed, 26 Feb 2020 14:18:13 -0600 Subject: [PATCH 020/141] Use Undefined instead of the now deprecated INVALID (#1143) --- graphene/types/datetime.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/graphene/types/datetime.py b/graphene/types/datetime.py index c533d23e4..25a1248e1 100644 --- a/graphene/types/datetime.py +++ b/graphene/types/datetime.py @@ -3,7 +3,7 @@ import datetime from aniso8601 import parse_date, parse_datetime, parse_time -from graphql.error import INVALID +from graphql import Undefined from graphql.language import StringValueNode from .scalars import Scalar @@ -38,7 +38,7 @@ def parse_value(value): elif isinstance(value, str): return parse_date(value) except ValueError: - return INVALID + return Undefined class DateTime(Scalar): @@ -68,7 +68,7 @@ def parse_value(value): elif isinstance(value, str): return parse_datetime(value) except ValueError: - return INVALID + return Undefined class Time(Scalar): @@ -98,4 +98,4 @@ def parse_value(cls, value): elif isinstance(value, str): return parse_time(value) except ValueError: - return INVALID + return Undefined From 98e10f0db834d2898ed9652f3e052feaea4c2de1 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 27 Feb 2020 20:51:59 +0000 Subject: [PATCH 021/141] Replace INVALID with Undefined (#1146) --- graphene/types/schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 3249c6f6c..79b5315b5 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -22,7 +22,7 @@ GraphQLObjectType, GraphQLSchema, GraphQLString, - INVALID, + Undefined, ) from ..utils.str_converters import to_camel_case @@ -357,7 +357,7 @@ def construct_fields_for_type(self, map_, type_, is_input_type=False): arg_type, 
out_name=arg_name, description=arg.description, - default_value=INVALID + default_value=Undefined if isinstance(arg.type, NonNull) else arg.default_value, ) From 796880fc5cc1d79976f95390af00b0798201c9c3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Mar 2020 11:24:42 +0100 Subject: [PATCH 022/141] Update dependencies --- setup.py | 31 +++++++++++++++---------------- tox.ini | 10 +++++----- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/setup.py b/setup.py index 977eba5d1..084c7707b 100644 --- a/setup.py +++ b/setup.py @@ -45,21 +45,20 @@ def run_tests(self): tests_require = [ - "pytest", - "pytest-benchmark", - "pytest-cov", - "pytest-mock", - "pytest-asyncio", - "snapshottest", - "coveralls", - "promise", - "six", - "mock", - "pytz", - "iso8601", + "pytest>=5.3,<6", + "pytest-benchmark>=3.2,<4", + "pytest-cov>=2.8,<3", + "pytest-mock>=2,<3", + "pytest-asyncio>=0.10,<2", + "snapshottest>=0.5,<1", + "coveralls>=1.11,<2", + "promise>=2.3,<3", + "mock>=4.0,<5", + "pytz==2019.3", + "iso8601>=0.1,<2", ] -dev_requires = ["black==19.3b0", "flake8==3.7.7"] + tests_require +dev_requires = ["black==19.10b0", "flake8>=3.7,<4"] + tests_require setup( name="graphene", @@ -83,9 +82,9 @@ def run_tests(self): keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["tests", "tests.*", "examples"]), install_requires=[ - "graphql-core>=3.0.3,<4", - "graphql-relay>=3.0.0,<4", - "aniso8601>=6,<9", + "graphql-core>=3.0.3,<3.1", + "graphql-relay>=3.0,<4", + "aniso8601>=8,<9", "unidecode>=1.1.1,<2", ], tests_require=tests_require, diff --git a/tox.ini b/tox.ini index 090cca076..468f5fbc2 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = flake8,py36,py37,pre-commit,mypy +envlist = flake8,py36,py37,py38,pre-commit,mypy skipsdist = true [testenv] @@ -8,12 +8,12 @@ deps = setenv = PYTHONPATH = .:{envdir} commands = - py{36,37}: py.test --cov=graphene graphene examples tests_asyncio {posargs} + py{36,37}: pytest 
--cov=graphene graphene examples tests_asyncio {posargs} [testenv:pre-commit] basepython=python3.7 deps = - pre-commit>0.12.0 + pre-commit>=2,<3 setenv = LC_CTYPE=en_US.UTF-8 commands = @@ -22,12 +22,12 @@ commands = [testenv:mypy] basepython=python3.7 deps = - mypy>=0.720 + mypy>=0.761,<1 commands = mypy graphene [testenv:flake8] -basepython=python3.6 +basepython=python3.7 deps = flake8>=3.7,<4 commands = From ffb77014662d0585e5096218440525431dcf05b5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Mar 2020 11:37:00 +0100 Subject: [PATCH 023/141] Create another alpha release --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index f667e0147..efc333ce5 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -42,7 +42,7 @@ from .utils.module_loading import lazy_import -VERSION = (3, 0, 0, "alpha", 0) +VERSION = (3, 0, 0, "alpha", 1) __version__ = get_version(VERSION) From 5e6f68957e94ca9b15831d2eed689ce1eeada426 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Mar 2020 12:23:40 +0100 Subject: [PATCH 024/141] Use latest graphql-core 3.1.0b1 instead of 3.0.3 Adapt Schema, because there is no type reducer in core 3.1 any more. 
--- .isort.cfg | 2 +- .../snap_test_objectidentification.py | 72 ++-- graphene/__init__.py | 2 +- graphene/relay/tests/test_node.py | 12 +- graphene/relay/tests/test_node_custom.py | 22 +- graphene/tests/issues/test_356.py | 2 +- graphene/types/datetime.py | 81 ++-- graphene/types/inputfield.py | 1 + graphene/types/schema.py | 357 ++++++++---------- graphene/types/tests/test_datetime.py | 23 +- graphene/types/tests/test_schema.py | 18 +- graphene/types/tests/test_type_map.py | 27 +- setup.py | 2 +- 13 files changed, 293 insertions(+), 328 deletions(-) diff --git a/.isort.cfg b/.isort.cfg index d4ed37bec..76c6f842f 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -1,2 +1,2 @@ [settings] -known_third_party = aniso8601,graphql,graphql_relay,promise,pytest,pytz,pyutils,setuptools,six,snapshottest,sphinx_graphene_theme +known_third_party = aniso8601,graphql,graphql_relay,promise,pytest,pytz,pyutils,setuptools,snapshottest,sphinx_graphene_theme diff --git a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py index e42260f8e..7bce5ba33 100644 --- a/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py +++ b/examples/starwars_relay/tests/snapshots/snap_test_objectidentification.py @@ -31,7 +31,16 @@ snapshots[ "test_str_schema 1" -] = '''"""A faction in the Star Wars saga""" +] = '''type Query { + rebels: Faction + empire: Faction + node( + """The ID of the object""" + id: ID! + ): Node +} + +"""A faction in the Star Wars saga""" type Faction implements Node { """The ID of the object""" id: ID! @@ -43,28 +52,20 @@ ships(before: String = null, after: String = null, first: Int = null, last: Int = null): ShipConnection } -input IntroduceShipInput { - shipName: String! - factionId: String! 
- clientMutationId: String -} - -type IntroduceShipPayload { - ship: Ship - faction: Faction - clientMutationId: String -} - -type Mutation { - introduceShip(input: IntroduceShipInput!): IntroduceShipPayload -} - """An object with an ID""" interface Node { """The ID of the object""" id: ID! } +type ShipConnection { + """Pagination data for this connection.""" + pageInfo: PageInfo! + + """Contains the nodes in this connection.""" + edges: [ShipEdge]! +} + """ The Relay compliant `PageInfo` type, containing data necessary to paginate this connection. """ @@ -82,13 +83,13 @@ endCursor: String } -type Query { - rebels: Faction - empire: Faction - node( - """The ID of the object""" - id: ID! - ): Node +"""A Relay edge containing a `Ship` and its cursor.""" +type ShipEdge { + """The item at the end of the edge""" + node: Ship + + """A cursor for use in pagination""" + cursor: String! } """A ship in the Star Wars saga""" @@ -100,20 +101,19 @@ name: String } -type ShipConnection { - """Pagination data for this connection.""" - pageInfo: PageInfo! - - """Contains the nodes in this connection.""" - edges: [ShipEdge]! +type Mutation { + introduceShip(input: IntroduceShipInput!): IntroduceShipPayload } -"""A Relay edge containing a `Ship` and its cursor.""" -type ShipEdge { - """The item at the end of the edge""" - node: Ship +type IntroduceShipPayload { + ship: Ship + faction: Faction + clientMutationId: String +} - """A cursor for use in pagination""" - cursor: String! +input IntroduceShipInput { + shipName: String! + factionId: String! 
+ clientMutationId: String } ''' diff --git a/graphene/__init__.py b/graphene/__init__.py index efc333ce5..876c30857 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -42,7 +42,7 @@ from .utils.module_loading import lazy_import -VERSION = (3, 0, 0, "alpha", 1) +VERSION = (3, 0, 0, "beta", 0) __version__ = get_version(VERSION) diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index de1802e90..92d851054 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -183,18 +183,18 @@ def test_str_schema(): name: String } - type MyOtherNode implements Node { + """An object with an ID""" + interface Node { """The ID of the object""" id: ID! - shared: String - somethingElse: String - extraField: String } - """An object with an ID""" - interface Node { + type MyOtherNode implements Node { """The ID of the object""" id: ID! + shared: String + somethingElse: String + extraField: String } type RootQuery { diff --git a/graphene/relay/tests/test_node_custom.py b/graphene/relay/tests/test_node_custom.py index 6f28eb665..cba7366b0 100644 --- a/graphene/relay/tests/test_node_custom.py +++ b/graphene/relay/tests/test_node_custom.py @@ -59,9 +59,12 @@ def test_str_schema_correct(): query: RootQuery } - interface BasePhoto { - """The width of the photo in pixels""" - width: Int + type User implements Node { + """The ID of the object""" + id: ID! + + """The full name of the user""" + name: String } interface Node { @@ -77,20 +80,17 @@ def test_str_schema_correct(): width: Int } + interface BasePhoto { + """The width of the photo in pixels""" + width: Int + } + type RootQuery { node( """The ID of the object""" id: ID! ): Node } - - type User implements Node { - """The ID of the object""" - id: ID! 
- - """The full name of the user""" - name: String - } ''' ) diff --git a/graphene/tests/issues/test_356.py b/graphene/tests/issues/test_356.py index 0e7daa094..480c5cd16 100644 --- a/graphene/tests/issues/test_356.py +++ b/graphene/tests/issues/test_356.py @@ -27,7 +27,7 @@ class Query(graphene.ObjectType): graphene.Schema(query=Query) assert str(exc_info.value) == ( - "Query fields cannot be resolved:" + "Query fields cannot be resolved." " IterableConnectionField type has to be a subclass of Connection." ' Received "MyUnion".' ) diff --git a/graphene/types/datetime.py b/graphene/types/datetime.py index 25a1248e1..c152668f7 100644 --- a/graphene/types/datetime.py +++ b/graphene/types/datetime.py @@ -3,8 +3,8 @@ import datetime from aniso8601 import parse_date, parse_datetime, parse_time -from graphql import Undefined -from graphql.language import StringValueNode +from graphql.error import GraphQLError +from graphql.language import StringValueNode, print_ast from .scalars import Scalar @@ -20,25 +20,30 @@ class Date(Scalar): def serialize(date): if isinstance(date, datetime.datetime): date = date.date() - assert isinstance( - date, datetime.date - ), 'Received not compatible date "{}"'.format(repr(date)) + if not isinstance(date, datetime.date): + raise GraphQLError("Date cannot represent value: {}".format(repr(date))) return date.isoformat() @classmethod def parse_literal(cls, node): - if isinstance(node, StringValueNode): - return cls.parse_value(node.value) + if not isinstance(node, StringValueNode): + raise GraphQLError( + "Date cannot represent non-string value: {}".format(print_ast(node)) + ) + return cls.parse_value(node.value) @staticmethod def parse_value(value): + if isinstance(value, datetime.date): + return value + if not isinstance(value, str): + raise GraphQLError( + "Date cannot represent non-string value: {}".format(repr(value)) + ) try: - if isinstance(value, datetime.date): - return value - elif isinstance(value, str): - return parse_date(value) 
+ return parse_date(value) except ValueError: - return Undefined + raise GraphQLError("Date cannot represent value: {}".format(repr(value))) class DateTime(Scalar): @@ -50,25 +55,32 @@ class DateTime(Scalar): @staticmethod def serialize(dt): - assert isinstance( - dt, (datetime.datetime, datetime.date) - ), 'Received not compatible datetime "{}"'.format(repr(dt)) + if not isinstance(dt, (datetime.datetime, datetime.date)): + raise GraphQLError("DateTime cannot represent value: {}".format(repr(dt))) return dt.isoformat() @classmethod def parse_literal(cls, node): - if isinstance(node, StringValueNode): - return cls.parse_value(node.value) + if not isinstance(node, StringValueNode): + raise GraphQLError( + "DateTime cannot represent non-string value: {}".format(print_ast(node)) + ) + return cls.parse_value(node.value) @staticmethod def parse_value(value): + if isinstance(value, datetime.datetime): + return value + if not isinstance(value, str): + raise GraphQLError( + "DateTime cannot represent non-string value: {}".format(repr(value)) + ) try: - if isinstance(value, datetime.datetime): - return value - elif isinstance(value, str): - return parse_datetime(value) + return parse_datetime(value) except ValueError: - return Undefined + raise GraphQLError( + "DateTime cannot represent value: {}".format(repr(value)) + ) class Time(Scalar): @@ -80,22 +92,27 @@ class Time(Scalar): @staticmethod def serialize(time): - assert isinstance( - time, datetime.time - ), 'Received not compatible time "{}"'.format(repr(time)) + if not isinstance(time, datetime.time): + raise GraphQLError("Time cannot represent value: {}".format(repr(time))) return time.isoformat() @classmethod def parse_literal(cls, node): - if isinstance(node, StringValueNode): - return cls.parse_value(node.value) + if not isinstance(node, StringValueNode): + raise GraphQLError( + "Time cannot represent non-string value: {}".format(print_ast(node)) + ) + return cls.parse_value(node.value) @classmethod def 
parse_value(cls, value): + if isinstance(value, datetime.time): + return value + if not isinstance(value, str): + raise GraphQLError( + "Time cannot represent non-string value: {}".format(repr(value)) + ) try: - if isinstance(value, datetime.time): - return value - elif isinstance(value, str): - return parse_time(value) + return parse_time(value) except ValueError: - return Undefined + raise GraphQLError("Time cannot represent value: {}".format(repr(value))) diff --git a/graphene/types/inputfield.py b/graphene/types/inputfield.py index bf3538e32..24d84b6c4 100644 --- a/graphene/types/inputfield.py +++ b/graphene/types/inputfield.py @@ -1,4 +1,5 @@ from graphql import Undefined + from .mountedtype import MountedType from .structures import NonNull from .utils import get_type diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 79b5315b5..d54f112a1 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -7,7 +7,6 @@ graphql, graphql_sync, introspection_types, - is_type, print_schema, GraphQLArgument, GraphQLBoolean, @@ -71,118 +70,74 @@ def is_graphene_type(type_): return True -def resolve_type(resolve_type_func, map_, type_name, root, info, _type): - type_ = resolve_type_func(root, info) - - if not type_: - return_type = map_[type_name] - return default_type_resolver(root, info, return_type) - - if inspect.isclass(type_) and issubclass(type_, ObjectType): - graphql_type = map_.get(type_._meta.name) - assert graphql_type, "Can't find type {} in schema".format(type_._meta.name) - assert graphql_type.graphene_type == type_, ( - "The type {} does not match with the associated graphene type {}." 
- ).format(type_, graphql_type.graphene_type) - return graphql_type - - return type_ - - def is_type_of_from_possible_types(possible_types, root, _info): return isinstance(root, possible_types) -class GrapheneGraphQLSchema(GraphQLSchema): - """A GraphQLSchema that can deal with Graphene types as well.""" - +class TypeMap(dict): def __init__( self, query=None, mutation=None, subscription=None, types=None, - directives=None, auto_camelcase=True, ): assert_valid_root_type(query) assert_valid_root_type(mutation) assert_valid_root_type(subscription) + if types is None: + types = [] + for type_ in types: + assert is_graphene_type(type_) self.auto_camelcase = auto_camelcase - super().__init__(query, mutation, subscription, types, directives) - - if query: - self.query_type = self.get_type( - query.name if isinstance(query, GraphQLObjectType) else query._meta.name - ) - if mutation: - self.mutation_type = self.get_type( - mutation.name - if isinstance(mutation, GraphQLObjectType) - else mutation._meta.name - ) - if subscription: - self.subscription_type = self.get_type( - subscription.name - if isinstance(subscription, GraphQLObjectType) - else subscription._meta.name - ) - def get_graphql_type(self, _type): - if not _type: - return _type - if is_type(_type): - return _type - if is_graphene_type(_type): - graphql_type = self.get_type(_type._meta.name) - assert graphql_type, "Type {} not found in this schema.".format( - _type._meta.name + create_graphql_type = self.add_type + + self.query = create_graphql_type(query) if query else None + self.mutation = create_graphql_type(mutation) if mutation else None + self.subscription = create_graphql_type(subscription) if subscription else None + + self.types = [create_graphql_type(graphene_type) for graphene_type in types] + + def add_type(self, graphene_type): + if inspect.isfunction(graphene_type): + graphene_type = graphene_type() + if isinstance(graphene_type, List): + return GraphQLList(self.add_type(graphene_type.of_type)) + 
if isinstance(graphene_type, NonNull): + return GraphQLNonNull(self.add_type(graphene_type.of_type)) + try: + name = graphene_type._meta.name + except AttributeError: + raise TypeError( + "Expected Graphene type, but received: {}.".format(graphene_type) ) - assert graphql_type.graphene_type == _type + graphql_type = self.get(name) + if graphql_type: return graphql_type - raise Exception("{} is not a valid GraphQL type.".format(_type)) - - # noinspection PyMethodOverriding - def type_map_reducer(self, map_, type_): - if not type_: - return map_ - if inspect.isfunction(type_): - type_ = type_() - if is_graphene_type(type_): - return self.graphene_reducer(map_, type_) - return super().type_map_reducer(map_, type_) - - def graphene_reducer(self, map_, type_): - if isinstance(type_, (List, NonNull)): - return self.type_map_reducer(map_, type_.of_type) - if type_._meta.name in map_: - _type = map_[type_._meta.name] - if isinstance(_type, GrapheneGraphQLType): - assert _type.graphene_type == type_, ( - "Found different types with the same name in the schema: {}, {}." 
- ).format(_type.graphene_type, type_) - return map_ - - if issubclass(type_, ObjectType): - internal_type = self.construct_objecttype(map_, type_) - elif issubclass(type_, InputObjectType): - internal_type = self.construct_inputobjecttype(map_, type_) - elif issubclass(type_, Interface): - internal_type = self.construct_interface(map_, type_) - elif issubclass(type_, Scalar): - internal_type = self.construct_scalar(type_) - elif issubclass(type_, Enum): - internal_type = self.construct_enum(type_) - elif issubclass(type_, Union): - internal_type = self.construct_union(map_, type_) + if issubclass(graphene_type, ObjectType): + graphql_type = self.create_objecttype(graphene_type) + elif issubclass(graphene_type, InputObjectType): + graphql_type = self.create_inputobjecttype(graphene_type) + elif issubclass(graphene_type, Interface): + graphql_type = self.create_interface(graphene_type) + elif issubclass(graphene_type, Scalar): + graphql_type = self.create_scalar(graphene_type) + elif issubclass(graphene_type, Enum): + graphql_type = self.create_enum(graphene_type) + elif issubclass(graphene_type, Union): + graphql_type = self.construct_union(graphene_type) else: - raise Exception("Expected Graphene type, but received: {}.".format(type_)) - - return super().type_map_reducer(map_, internal_type) + raise TypeError( + "Expected Graphene type, but received: {}.".format(graphene_type) + ) + self[name] = graphql_type + return graphql_type @staticmethod - def construct_scalar(type_): + def create_scalar(graphene_type): # We have a mapping to the original GraphQL types # so there are no collisions. 
_scalars = { @@ -192,29 +147,31 @@ def construct_scalar(type_): Boolean: GraphQLBoolean, ID: GraphQLID, } - if type_ in _scalars: - return _scalars[type_] + if graphene_type in _scalars: + return _scalars[graphene_type] return GrapheneScalarType( - graphene_type=type_, - name=type_._meta.name, - description=type_._meta.description, - serialize=getattr(type_, "serialize", None), - parse_value=getattr(type_, "parse_value", None), - parse_literal=getattr(type_, "parse_literal", None), + graphene_type=graphene_type, + name=graphene_type._meta.name, + description=graphene_type._meta.description, + serialize=getattr(graphene_type, "serialize", None), + parse_value=getattr(graphene_type, "parse_value", None), + parse_literal=getattr(graphene_type, "parse_literal", None), ) @staticmethod - def construct_enum(type_): + def create_enum(graphene_type): values = {} - for name, value in type_._meta.enum.__members__.items(): + for name, value in graphene_type._meta.enum.__members__.items(): description = getattr(value, "description", None) deprecation_reason = getattr(value, "deprecation_reason", None) - if not description and callable(type_._meta.description): - description = type_._meta.description(value) + if not description and callable(graphene_type._meta.description): + description = graphene_type._meta.description(value) - if not deprecation_reason and callable(type_._meta.deprecation_reason): - deprecation_reason = type_._meta.deprecation_reason(value) + if not deprecation_reason and callable( + graphene_type._meta.deprecation_reason + ): + deprecation_reason = graphene_type._meta.deprecation_reason(value) values[name] = GraphQLEnumValue( value=value.value, @@ -223,107 +180,98 @@ def construct_enum(type_): ) type_description = ( - type_._meta.description(None) - if callable(type_._meta.description) - else type_._meta.description + graphene_type._meta.description(None) + if callable(graphene_type._meta.description) + else graphene_type._meta.description ) return 
GrapheneEnumType( - graphene_type=type_, + graphene_type=graphene_type, values=values, - name=type_._meta.name, + name=graphene_type._meta.name, description=type_description, ) - def construct_objecttype(self, map_, type_): - if type_._meta.name in map_: - _type = map_[type_._meta.name] - if isinstance(_type, GrapheneGraphQLType): - assert _type.graphene_type == type_, ( - "Found different types with the same name in the schema: {}, {}." - ).format(_type.graphene_type, type_) - return _type + def create_objecttype(self, graphene_type): + create_graphql_type = self.add_type def interfaces(): interfaces = [] - for interface in type_._meta.interfaces: - self.graphene_reducer(map_, interface) - internal_type = map_[interface._meta.name] - assert internal_type.graphene_type == interface - interfaces.append(internal_type) + for graphene_interface in graphene_type._meta.interfaces: + interface = create_graphql_type(graphene_interface) + assert interface.graphene_type == graphene_interface + interfaces.append(interface) return interfaces - if type_._meta.possible_types: + if graphene_type._meta.possible_types: is_type_of = partial( - is_type_of_from_possible_types, type_._meta.possible_types + is_type_of_from_possible_types, graphene_type._meta.possible_types ) else: - is_type_of = type_.is_type_of + is_type_of = graphene_type.is_type_of return GrapheneObjectType( - graphene_type=type_, - name=type_._meta.name, - description=type_._meta.description, - fields=partial(self.construct_fields_for_type, map_, type_), + graphene_type=graphene_type, + name=graphene_type._meta.name, + description=graphene_type._meta.description, + fields=partial(self.create_fields_for_type, graphene_type), is_type_of=is_type_of, interfaces=interfaces, ) - def construct_interface(self, map_, type_): - if type_._meta.name in map_: - _type = map_[type_._meta.name] - if isinstance(_type, GrapheneInterfaceType): - assert _type.graphene_type == type_, ( - "Found different types with the same name in the 
schema: {}, {}." - ).format(_type.graphene_type, type_) - return _type - - _resolve_type = None - if type_.resolve_type: - _resolve_type = partial( - resolve_type, type_.resolve_type, map_, type_._meta.name + def create_interface(self, graphene_type): + resolve_type = ( + partial( + self.resolve_type, graphene_type.resolve_type, graphene_type._meta.name ) + if graphene_type.resolve_type + else None + ) + return GrapheneInterfaceType( - graphene_type=type_, - name=type_._meta.name, - description=type_._meta.description, - fields=partial(self.construct_fields_for_type, map_, type_), - resolve_type=_resolve_type, + graphene_type=graphene_type, + name=graphene_type._meta.name, + description=graphene_type._meta.description, + fields=partial(self.create_fields_for_type, graphene_type), + resolve_type=resolve_type, ) - def construct_inputobjecttype(self, map_, type_): + def create_inputobjecttype(self, graphene_type): return GrapheneInputObjectType( - graphene_type=type_, - name=type_._meta.name, - description=type_._meta.description, - out_type=type_._meta.container, + graphene_type=graphene_type, + name=graphene_type._meta.name, + description=graphene_type._meta.description, + out_type=graphene_type._meta.container, fields=partial( - self.construct_fields_for_type, map_, type_, is_input_type=True + self.create_fields_for_type, graphene_type, is_input_type=True ), ) - def construct_union(self, map_, type_): - _resolve_type = None - if type_.resolve_type: - _resolve_type = partial( - resolve_type, type_.resolve_type, map_, type_._meta.name - ) + def construct_union(self, graphene_type): + create_graphql_type = self.add_type def types(): union_types = [] - for objecttype in type_._meta.types: - self.graphene_reducer(map_, objecttype) - internal_type = map_[objecttype._meta.name] - assert internal_type.graphene_type == objecttype - union_types.append(internal_type) + for graphene_objecttype in graphene_type._meta.types: + object_type = 
create_graphql_type(graphene_objecttype) + assert object_type.graphene_type == graphene_objecttype + union_types.append(object_type) return union_types + resolve_type = ( + partial( + self.resolve_type, graphene_type.resolve_type, graphene_type._meta.name + ) + if graphene_type.resolve_type + else None + ) + return GrapheneUnionType( - graphene_type=type_, - name=type_._meta.name, - description=type_._meta.description, + graphene_type=graphene_type, + name=graphene_type._meta.name, + description=graphene_type._meta.description, types=types, - resolve_type=_resolve_type, + resolve_type=resolve_type, ) def get_name(self, name): @@ -331,15 +279,16 @@ def get_name(self, name): return to_camel_case(name) return name - def construct_fields_for_type(self, map_, type_, is_input_type=False): + def create_fields_for_type(self, graphene_type, is_input_type=False): + create_graphql_type = self.add_type + fields = {} - for name, field in type_._meta.fields.items(): + for name, field in graphene_type._meta.fields.items(): if isinstance(field, Dynamic): field = get_field_as(field.get_type(self), _as=Field) if not field: continue - map_ = self.type_map_reducer(map_, field.type) - field_type = self.get_field_type(map_, field.type) + field_type = create_graphql_type(field.type) if is_input_type: _field = GraphQLInputField( field_type, @@ -350,8 +299,7 @@ def construct_fields_for_type(self, map_, type_, is_input_type=False): else: args = {} for arg_name, arg in field.args.items(): - map_ = self.type_map_reducer(map_, arg.type) - arg_type = self.get_field_type(map_, arg.type) + arg_type = create_graphql_type(arg.type) processed_arg_name = arg.name or self.get_name(arg_name) args[processed_arg_name] = GraphQLArgument( arg_type, @@ -361,12 +309,13 @@ def construct_fields_for_type(self, map_, type_, is_input_type=False): if isinstance(arg.type, NonNull) else arg.default_value, ) + resolve = field.get_resolver( + self.get_resolver(graphene_type, name, field.default_value) + ) _field = 
GraphQLField( field_type, args=args, - resolve=field.get_resolver( - self.get_resolver_for_type(type_, name, field.default_value) - ), + resolve=resolve, deprecation_reason=field.deprecation_reason, description=field.description, ) @@ -374,15 +323,32 @@ def construct_fields_for_type(self, map_, type_, is_input_type=False): fields[field_name] = _field return fields - def get_resolver_for_type(self, type_, name, default_value): - if not issubclass(type_, ObjectType): + def resolve_type(self, resolve_type_func, type_name, root, info, _type): + type_ = resolve_type_func(root, info) + + if not type_: + return_type = self[type_name] + return default_type_resolver(root, info, return_type) + + if inspect.isclass(type_) and issubclass(type_, ObjectType): + graphql_type = self.get(type_._meta.name) + assert graphql_type, "Can't find type {} in schema".format(type_._meta.name) + assert graphql_type.graphene_type == type_, ( + "The type {} does not match with the associated graphene type {}." + ).format(type_, graphql_type.graphene_type) + return graphql_type + + return type_ + + def get_resolver(self, graphene_type, name, default_value): + if not issubclass(graphene_type, ObjectType): return - resolver = getattr(type_, "resolve_{}".format(name), None) + resolver = getattr(graphene_type, "resolve_{}".format(name), None) if not resolver: # If we don't find the resolver in the ObjectType class, then try to # find it in each of the interfaces interface_resolver = None - for interface in type_._meta.interfaces: + for interface in graphene_type._meta.interfaces: if name not in interface._meta.fields: continue interface_resolver = getattr(interface, "resolve_{}".format(name), None) @@ -394,16 +360,11 @@ def get_resolver_for_type(self, type_, name, default_value): if resolver: return get_unbound_function(resolver) - default_resolver = type_._meta.default_resolver or get_default_resolver() + default_resolver = ( + graphene_type._meta.default_resolver or get_default_resolver() + ) 
return partial(default_resolver, name, default_value) - def get_field_type(self, map_, type_): - if isinstance(type_, List): - return GraphQLList(self.get_field_type(map_, type_.of_type)) - if isinstance(type_, NonNull): - return GraphQLNonNull(self.get_field_type(map_, type_.of_type)) - return map_.get(type_._meta.name) - class Schema: """Schema Definition. @@ -419,11 +380,11 @@ class Schema: fields to *create, update or delete* data in your API. subscription (ObjectType, optional): Root subscription *ObjectType*. Describes entry point for fields to receive continuous updates. + types (List[ObjectType], optional): List of any types to include in schema that + may not be introspected through root types. directives (List[GraphQLDirective], optional): List of custom directives to include in the GraphQL schema. Defaults to only include directives defined by GraphQL spec (@include and @skip) [GraphQLIncludeDirective, GraphQLSkipDirective]. - types (List[GraphQLType], optional): List of any types to include in schema that - may not be introspected through root types. auto_camelcase (bool): Fieldnames will be transformed in Schema's TypeMap from snake_case to camelCase (preferred by GraphQL standard). Default True. 
""" @@ -440,13 +401,15 @@ def __init__( self.query = query self.mutation = mutation self.subscription = subscription - self.graphql_schema = GrapheneGraphQLSchema( - query, - mutation, - subscription, - types, + type_map = TypeMap( + query, mutation, subscription, types, auto_camelcase=auto_camelcase + ) + self.graphql_schema = GraphQLSchema( + type_map.query, + type_map.mutation, + type_map.subscription, + type_map.types, directives, - auto_camelcase=auto_camelcase, ) def __str__(self): diff --git a/graphene/types/tests/test_datetime.py b/graphene/types/tests/test_datetime.py index bfd56c6c0..8bc20a41f 100644 --- a/graphene/types/tests/test_datetime.py +++ b/graphene/types/tests/test_datetime.py @@ -3,7 +3,7 @@ import pytz from graphql import GraphQLError -from pytest import fixture, mark +from pytest import fixture from ..datetime import Date, DateTime, Time from ..objecttype import ObjectType @@ -84,8 +84,9 @@ def test_bad_datetime_query(): assert result.errors and len(result.errors) == 1 error = result.errors[0] assert isinstance(error, GraphQLError) - assert error.message == ( - 'Expected type DateTime, found "Some string that\'s not a datetime".' + assert ( + error.message == "DateTime cannot represent value:" + ' "Some string that\'s not a datetime"' ) assert result.data is None @@ -97,8 +98,9 @@ def test_bad_date_query(): error = result.errors[0] assert isinstance(error, GraphQLError) - assert error.message == ( - 'Expected type Date, found "Some string that\'s not a date".' + assert ( + error.message == "Date cannot represent value:" + ' "Some string that\'s not a date"' ) assert result.data is None @@ -110,8 +112,9 @@ def test_bad_time_query(): error = result.errors[0] assert isinstance(error, GraphQLError) - assert error.message == ( - 'Expected type Time, found "Some string that\'s not a time".' 
+ assert ( + error.message == "Time cannot represent value:" + ' "Some string that\'s not a time"' ) assert result.data is None @@ -174,9 +177,6 @@ def test_time_query_variable(sample_time): assert result.data == {"time": isoformat} -@mark.xfail( - reason="creating the error message fails when un-parsable object is not JSON serializable." -) def test_bad_variables(sample_date, sample_datetime, sample_time): def _test_bad_variables(type_, input_): result = schema.execute( @@ -185,8 +185,6 @@ def _test_bad_variables(type_, input_): ), variables={"input": input_}, ) - # when `input` is not JSON serializable formatting the error message in - # `graphql.utils.is_valid_value` line 79 fails with a TypeError assert isinstance(result.errors, list) assert len(result.errors) == 1 assert isinstance(result.errors[0], GraphQLError) @@ -205,7 +203,6 @@ def _test_bad_variables(type_, input_): ("DateTime", time), ("Date", not_a_date), ("Date", not_a_date_str), - ("Date", now), ("Date", time), ("Time", not_a_date), ("Time", not_a_date_str), diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index 29581122e..7a1c299a3 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -1,5 +1,6 @@ from pytest import raises +from graphql.type import GraphQLObjectType, GraphQLSchema from graphql.pyutils import dedent from ..field import Field @@ -17,8 +18,13 @@ class Query(ObjectType): def test_schema(): - schema = Schema(Query).graphql_schema - assert schema.query_type == schema.get_graphql_type(Query) + schema = Schema(Query) + graphql_schema = schema.graphql_schema + assert isinstance(graphql_schema, GraphQLSchema) + query_type = graphql_schema.query_type + assert isinstance(query_type, GraphQLObjectType) + assert query_type.name == "Query" + assert query_type.graphene_type is Query def test_schema_get_type(): @@ -39,13 +45,13 @@ def test_schema_str(): schema = Schema(Query) assert str(schema) == dedent( """ - type 
MyOtherType { - field: String - } - type Query { inner: MyOtherType } + + type MyOtherType { + field: String + } """ ) diff --git a/graphene/types/tests/test_type_map.py b/graphene/types/tests/test_type_map.py index 0ef3af1be..2dbbe6bbb 100644 --- a/graphene/types/tests/test_type_map.py +++ b/graphene/types/tests/test_type_map.py @@ -1,5 +1,3 @@ -from pytest import raises - from graphql.type import ( GraphQLArgument, GraphQLEnumType, @@ -21,13 +19,13 @@ from ..objecttype import ObjectType from ..scalars import Int, String from ..structures import List, NonNull -from ..schema import GrapheneGraphQLSchema, resolve_type +from ..schema import Schema def create_type_map(types, auto_camelcase=True): - query = GraphQLObjectType("Query", {}) - schema = GrapheneGraphQLSchema(query, types=types, auto_camelcase=auto_camelcase) - return schema.type_map + query = type("Query", (ObjectType,), {}) + schema = Schema(query, types=types, auto_camelcase=auto_camelcase) + return schema.graphql_schema.type_map def test_enum(): @@ -272,20 +270,3 @@ class Meta: assert graphql_type.is_type_of assert graphql_type.is_type_of({}, None) is True assert graphql_type.is_type_of(MyObjectType(), None) is False - - -def test_resolve_type_with_missing_type(): - class MyObjectType(ObjectType): - foo_bar = String() - - class MyOtherObjectType(ObjectType): - fizz_buzz = String() - - def resolve_type_func(root, info): - return MyOtherObjectType - - type_map = create_type_map([MyObjectType]) - with raises(AssertionError) as excinfo: - resolve_type(resolve_type_func, type_map, "MyOtherObjectType", {}, {}, None) - - assert "MyOtherObjectTyp" in str(excinfo.value) diff --git a/setup.py b/setup.py index 084c7707b..4b3369895 100644 --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ def run_tests(self): keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["tests", "tests.*", "examples"]), install_requires=[ - "graphql-core>=3.0.3,<3.1", + "graphql-core>=3.1.0b1,<4", 
"graphql-relay>=3.0,<4", "aniso8601>=8,<9", "unidecode>=1.1.1,<2", From 5d97c848e00d71863c270ecc686597ec46e3a0b5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Mar 2020 12:44:53 +0100 Subject: [PATCH 025/141] Remove misleading comment The comment originally referred to the __metaclass__ attribute which is gone now. --- graphene/utils/subclass_with_meta.py | 1 - 1 file changed, 1 deletion(-) diff --git a/graphene/utils/subclass_with_meta.py b/graphene/utils/subclass_with_meta.py index c6ba2d3fb..09f08a880 100644 --- a/graphene/utils/subclass_with_meta.py +++ b/graphene/utils/subclass_with_meta.py @@ -19,7 +19,6 @@ def __repr__(cls): class SubclassWithMeta(metaclass=SubclassWithMeta_Meta): """This class improves __init_subclass__ to receive automatically the options from meta""" - # We will only have the metaclass in Python 2 def __init_subclass__(cls, **meta_options): """This method just terminates the super() chain""" _Meta = getattr(cls, "Meta", None) From 88f79b2850e8fc38254d2c1d8f3900bf3f55dea1 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Mar 2020 15:26:09 +0100 Subject: [PATCH 026/141] Fix types in Schema docstring (#1100) --- graphene/types/schema.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index d54f112a1..f1d1337e9 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -374,13 +374,13 @@ class Schema: questions about the types through introspection. Args: - query (ObjectType): Root query *ObjectType*. Describes entry point for fields to *read* + query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read* data in your Schema. - mutation (ObjectType, optional): Root mutation *ObjectType*. Describes entry point for + mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for fields to *create, update or delete* data in your API. 
- subscription (ObjectType, optional): Root subscription *ObjectType*. Describes entry point + subscription (Optional[Type[ObjectType]]): Root subscription *ObjectType*. Describes entry point for fields to receive continuous updates. - types (List[ObjectType], optional): List of any types to include in schema that + types (Optional[List[Type[ObjectType]]]): List of any types to include in schema that may not be introspected through root types. directives (List[GraphQLDirective], optional): List of custom directives to include in the GraphQL schema. Defaults to only include directives defined by GraphQL spec (@include From 1cf303a27bf1a83dc4ed14f071a91c0d02f22825 Mon Sep 17 00:00:00 2001 From: Rob Blackbourn Date: Sat, 14 Mar 2020 16:48:12 +0000 Subject: [PATCH 027/141] Added support for subscription (#1107) * Added support for subscription * Added pre-commit hooks for black and formatted changed files * Checked with flake8 * Integrated changes from master. Co-authored-by: Rob Blackbourn Co-authored-by: Rob Blackbourn --- docs/execution/execute.rst | 37 ++++++++++++++++++++++++++ graphene/types/schema.py | 47 ++++++++++++++++++++++++++++++--- tests_asyncio/test_subscribe.py | 33 +++++++++++++++++++++++ 3 files changed, 113 insertions(+), 4 deletions(-) create mode 100644 tests_asyncio/test_subscribe.py diff --git a/docs/execution/execute.rst b/docs/execution/execute.rst index f0ea88537..cd29d72da 100644 --- a/docs/execution/execute.rst +++ b/docs/execution/execute.rst @@ -17,6 +17,43 @@ For executing a query against a schema, you can directly call the ``execute`` me ``result`` represents the result of execution. ``result.data`` is the result of executing the query, ``result.errors`` is ``None`` if no errors occurred, and is a non-empty list if an error occurred. +For executing a subscription, you can directly call the ``subscribe`` method on it. +This method is async and must be awaited. + +.. 
code:: python + + import asyncio + from datetime import datetime + from graphene import ObjectType, String, Schema, Field + + # All schema require a query. + class Query(ObjectType): + hello = String() + + def resolve_hello(root, info): + return 'Hello, world!' + + class Subscription(ObjectType): + time_of_day = Field(String) + + async def subscribe_time_of_day(root, info): + while True: + yield { 'time_of_day': datetime.now().isoformat()} + await asyncio.sleep(1) + + SCHEMA = Schema(query=Query, subscription=Subscription) + + async def main(schema): + + subscription = 'subscription { timeOfDay }' + result = await schema.subscribe(subscription) + async for item in result: + print(item.data['timeOfDay']) + + asyncio.run(main(SCHEMA)) + +The ``result`` is an async iterator which yields items in the same manner as a query. + .. _SchemaExecuteContext: Context diff --git a/graphene/types/schema.py b/graphene/types/schema.py index f1d1337e9..5228fb44c 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -7,7 +7,9 @@ graphql, graphql_sync, introspection_types, + parse, print_schema, + subscribe, GraphQLArgument, GraphQLBoolean, GraphQLEnumValue, @@ -309,13 +311,19 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): if isinstance(arg.type, NonNull) else arg.default_value, ) - resolve = field.get_resolver( - self.get_resolver(graphene_type, name, field.default_value) - ) _field = GraphQLField( field_type, args=args, - resolve=resolve, + resolve=field.get_resolver( + self.get_resolver_for_type( + graphene_type, "resolve_{}", name, field.default_value + ) + ), + subscribe=field.get_resolver( + self.get_resolver_for_type( + graphene_type, "subscribe_{}", name, field.default_value + ) + ), deprecation_reason=field.deprecation_reason, description=field.description, ) @@ -323,6 +331,32 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): fields[field_name] = _field return fields + def get_resolver_for_type(self, 
graphene_type, pattern, name, default_value): + if not issubclass(graphene_type, ObjectType): + return + func_name = pattern.format(name) + resolver = getattr(graphene_type, func_name, None) + if not resolver: + # If we don't find the resolver in the ObjectType class, then try to + # find it in each of the interfaces + interface_resolver = None + for interface in graphene_type._meta.interfaces: + if name not in interface._meta.fields: + continue + interface_resolver = getattr(interface, func_name, None) + if interface_resolver: + break + resolver = interface_resolver + + # Only if is not decorated with classmethod + if resolver: + return get_unbound_function(resolver) + + default_resolver = ( + graphene_type._meta.default_resolver or get_default_resolver() + ) + return partial(default_resolver, name, default_value) + def resolve_type(self, resolve_type_func, type_name, root, info, _type): type_ = resolve_type_func(root, info) @@ -468,6 +502,11 @@ async def execute_async(self, *args, **kwargs): kwargs = normalize_execute_kwargs(kwargs) return await graphql(self.graphql_schema, *args, **kwargs) + async def subscribe(self, query, *args, **kwargs): + document = parse(query) + kwargs = normalize_execute_kwargs(kwargs) + return await subscribe(self.graphql_schema, document, *args, **kwargs) + def introspect(self): introspection = self.execute(introspection_query) if introspection.errors: diff --git a/tests_asyncio/test_subscribe.py b/tests_asyncio/test_subscribe.py new file mode 100644 index 000000000..bf985d580 --- /dev/null +++ b/tests_asyncio/test_subscribe.py @@ -0,0 +1,33 @@ +from pytest import mark + +from graphene import ObjectType, Int, String, Schema, Field + + +class Query(ObjectType): + hello = String() + + def resolve_hello(root, info): + return "Hello, world!" 
+ + +class Subscription(ObjectType): + count_to_ten = Field(Int) + + async def subscribe_count_to_ten(root, info): + count = 0 + while count < 10: + count += 1 + yield {"count_to_ten": count} + + +schema = Schema(query=Query, subscription=Subscription) + + +@mark.asyncio +async def test_subscription(): + subscription = "subscription { countToTen }" + result = await schema.subscribe(subscription) + count = 0 + async for item in result: + count = item.data["countToTen"] + assert count == 10 From 14183012a86b0accaa4c8b72984c863d68a84160 Mon Sep 17 00:00:00 2001 From: Syrus Akbary Date: Sat, 14 Mar 2020 13:19:28 -0700 Subject: [PATCH 028/141] Remove subclass polyfill (#1156) The subclass polyfill was only needed for Python 2.7-3.5 Python 3.6 introduced the __init_subclass__, so since Graphene now requires Python 3.6+, this is no longer needed. https://www.python.org/dev/peps/pep-0487/ --- graphene/pyutils/init_subclass.py | 23 ----------------------- graphene/utils/subclass_with_meta.py | 3 +-- 2 files changed, 1 insertion(+), 25 deletions(-) delete mode 100644 graphene/pyutils/init_subclass.py diff --git a/graphene/pyutils/init_subclass.py b/graphene/pyutils/init_subclass.py deleted file mode 100644 index 81198c9c0..000000000 --- a/graphene/pyutils/init_subclass.py +++ /dev/null @@ -1,23 +0,0 @@ -is_init_subclass_available = hasattr(object, "__init_subclass__") - -if not is_init_subclass_available: - - class InitSubclassMeta(type): - """Metaclass that implements PEP 487 protocol""" - - def __new__(cls, name, bases, ns, **kwargs): - __init_subclass__ = ns.pop("__init_subclass__", None) - if __init_subclass__: - __init_subclass__ = classmethod(__init_subclass__) - ns["__init_subclass__"] = __init_subclass__ - return super(InitSubclassMeta, cls).__new__(cls, name, bases, ns, **kwargs) - - def __init__(cls, name, bases, ns, **kwargs): - super(InitSubclassMeta, cls).__init__(name, bases, ns) - super_class = super(cls, cls) - if hasattr(super_class, "__init_subclass__"): - 
super_class.__init_subclass__.__func__(cls, **kwargs) - - -else: - InitSubclassMeta = type # type: ignore diff --git a/graphene/utils/subclass_with_meta.py b/graphene/utils/subclass_with_meta.py index 09f08a880..8900ad532 100644 --- a/graphene/utils/subclass_with_meta.py +++ b/graphene/utils/subclass_with_meta.py @@ -1,10 +1,9 @@ from inspect import isclass -from ..pyutils.init_subclass import InitSubclassMeta from .props import props -class SubclassWithMeta_Meta(InitSubclassMeta): +class SubclassWithMeta_Meta(type): _meta = None def __str__(cls): From 60a9609b9a3b97e13a25ea0c1330ff35e2332156 Mon Sep 17 00:00:00 2001 From: Syrus Akbary Date: Sat, 14 Mar 2020 17:32:44 -0700 Subject: [PATCH 029/141] =?UTF-8?q?Updated=20all=20str.format(=E2=80=A6)?= =?UTF-8?q?=20to=20f-strings=20(#1158)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Updated all str.format(…) to f-strings This revamps the PR #984 * Pass black * Fix flake8 * Updated objecttype * Fix black version --- .pre-commit-config.yaml | 2 +- UPGRADE-v2.0.md | 5 +-- docs/execution/middleware.rst | 9 ++--- docs/relay/nodes.rst | 2 +- docs/types/objecttypes.rst | 2 +- examples/complex_example.py | 2 +- graphene/relay/connection.py | 26 ++++++------- graphene/relay/mutation.py | 12 +++--- graphene/relay/node.py | 22 ++++------- graphene/relay/tests/test_connection_query.py | 36 ++++++------------ graphene/types/argument.py | 9 ++--- graphene/types/base.py | 4 +- graphene/types/datetime.py | 30 ++++++--------- graphene/types/decimal.py | 6 +-- graphene/types/field.py | 14 +++---- graphene/types/mountedtype.py | 8 ++-- graphene/types/mutation.py | 12 +++--- graphene/types/objecttype.py | 17 ++++----- graphene/types/schema.py | 38 +++++++++---------- graphene/types/structures.py | 15 ++++---- graphene/types/tests/test_datetime.py | 4 +- graphene/types/tests/test_enum.py | 2 +- graphene/types/tests/test_inputobjecttype.py | 2 +- graphene/types/tests/test_query.py | 6 +-- 
graphene/types/tests/test_type_map.py | 2 +- graphene/types/union.py | 2 +- graphene/types/unmountedtype.py | 2 +- graphene/types/uuid.py | 4 +- graphene/utils/deprecated.py | 14 +++---- graphene/utils/subclass_with_meta.py | 10 ++--- 30 files changed, 136 insertions(+), 183 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7aa720015..c9ffc21ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -18,7 +18,7 @@ repos: hooks: - id: pyupgrade - repo: https://github.com/ambv/black - rev: 19.3b0 + rev: 19.10b0 hooks: - id: black language_version: python3 diff --git a/UPGRADE-v2.0.md b/UPGRADE-v2.0.md index fed15923b..63f8f622f 100644 --- a/UPGRADE-v2.0.md +++ b/UPGRADE-v2.0.md @@ -377,10 +377,7 @@ class Base(ObjectType): id = ID() def resolve_id(root, info): - return "{type}_{id}".format( - type=root.__class__.__name__, - id=root.id - ) + return f"{root.__class__.__name__}_{root.id}" ``` ### UUID Scalar diff --git a/docs/execution/middleware.rst b/docs/execution/middleware.rst index 2a5e20f7a..0c5458b20 100644 --- a/docs/execution/middleware.rst +++ b/docs/execution/middleware.rst @@ -55,12 +55,9 @@ logs the time it takes to resolve each field def timing_middleware(next, root, info, **args): start = timer() return_value = next(root, info, **args) - duration = timer() - start - logger.debug("{parent_type}.{field_name}: {duration} ms".format( - parent_type=root._meta.name if root and hasattr(root, '_meta') else '', - field_name=info.field_name, - duration=round(duration * 1000, 2) - )) + duration = round((timer() - start) * 1000, 2) + parent_type_name = root._meta.name if root and hasattr(root, '_meta') else '' + logger.debug(f"{parent_type_name}.{info.field_name}: {duration} ms") return return_value diff --git a/docs/relay/nodes.rst b/docs/relay/nodes.rst index 7af00ea1e..ce9bc7d8c 100644 --- a/docs/relay/nodes.rst +++ b/docs/relay/nodes.rst @@ -52,7 +52,7 @@ Example of a custom node: @staticmethod def to_global_id(type, 
id): - return '{}:{}'.format(type, id) + return f"{type}:{id}" @staticmethod def get_node_from_global_id(info, global_id, only_type=None): diff --git a/docs/types/objecttypes.rst b/docs/types/objecttypes.rst index 77ab130b0..984acbf06 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -331,7 +331,7 @@ A field can use a custom resolver from outside the class: from graphene import ObjectType, String def resolve_full_name(person, info): - return '{} {}'.format(person.first_name, person.last_name) + return f"{person.first_name} {person.last_name}" class Person(ObjectType): first_name = String() diff --git a/examples/complex_example.py b/examples/complex_example.py index 3c2d77ba2..aaa48a4c3 100644 --- a/examples/complex_example.py +++ b/examples/complex_example.py @@ -7,7 +7,7 @@ class GeoInput(graphene.InputObjectType): @property def latlng(self): - return "({},{})".format(self.lat, self.lng) + return f"({self.lat},{self.lng})" class Address(graphene.ObjectType): diff --git a/graphene/relay/connection.py b/graphene/relay/connection.py index 8581a4b5e..90b558a1b 100644 --- a/graphene/relay/connection.py +++ b/graphene/relay/connection.py @@ -63,16 +63,14 @@ class Meta: @classmethod def __init_subclass_with_meta__(cls, node=None, name=None, **options): _meta = ConnectionOptions(cls) - assert node, "You have to provide a node in {}.Meta".format(cls.__name__) + assert node, f"You have to provide a node in {cls.__name__}.Meta" assert isinstance(node, NonNull) or issubclass( node, (Scalar, Enum, ObjectType, Interface, Union, NonNull) - ), ('Received incompatible node "{}" for Connection {}.').format( - node, cls.__name__ - ) + ), f'Received incompatible node "{node}" for Connection {cls.__name__}.' 
base_name = re.sub("Connection$", "", name or cls.__name__) or node._meta.name if not name: - name = "{}Connection".format(base_name) + name = f"{base_name}Connection" edge_class = getattr(cls, "Edge", None) _node = node @@ -82,11 +80,9 @@ class EdgeBase: cursor = String(required=True, description="A cursor for use in pagination") class EdgeMeta: - description = "A Relay edge containing a `{}` and its cursor.".format( - base_name - ) + description = f"A Relay edge containing a `{base_name}` and its cursor." - edge_name = "{}Edge".format(base_name) + edge_name = f"{base_name}Edge" if edge_class: edge_bases = (edge_class, EdgeBase, ObjectType) else: @@ -141,9 +137,9 @@ def type(self): "Read more: https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#node-connections" ) - assert issubclass(connection_type, Connection), ( - '{} type has to be a subclass of Connection. Received "{}".' - ).format(self.__class__.__name__, connection_type) + assert issubclass( + connection_type, Connection + ), f'{self.__class__.__name__} type has to be a subclass of Connection. Received "{connection_type}".' return type @classmethod @@ -152,9 +148,9 @@ def resolve_connection(cls, connection_type, args, resolved): return resolved assert isinstance(resolved, Iterable), ( - "Resolved value from the connection field has to be an iterable or instance of {}. " - 'Received "{}"' - ).format(connection_type, resolved) + f"Resolved value from the connection field has to be an iterable or instance of {connection_type}. 
" + f'Received "{resolved}"' + ) connection = connection_from_array( resolved, args, diff --git a/graphene/relay/mutation.py b/graphene/relay/mutation.py index fce0c5982..2f4a4b738 100644 --- a/graphene/relay/mutation.py +++ b/graphene/relay/mutation.py @@ -27,7 +27,7 @@ def __init_subclass_with_meta__( input_fields = {} cls.Input = type( - "{}Input".format(base_name), + f"{base_name}Input", bases, dict(input_fields, client_mutation_id=String(name="clientMutationId")), ) @@ -39,12 +39,12 @@ def __init_subclass_with_meta__( mutate_and_get_payload = getattr(cls, "mutate_and_get_payload", None) if cls.mutate and cls.mutate.__func__ == ClientIDMutation.mutate.__func__: assert mutate_and_get_payload, ( - "{name}.mutate_and_get_payload method is required" + f"{name or cls.__name__}.mutate_and_get_payload method is required" " in a ClientIDMutation." - ).format(name=name or cls.__name__) + ) if not name: - name = "{}Payload".format(base_name) + name = f"{base_name}Payload" super(ClientIDMutation, cls).__init_subclass_with_meta__( output=None, arguments=arguments, name=name, **options @@ -58,9 +58,7 @@ def on_resolve(payload): payload.client_mutation_id = input.get("client_mutation_id") except Exception: raise Exception( - ("Cannot set client_mutation_id in the payload object {}").format( - repr(payload) - ) + f"Cannot set client_mutation_id in the payload object {repr(payload)}" ) return payload diff --git a/graphene/relay/node.py b/graphene/relay/node.py index f8927ab76..a9d36adc0 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -57,7 +57,7 @@ def __init__(self, node, type=False, **kwargs): # interface type or node, id=ID(required=True, description="The ID of the object"), - **kwargs + **kwargs, ) def get_resolver(self, parent_resolver): @@ -93,33 +93,27 @@ def get_node_from_global_id(cls, info, global_id, only_type=None): except Exception as e: raise Exception( ( - 'Unable to parse global ID "{global_id}". 
' + f'Unable to parse global ID "{global_id}". ' 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' - "Exception message: {exception}".format( - global_id=global_id, exception=str(e) - ) + f"Exception message: {str(e)}" ) ) graphene_type = info.schema.get_type(_type) if graphene_type is None: - raise Exception( - 'Relay Node "{_type}" not found in schema'.format(_type=_type) - ) + raise Exception(f'Relay Node "{_type}" not found in schema') graphene_type = graphene_type.graphene_type if only_type: - assert graphene_type == only_type, ("Must receive a {} id.").format( - only_type._meta.name - ) + assert ( + graphene_type == only_type + ), f"Must receive a {only_type._meta.name} id." # We make sure the ObjectType implements the "Node" interface if cls not in graphene_type._meta.interfaces: raise Exception( - 'ObjectType "{_type}" does not implement the "{cls}" interface.'.format( - _type=_type, cls=cls - ) + f'ObjectType "{_type}" does not implement the "{cls}" interface.' 
) get_node = getattr(graphene_type, "get_node", None) diff --git a/graphene/relay/tests/test_connection_query.py b/graphene/relay/tests/test_connection_query.py index e109067ba..cac4b65b0 100644 --- a/graphene/relay/tests/test_connection_query.py +++ b/graphene/relay/tests/test_connection_query.py @@ -134,32 +134,28 @@ async def test_respects_an_overly_large_last(): @mark.asyncio async def test_respects_first_and_after(): - await check( - 'first: 2, after: "{}"'.format(cursor_for("B")), "CD", has_next_page=True - ) + await check(f'first: 2, after: "{cursor_for("B")}"', "CD", has_next_page=True) @mark.asyncio async def test_respects_first_and_after_with_long_first(): - await check('first: 10, after: "{}"'.format(cursor_for("B")), "CDE") + await check(f'first: 10, after: "{cursor_for("B")}"', "CDE") @mark.asyncio async def test_respects_last_and_before(): - await check( - 'last: 2, before: "{}"'.format(cursor_for("D")), "BC", has_previous_page=True - ) + await check(f'last: 2, before: "{cursor_for("D")}"', "BC", has_previous_page=True) @mark.asyncio async def test_respects_last_and_before_with_long_last(): - await check('last: 10, before: "{}"'.format(cursor_for("D")), "ABC") + await check(f'last: 10, before: "{cursor_for("D")}"', "ABC") @mark.asyncio async def test_respects_first_and_after_and_before_too_few(): await check( - 'first: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), + f'first: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BC", has_next_page=True, ) @@ -168,23 +164,21 @@ async def test_respects_first_and_after_and_before_too_few(): @mark.asyncio async def test_respects_first_and_after_and_before_too_many(): await check( - 'first: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), - "BCD", + f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", ) @mark.asyncio async def test_respects_first_and_after_and_before_exactly_right(): await check( - 'first: 3, after: "{}", 
before: "{}"'.format(cursor_for("A"), cursor_for("E")), - "BCD", + f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", ) @mark.asyncio async def test_respects_last_and_after_and_before_too_few(): await check( - 'last: 2, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), + f'last: 2, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "CD", has_previous_page=True, ) @@ -193,16 +187,14 @@ async def test_respects_last_and_after_and_before_too_few(): @mark.asyncio async def test_respects_last_and_after_and_before_too_many(): await check( - 'last: 4, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), - "BCD", + f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", ) @mark.asyncio async def test_respects_last_and_after_and_before_exactly_right(): await check( - 'last: 3, after: "{}", before: "{}"'.format(cursor_for("A"), cursor_for("E")), - "BCD", + f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", ) @@ -219,9 +211,7 @@ async def test_returns_all_elements_if_cursors_are_invalid(): @mark.asyncio async def test_returns_all_elements_if_cursors_are_on_the_outside(): await check( - 'before: "{}" after: "{}"'.format( - base64("arrayconnection:%s" % 6), base64("arrayconnection:%s" % -1) - ), + f'before: "{base64("arrayconnection:%s" % 6)}" after: "{base64("arrayconnection:%s" % -1)}"', "ABCDE", ) @@ -229,9 +219,7 @@ async def test_returns_all_elements_if_cursors_are_on_the_outside(): @mark.asyncio async def test_returns_no_elements_if_cursors_cross(): await check( - 'before: "{}" after: "{}"'.format( - base64("arrayconnection:%s" % 2), base64("arrayconnection:%s" % 4) - ), + f'before: "{base64("arrayconnection:%s" % 2)}" after: "{base64("arrayconnection:%s" % 4)}"', "", ) diff --git a/graphene/types/argument.py b/graphene/types/argument.py index cdc21d4bb..897b7ecd2 100644 --- a/graphene/types/argument.py +++ b/graphene/types/argument.py @@ -94,18 +94,17 @@ def 
to_arguments(args, extra_args=None): if isinstance(arg, (InputField, Field)): raise ValueError( - "Expected {} to be Argument, but received {}. Try using Argument({}).".format( - default_name, type(arg).__name__, arg.type - ) + f"Expected {default_name} to be Argument, " + f"but received {type(arg).__name__}. Try using Argument({arg.type})." ) if not isinstance(arg, Argument): - raise ValueError('Unknown argument "{}".'.format(default_name)) + raise ValueError(f'Unknown argument "{default_name}".') arg_name = default_name or arg.name assert ( arg_name not in arguments - ), 'More than one Argument have same name "{}".'.format(arg_name) + ), f'More than one Argument have same name "{arg_name}".' arguments[arg_name] = arg return arguments diff --git a/graphene/types/base.py b/graphene/types/base.py index 79907b4d9..29d60fef1 100644 --- a/graphene/types/base.py +++ b/graphene/types/base.py @@ -20,10 +20,10 @@ def __setattr__(self, name, value): if not self._frozen: super(BaseOptions, self).__setattr__(name, value) else: - raise Exception("Can't modify frozen Options {}".format(self)) + raise Exception(f"Can't modify frozen Options {self}") def __repr__(self): - return "<{} name={}>".format(self.__class__.__name__, repr(self.name)) + return f"<{self.__class__.__name__} name={repr(self.name)}>" class BaseType(SubclassWithMeta): diff --git a/graphene/types/datetime.py b/graphene/types/datetime.py index c152668f7..92234ba67 100644 --- a/graphene/types/datetime.py +++ b/graphene/types/datetime.py @@ -21,14 +21,14 @@ def serialize(date): if isinstance(date, datetime.datetime): date = date.date() if not isinstance(date, datetime.date): - raise GraphQLError("Date cannot represent value: {}".format(repr(date))) + raise GraphQLError(f"Date cannot represent value: {repr(date)}") return date.isoformat() @classmethod def parse_literal(cls, node): if not isinstance(node, StringValueNode): raise GraphQLError( - "Date cannot represent non-string value: {}".format(print_ast(node)) + 
f"Date cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @@ -37,13 +37,11 @@ def parse_value(value): if isinstance(value, datetime.date): return value if not isinstance(value, str): - raise GraphQLError( - "Date cannot represent non-string value: {}".format(repr(value)) - ) + raise GraphQLError(f"Date cannot represent non-string value: {repr(value)}") try: return parse_date(value) except ValueError: - raise GraphQLError("Date cannot represent value: {}".format(repr(value))) + raise GraphQLError(f"Date cannot represent value: {repr(value)}") class DateTime(Scalar): @@ -56,14 +54,14 @@ class DateTime(Scalar): @staticmethod def serialize(dt): if not isinstance(dt, (datetime.datetime, datetime.date)): - raise GraphQLError("DateTime cannot represent value: {}".format(repr(dt))) + raise GraphQLError(f"DateTime cannot represent value: {repr(dt)}") return dt.isoformat() @classmethod def parse_literal(cls, node): if not isinstance(node, StringValueNode): raise GraphQLError( - "DateTime cannot represent non-string value: {}".format(print_ast(node)) + f"DateTime cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @@ -73,14 +71,12 @@ def parse_value(value): return value if not isinstance(value, str): raise GraphQLError( - "DateTime cannot represent non-string value: {}".format(repr(value)) + f"DateTime cannot represent non-string value: {repr(value)}" ) try: return parse_datetime(value) except ValueError: - raise GraphQLError( - "DateTime cannot represent value: {}".format(repr(value)) - ) + raise GraphQLError(f"DateTime cannot represent value: {repr(value)}") class Time(Scalar): @@ -93,14 +89,14 @@ class Time(Scalar): @staticmethod def serialize(time): if not isinstance(time, datetime.time): - raise GraphQLError("Time cannot represent value: {}".format(repr(time))) + raise GraphQLError(f"Time cannot represent value: {repr(time)}") return time.isoformat() @classmethod def parse_literal(cls, node): 
if not isinstance(node, StringValueNode): raise GraphQLError( - "Time cannot represent non-string value: {}".format(print_ast(node)) + f"Time cannot represent non-string value: {print_ast(node)}" ) return cls.parse_value(node.value) @@ -109,10 +105,8 @@ def parse_value(cls, value): if isinstance(value, datetime.time): return value if not isinstance(value, str): - raise GraphQLError( - "Time cannot represent non-string value: {}".format(repr(value)) - ) + raise GraphQLError(f"Time cannot represent non-string value: {repr(value)}") try: return parse_time(value) except ValueError: - raise GraphQLError("Time cannot represent value: {}".format(repr(value))) + raise GraphQLError(f"Time cannot represent value: {repr(value)}") diff --git a/graphene/types/decimal.py b/graphene/types/decimal.py index 10a2609a9..028d6d289 100644 --- a/graphene/types/decimal.py +++ b/graphene/types/decimal.py @@ -16,9 +16,9 @@ class Decimal(Scalar): def serialize(dec): if isinstance(dec, str): dec = _Decimal(dec) - assert isinstance(dec, _Decimal), 'Received not compatible Decimal "{}"'.format( - repr(dec) - ) + assert isinstance( + dec, _Decimal + ), f'Received not compatible Decimal "{repr(dec)}"' return str(dec) @classmethod diff --git a/graphene/types/field.py b/graphene/types/field.py index 56c2ff671..b2c7766a9 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -72,18 +72,18 @@ def __init__( required=False, _creation_counter=None, default_value=None, - **extra_args + **extra_args, ): super(Field, self).__init__(_creation_counter=_creation_counter) - assert not args or isinstance(args, Mapping), ( - 'Arguments in a field have to be a mapping, received "{}".' - ).format(args) + assert not args or isinstance( + args, Mapping + ), f'Arguments in a field have to be a mapping, received "{args}".' assert not ( source and resolver ), "A Field cannot have a source and a resolver in at the same time." 
- assert not callable(default_value), ( - 'The default value can not be a function but received "{}".' - ).format(base_type(default_value)) + assert not callable( + default_value + ), f'The default value can not be a function but received "{base_type(default_value)}".' if required: type = NonNull(type) diff --git a/graphene/types/mountedtype.py b/graphene/types/mountedtype.py index 6d0c8cf85..c42383e24 100644 --- a/graphene/types/mountedtype.py +++ b/graphene/types/mountedtype.py @@ -8,13 +8,13 @@ def mounted(cls, unmounted): # noqa: N802 """ Mount the UnmountedType instance """ - assert isinstance(unmounted, UnmountedType), ("{} can't mount {}").format( - cls.__name__, repr(unmounted) - ) + assert isinstance( + unmounted, UnmountedType + ), f"{cls.__name__} can't mount {repr(unmounted)}" return cls( unmounted.get_type(), *unmounted.args, _creation_counter=unmounted.creation_counter, - **unmounted.kwargs + **unmounted.kwargs, ) diff --git a/graphene/types/mutation.py b/graphene/types/mutation.py index 0710d66f4..6e041bbfa 100644 --- a/graphene/types/mutation.py +++ b/graphene/types/mutation.py @@ -72,7 +72,7 @@ def __init_subclass_with_meta__( output=None, arguments=None, _meta=None, - **options + **options, ): if not _meta: _meta = MutationOptions(cls) @@ -81,9 +81,9 @@ def __init_subclass_with_meta__( fields = {} for interface in interfaces: - assert issubclass(interface, Interface), ( - 'All interfaces of {} must be a subclass of Interface. Received "{}".' - ).format(cls.__name__, interface) + assert issubclass( + interface, Interface + ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' fields.update(interface._meta.fields) if not output: @@ -100,11 +100,11 @@ def __init_subclass_with_meta__( if input_class: warn_deprecation( ( - "Please use {name}.Arguments instead of {name}.Input." + f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input." 
" Input is now only used in ClientMutationID.\n" "Read more:" " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" - ).format(name=cls.__name__) + ) ) if input_class: diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index cca23d111..9b6c7d7df 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -93,7 +93,7 @@ def __init_subclass_with_meta__( possible_types=(), default_resolver=None, _meta=None, - **options + **options, ): if not _meta: _meta = ObjectTypeOptions(cls) @@ -101,18 +101,18 @@ def __init_subclass_with_meta__( fields = {} for interface in interfaces: - assert issubclass(interface, Interface), ( - 'All interfaces of {} must be a subclass of Interface. Received "{}".' - ).format(cls.__name__, interface) + assert issubclass( + interface, Interface + ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' fields.update(interface._meta.fields) for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) assert not (possible_types and cls.is_type_of), ( - "{name}.Meta.possible_types will cause type collision with {name}.is_type_of. " + f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. " "Please use one or other." 
- ).format(name=cls.__name__) + ) if _meta.fields: _meta.fields.update(fields) @@ -165,7 +165,6 @@ def __init__(self, *args, **kwargs): pass if kwargs: raise TypeError( - "'{}' is an invalid keyword argument for {}".format( - list(kwargs)[0], self.__class__.__name__ - ) + f"'{list(kwargs)[0]}' is an invalid keyword argument" + f" for {self.__class__.__name__}" ) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 5228fb44c..458948f4d 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -58,9 +58,9 @@ def assert_valid_root_type(type_): return is_graphene_objecttype = inspect.isclass(type_) and issubclass(type_, ObjectType) is_graphql_objecttype = isinstance(type_, GraphQLObjectType) - assert is_graphene_objecttype or is_graphql_objecttype, ( - "Type {} is not a valid ObjectType." - ).format(type_) + assert ( + is_graphene_objecttype or is_graphql_objecttype + ), f"Type {type_} is not a valid ObjectType." def is_graphene_type(type_): @@ -113,9 +113,7 @@ def add_type(self, graphene_type): try: name = graphene_type._meta.name except AttributeError: - raise TypeError( - "Expected Graphene type, but received: {}.".format(graphene_type) - ) + raise TypeError(f"Expected Graphene type, but received: {graphene_type}.") graphql_type = self.get(name) if graphql_type: return graphql_type @@ -132,9 +130,7 @@ def add_type(self, graphene_type): elif issubclass(graphene_type, Union): graphql_type = self.construct_union(graphene_type) else: - raise TypeError( - "Expected Graphene type, but received: {}.".format(graphene_type) - ) + raise TypeError(f"Expected Graphene type, but received: {graphene_type}.") self[name] = graphql_type return graphql_type @@ -316,12 +312,15 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): args=args, resolve=field.get_resolver( self.get_resolver_for_type( - graphene_type, "resolve_{}", name, field.default_value + graphene_type, f"resolve_{name}", name, field.default_value ) ), 
subscribe=field.get_resolver( self.get_resolver_for_type( - graphene_type, "subscribe_{}", name, field.default_value + graphene_type, + f"subscribe_{name}", + name, + field.default_value, ) ), deprecation_reason=field.deprecation_reason, @@ -331,10 +330,9 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): fields[field_name] = _field return fields - def get_resolver_for_type(self, graphene_type, pattern, name, default_value): + def get_resolver_for_type(self, graphene_type, func_name, name, default_value): if not issubclass(graphene_type, ObjectType): return - func_name = pattern.format(name) resolver = getattr(graphene_type, func_name, None) if not resolver: # If we don't find the resolver in the ObjectType class, then try to @@ -366,10 +364,10 @@ def resolve_type(self, resolve_type_func, type_name, root, info, _type): if inspect.isclass(type_) and issubclass(type_, ObjectType): graphql_type = self.get(type_._meta.name) - assert graphql_type, "Can't find type {} in schema".format(type_._meta.name) - assert graphql_type.graphene_type == type_, ( - "The type {} does not match with the associated graphene type {}." - ).format(type_, graphql_type.graphene_type) + assert graphql_type, f"Can't find type {type_._meta.name} in schema" + assert ( + graphql_type.graphene_type == type_ + ), f"The type {type_} does not match with the associated graphene type {graphql_type.graphene_type}." 
return graphql_type return type_ @@ -377,7 +375,7 @@ def resolve_type(self, resolve_type_func, type_name, root, info, _type): def get_resolver(self, graphene_type, name, default_value): if not issubclass(graphene_type, ObjectType): return - resolver = getattr(graphene_type, "resolve_{}".format(name), None) + resolver = getattr(graphene_type, f"resolve_{name}", None) if not resolver: # If we don't find the resolver in the ObjectType class, then try to # find it in each of the interfaces @@ -385,7 +383,7 @@ def get_resolver(self, graphene_type, name, default_value): for interface in graphene_type._meta.interfaces: if name not in interface._meta.fields: continue - interface_resolver = getattr(interface, "resolve_{}".format(name), None) + interface_resolver = getattr(interface, f"resolve_{name}", None) if interface_resolver: break resolver = interface_resolver @@ -458,7 +456,7 @@ def __getattr__(self, type_name): """ _type = self.graphql_schema.get_type(type_name) if _type is None: - raise AttributeError('Type "{}" not found in the Schema'.format(type_name)) + raise AttributeError(f'Type "{type_name}" not found in the Schema') if isinstance(_type, GrapheneGraphQLType): return _type.graphene_type return _type diff --git a/graphene/types/structures.py b/graphene/types/structures.py index 3341e0227..a6763978e 100644 --- a/graphene/types/structures.py +++ b/graphene/types/structures.py @@ -14,9 +14,8 @@ def __init__(self, of_type, *args, **kwargs): cls_name = type(self).__name__ of_type_name = type(of_type).__name__ raise Exception( - "{} could not have a mounted {}() as inner type. Try with {}({}).".format( - cls_name, of_type_name, cls_name, of_type_name - ) + f"{cls_name} could not have a mounted {of_type_name}()" + f" as inner type. Try with {cls_name}({of_type_name})." 
) self._of_type = of_type @@ -50,7 +49,7 @@ class List(Structure): """ def __str__(self): - return "[{}]".format(self.of_type) + return f"[{self.of_type}]" def __eq__(self, other): return isinstance(other, List) and ( @@ -85,12 +84,12 @@ class NonNull(Structure): def __init__(self, *args, **kwargs): super(NonNull, self).__init__(*args, **kwargs) - assert not isinstance(self._of_type, NonNull), ( - "Can only create NonNull of a Nullable GraphQLType but got: {}." - ).format(self._of_type) + assert not isinstance( + self._of_type, NonNull + ), f"Can only create NonNull of a Nullable GraphQLType but got: {self._of_type}." def __str__(self): - return "{}!".format(self.of_type) + return f"{self.of_type}!" def __eq__(self, other): return isinstance(other, NonNull) and ( diff --git a/graphene/types/tests/test_datetime.py b/graphene/types/tests/test_datetime.py index 8bc20a41f..6a3241a17 100644 --- a/graphene/types/tests/test_datetime.py +++ b/graphene/types/tests/test_datetime.py @@ -180,9 +180,7 @@ def test_time_query_variable(sample_time): def test_bad_variables(sample_date, sample_datetime, sample_time): def _test_bad_variables(type_, input_): result = schema.execute( - """query Test($input: {}){{ {}(in: $input) }}""".format( - type_, type_.lower() - ), + f"""query Test($input: {type_}){{ {type_.lower()}(in: $input) }}""", variables={"input": input_}, ) assert isinstance(result.errors, list) diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 40cd4afd7..1b6181208 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -15,7 +15,7 @@ class RGB(Enum): @property def description(self): - return "Description {}".format(self.name) + return f"Description {self.name}" assert RGB._meta.name == "RGB" assert RGB._meta.description == "Description" diff --git a/graphene/types/tests/test_inputobjecttype.py b/graphene/types/tests/test_inputobjecttype.py index e11823823..0fb7e3945 100644 --- 
a/graphene/types/tests/test_inputobjecttype.py +++ b/graphene/types/tests/test_inputobjecttype.py @@ -112,7 +112,7 @@ class Child(InputObjectType): @property def full_name(self): - return "{} {}".format(self.first_name, self.last_name) + return f"{self.first_name} {self.last_name}" class Parent(InputObjectType): child = InputField(Child) diff --git a/graphene/types/tests/test_query.py b/graphene/types/tests/test_query.py index fe9f39fce..2d3e4c730 100644 --- a/graphene/types/tests/test_query.py +++ b/graphene/types/tests/test_query.py @@ -454,15 +454,15 @@ class Query(ObjectType): info = String() def resolve_annotated(self, info, id): - return "{}-{}".format(self, id) + return f"{self}-{id}" def resolve_context(self, info): assert isinstance(info.context, Context) - return "{}-{}".format(self, info.context.key) + return f"{self}-{info.context.key}" def resolve_info(self, info): assert isinstance(info, ResolveInfo) - return "{}-{}".format(self, info.field_name) + return f"{self}-{info.field_name}" test_schema = Schema(Query) diff --git a/graphene/types/tests/test_type_map.py b/graphene/types/tests/test_type_map.py index 2dbbe6bbb..334eb2415 100644 --- a/graphene/types/tests/test_type_map.py +++ b/graphene/types/tests/test_type_map.py @@ -37,7 +37,7 @@ class MyEnum(Enum): @property def description(self): - return "Description {}={}".format(self.name, self.value) + return f"Description {self.name}={self.value}" @property def deprecation_reason(self): diff --git a/graphene/types/union.py b/graphene/types/union.py index 5ae54562a..928656ae8 100644 --- a/graphene/types/union.py +++ b/graphene/types/union.py @@ -53,7 +53,7 @@ class Query(ObjectType): def __init_subclass_with_meta__(cls, types=None, **options): assert ( isinstance(types, (list, tuple)) and len(types) > 0 - ), "Must provide types for Union {name}.".format(name=cls.__name__) + ), f"Must provide types for Union {cls.__name__}." 
_meta = UnionOptions(cls) _meta.types = types diff --git a/graphene/types/unmountedtype.py b/graphene/types/unmountedtype.py index 081c4ba00..83a6afefc 100644 --- a/graphene/types/unmountedtype.py +++ b/graphene/types/unmountedtype.py @@ -49,7 +49,7 @@ def get_type(self): This function is called when the UnmountedType instance is mounted (as a Field, InputField or Argument) """ - raise NotImplementedError("get_type not implemented in {}".format(self)) + raise NotImplementedError(f"get_type not implemented in {self}") def mount_as(self, _as): return _as.mounted(self) diff --git a/graphene/types/uuid.py b/graphene/types/uuid.py index ef09ae6c7..c21eb1658 100644 --- a/graphene/types/uuid.py +++ b/graphene/types/uuid.py @@ -17,9 +17,7 @@ def serialize(uuid): if isinstance(uuid, str): uuid = _UUID(uuid) - assert isinstance(uuid, _UUID), "Expected UUID instance, received {}".format( - uuid - ) + assert isinstance(uuid, _UUID), f"Expected UUID instance, received {uuid}" return str(uuid) @staticmethod diff --git a/graphene/utils/deprecated.py b/graphene/utils/deprecated.py index 2f98d8296..71a5bb404 100644 --- a/graphene/utils/deprecated.py +++ b/graphene/utils/deprecated.py @@ -2,7 +2,7 @@ import inspect import warnings -string_types = (type(b""), type(u"")) +string_types = (type(b""), type("")) def warn_deprecation(text): @@ -29,13 +29,13 @@ def deprecated(reason): def decorator(func1): if inspect.isclass(func1): - fmt1 = "Call to deprecated class {name} ({reason})." + fmt1 = f"Call to deprecated class {func1.__name__} ({reason})." else: - fmt1 = "Call to deprecated function {name} ({reason})." + fmt1 = f"Call to deprecated function {func1.__name__} ({reason})." 
@functools.wraps(func1) def new_func1(*args, **kwargs): - warn_deprecation(fmt1.format(name=func1.__name__, reason=reason)) + warn_deprecation(fmt1) return func1(*args, **kwargs) return new_func1 @@ -55,13 +55,13 @@ def new_func1(*args, **kwargs): func2 = reason if inspect.isclass(func2): - fmt2 = "Call to deprecated class {name}." + fmt2 = f"Call to deprecated class {func2.__name__}." else: - fmt2 = "Call to deprecated function {name}." + fmt2 = f"Call to deprecated function {func2.__name__}." @functools.wraps(func2) def new_func2(*args, **kwargs): - warn_deprecation(fmt2.format(name=func2.__name__)) + warn_deprecation(fmt2) return func2(*args, **kwargs) return new_func2 diff --git a/graphene/utils/subclass_with_meta.py b/graphene/utils/subclass_with_meta.py index 8900ad532..c4ee11d74 100644 --- a/graphene/utils/subclass_with_meta.py +++ b/graphene/utils/subclass_with_meta.py @@ -12,7 +12,7 @@ def __str__(cls): return cls.__name__ def __repr__(cls): - return "<{} meta={}>".format(cls.__name__, repr(cls._meta)) + return f"<{cls.__name__} meta={repr(cls._meta)}>" class SubclassWithMeta(metaclass=SubclassWithMeta_Meta): @@ -29,9 +29,7 @@ def __init_subclass__(cls, **meta_options): _meta_props = props(_Meta) else: raise Exception( - "Meta have to be either a class or a dict. Received {}".format( - _Meta - ) + f"Meta have to be either a class or a dict. Received {_Meta}" ) delattr(cls, "Meta") options = dict(meta_options, **_meta_props) @@ -40,8 +38,8 @@ def __init_subclass__(cls, **meta_options): if abstract: assert not options, ( "Abstract types can only contain the abstract attribute. 
" - "Received: abstract, {option_keys}" - ).format(option_keys=", ".join(options)) + f"Received: abstract, {', '.join(options)}" + ) else: super_class = super(cls, cls) if hasattr(super_class, "__init_subclass_with_meta__"): From f9efe15973a511ebbcdfa452e526e937d9d15f2a Mon Sep 17 00:00:00 2001 From: Oleh Kuchuk Date: Sun, 15 Mar 2020 20:52:56 +0200 Subject: [PATCH 030/141] =?UTF-8?q?Fixed=20examples,=20make=20root=20objec?= =?UTF-8?q?t=20explicit=20inside=20resolvers=20and=E2=80=A6=20(#1159)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- examples/complex_example.py | 4 ++-- examples/context_example.py | 2 +- examples/simple_example.py | 2 +- examples/starwars/schema.py | 6 +++--- examples/starwars_relay/schema.py | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/examples/complex_example.py b/examples/complex_example.py index aaa48a4c3..73a8ac1bc 100644 --- a/examples/complex_example.py +++ b/examples/complex_example.py @@ -17,7 +17,7 @@ class Address(graphene.ObjectType): class Query(graphene.ObjectType): address = graphene.Field(Address, geo=GeoInput(required=True)) - def resolve_address(self, info, geo): + def resolve_address(root, info, geo): return Address(latlng=geo.latlng) @@ -27,7 +27,7 @@ class Arguments: Output = Address - def mutate(self, info, geo): + def mutate(root, info, geo): return Address(latlng=geo.latlng) diff --git a/examples/context_example.py b/examples/context_example.py index 9b5fd1a50..235ae535c 100644 --- a/examples/context_example.py +++ b/examples/context_example.py @@ -9,7 +9,7 @@ class User(graphene.ObjectType): class Query(graphene.ObjectType): me = graphene.Field(User) - def resolve_me(self, info): + def resolve_me(root, info): return info.context["user"] diff --git a/examples/simple_example.py b/examples/simple_example.py index 9bff3070b..9bee8d1f4 100644 --- a/examples/simple_example.py +++ b/examples/simple_example.py @@ -11,7 +11,7 @@ class 
Query(graphene.ObjectType): patron = graphene.Field(Patron) - def resolve_patron(self, info): + def resolve_patron(root, info): return Patron(id=1, name="Syrus", age=27) diff --git a/examples/starwars/schema.py b/examples/starwars/schema.py index a05957341..25642c343 100644 --- a/examples/starwars/schema.py +++ b/examples/starwars/schema.py @@ -39,13 +39,13 @@ class Query(graphene.ObjectType): human = graphene.Field(Human, id=graphene.String()) droid = graphene.Field(Droid, id=graphene.String()) - def resolve_hero(self, info, episode=None): + def resolve_hero(root, info, episode=None): return get_hero(episode) - def resolve_human(self, info, id): + def resolve_human(root, info, id): return get_human(id) - def resolve_droid(self, info, id): + def resolve_droid(root, info, id): return get_droid(id) diff --git a/examples/starwars_relay/schema.py b/examples/starwars_relay/schema.py index 3a1cd980f..d1cce687f 100644 --- a/examples/starwars_relay/schema.py +++ b/examples/starwars_relay/schema.py @@ -64,10 +64,10 @@ class Query(graphene.ObjectType): empire = graphene.Field(Faction) node = relay.Node.Field() - def resolve_rebels(self, info): + def resolve_rebels(root, info): return get_rebels() - def resolve_empire(self, info): + def resolve_empire(root, info): return get_empire() From 00e36b52d5cc45bd69c77c87685650ea8851993a Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 16 Mar 2020 15:51:07 +0000 Subject: [PATCH 031/141] Remove unused function (#1160) --- graphene/types/schema.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 458948f4d..29ead4a70 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -372,31 +372,6 @@ def resolve_type(self, resolve_type_func, type_name, root, info, _type): return type_ - def get_resolver(self, graphene_type, name, default_value): - if not issubclass(graphene_type, ObjectType): - return - resolver = getattr(graphene_type, 
f"resolve_{name}", None) - if not resolver: - # If we don't find the resolver in the ObjectType class, then try to - # find it in each of the interfaces - interface_resolver = None - for interface in graphene_type._meta.interfaces: - if name not in interface._meta.fields: - continue - interface_resolver = getattr(interface, f"resolve_{name}", None) - if interface_resolver: - break - resolver = interface_resolver - - # Only if is not decorated with classmethod - if resolver: - return get_unbound_function(resolver) - - default_resolver = ( - graphene_type._meta.default_resolver or get_default_resolver() - ) - return partial(default_resolver, name, default_value) - class Schema: """Schema Definition. From 6f2863ef6e0697c3b09e6f6e9fc49426642171f1 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 16 Mar 2020 16:19:44 +0000 Subject: [PATCH 032/141] Add some more tests for Interface (#1154) --- graphene/types/tests/test_interface.py | 109 +++++++++++++++++++++++++ 1 file changed, 109 insertions(+) diff --git a/graphene/types/tests/test_interface.py b/graphene/types/tests/test_interface.py index d551f2384..c30a8a3af 100644 --- a/graphene/types/tests/test_interface.py +++ b/graphene/types/tests/test_interface.py @@ -1,5 +1,8 @@ from ..field import Field from ..interface import Interface +from ..objecttype import ObjectType +from ..scalars import String +from ..schema import Schema from ..unmountedtype import UnmountedType @@ -88,3 +91,109 @@ class MyInterface(MyAbstractType, Interface): assert list(MyInterface._meta.fields) == ["field1", "field2"] assert [type(x) for x in MyInterface._meta.fields.values()] == [Field, Field] + + +def test_resolve_type_default(): + class MyInterface(Interface): + field2 = String() + + class MyTestType(ObjectType): + class Meta: + interfaces = (MyInterface,) + + class Query(ObjectType): + test = Field(MyInterface) + + def resolve_test(_, info): + return MyTestType() + + schema = Schema(query=Query, types=[MyTestType]) + + result = 
schema.execute( + """ + query { + test { + __typename + } + } + """ + ) + assert not result.errors + assert result.data == {"test": {"__typename": "MyTestType"}} + + +def test_resolve_type_custom(): + class MyInterface(Interface): + field2 = String() + + @classmethod + def resolve_type(cls, instance, info): + if instance["type"] == 1: + return MyTestType1 + return MyTestType2 + + class MyTestType1(ObjectType): + class Meta: + interfaces = (MyInterface,) + + class MyTestType2(ObjectType): + class Meta: + interfaces = (MyInterface,) + + class Query(ObjectType): + test = Field(MyInterface) + + def resolve_test(_, info): + return {"type": 1} + + schema = Schema(query=Query, types=[MyTestType1, MyTestType2]) + + result = schema.execute( + """ + query { + test { + __typename + } + } + """ + ) + assert not result.errors + assert result.data == {"test": {"__typename": "MyTestType1"}} + + +def test_resolve_type_custom_interferes(): + class MyInterface(Interface): + field2 = String() + type_ = String(name="type") + + def resolve_type_(_, info): + return "foo" + + class MyTestType1(ObjectType): + class Meta: + interfaces = (MyInterface,) + + class MyTestType2(ObjectType): + class Meta: + interfaces = (MyInterface,) + + class Query(ObjectType): + test = Field(MyInterface) + + def resolve_test(_, info): + return MyTestType1() + + schema = Schema(query=Query, types=[MyTestType1, MyTestType2]) + + result = schema.execute( + """ + query { + test { + __typename + type + } + } + """ + ) + assert not result.errors + assert result.data == {"test": {"__typename": "MyTestType1", "type": "foo"}} From cb3bfe011f516032d78ee840e6779b6be02dae9a Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 16 Mar 2020 16:20:04 +0000 Subject: [PATCH 033/141] =?UTF-8?q?Use=20default=5Fresolver=20to=20resolve?= =?UTF-8?q?=20values=20when=20using=20the=20source=20at=E2=80=A6=20(#1155)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- graphene/types/field.py 
| 3 ++- graphene/types/tests/test_field.py | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/graphene/types/field.py b/graphene/types/field.py index b2c7766a9..2af737819 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -4,6 +4,7 @@ from .argument import Argument, to_arguments from .mountedtype import MountedType +from .resolver import default_resolver from .structures import NonNull from .unmountedtype import UnmountedType from .utils import get_type @@ -12,7 +13,7 @@ def source_resolver(source, root, info, **args): - resolved = getattr(root, source, None) + resolved = default_resolver(source, None, root, info, **args) if inspect.isfunction(resolved) or inspect.ismethod(resolved): return resolved() return resolved diff --git a/graphene/types/tests/test_field.py b/graphene/types/tests/test_field.py index 70ac09109..669ada4f8 100644 --- a/graphene/types/tests/test_field.py +++ b/graphene/types/tests/test_field.py @@ -66,6 +66,13 @@ def test_field_source(): assert field.resolver(MyInstance(), None) == MyInstance.value +def test_field_source_dict_or_attr(): + MyType = object() + field = Field(MyType, source="value") + assert field.resolver(MyInstance(), None) == MyInstance.value + assert field.resolver({"value": MyInstance.value}, None) == MyInstance.value + + def test_field_with_lazy_type(): MyType = object() field = Field(lambda: MyType) From 9fdab033a7c5e4d4051d92fd8b57dcbe80578aec Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Wed, 1 Apr 2020 16:24:23 +0100 Subject: [PATCH 034/141] Add exempt labels --- .github/stale.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/stale.yml b/.github/stale.yml index c9418f678..bb573c081 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -6,6 +6,12 @@ daysUntilClose: 14 exemptLabels: - pinned - security + - 🐛 bug + - 📖 documentation + - 🙋 help wanted + - ✨ enhancement + - good first issue + - work in progress # Label to use when marking an issue as stale 
staleLabel: wontfix # Comment to post when marking an issue as stale. Set to `false` to disable From a2fe8dd70469ea5a9d88afbc366ce695857835bb Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 2 Apr 2020 19:55:00 +0100 Subject: [PATCH 035/141] Add note about the use of `args` (#1170) * Add note about the use of `args` Closes #1037 * Some improvements * Link to correct place --- docs/api/index.rst | 2 ++ docs/types/objecttypes.rst | 16 ++++++++++++++++ graphene/types/field.py | 7 ++++--- 3 files changed, 22 insertions(+), 3 deletions(-) diff --git a/docs/api/index.rst b/docs/api/index.rst index 47b8b84f5..0da427e4d 100644 --- a/docs/api/index.rst +++ b/docs/api/index.rst @@ -20,6 +20,8 @@ Object types .. autoclass:: graphene.Mutation :members: +.. _fields-mounted-types: + Fields (Mounted Types) ---------------------- diff --git a/docs/types/objecttypes.rst b/docs/types/objecttypes.rst index 984acbf06..e37c3030a 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -158,6 +158,22 @@ You can then execute the following query: } } +*Note:* There are several arguments to a field that are "reserved" by Graphene +(see :ref:`fields-mounted-types`). +You can still define an argument that clashes with one of these fields by using +the ``args`` parameter like so: + +.. 
code:: python + + from graphene import ObjectType, Field, String + + class Query(ObjectType): + answer = String(args={'description': String()}) + + def resolve_answer(parent, info, description): + return description + + Convenience Features of Graphene Resolvers ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/graphene/types/field.py b/graphene/types/field.py index 2af737819..f0a28eb32 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -40,11 +40,12 @@ class Person(ObjectType): last_name = graphene.Field(String, description='Surname') # explicitly mounted as Field args: - type (class for a graphene.UnmountedType): must be a class (not an instance) of an + type (class for a graphene.UnmountedType): Must be a class (not an instance) of an unmounted graphene type (ex. scalar or object) which is used for the type of this field in the GraphQL schema. - args (optional, Dict[str, graphene.Argument]): arguments that can be input to the field. - Prefer to use **extra_args. + args (optional, Dict[str, graphene.Argument]): Arguments that can be input to the field. + Prefer to use ``**extra_args``, unless you use an argument name that clashes with one + of the Field arguments presented here (see :ref:`example`). resolver (optional, Callable): A function to get the value for a Field from the parent value object. If not set, the default resolver method for the schema is used. 
source (optional, str): attribute name to resolve for this field from the parent value From 0051f82b5fd7bc8ddb79b68c552f8aa00617b2df Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 6 Apr 2020 09:36:53 +0100 Subject: [PATCH 036/141] v3.0.0b1 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index 876c30857..f99f589d0 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -42,7 +42,7 @@ from .utils.module_loading import lazy_import -VERSION = (3, 0, 0, "beta", 0) +VERSION = (3, 0, 0, "beta", 1) __version__ = get_version(VERSION) From 871c60cf461eb313ee9c1550cd7954ee4134b566 Mon Sep 17 00:00:00 2001 From: sduthil Date: Thu, 9 Apr 2020 14:21:04 -0400 Subject: [PATCH 037/141] Docs: integrations: fix FastAPI link (#1177) --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index dfaab1d51..54f1f99c3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -30,4 +30,4 @@ Integrations * `Graphene-GAE `_ (`source `_) * `Graphene-Mongo `_ (`source `_) * `Starlette `_ (`source `_) -* `FastAPI `_ (`source `_) +* `FastAPI `_ (`source `_) From 37d6eaea465c8dca981efd173b7c74db9a01830e Mon Sep 17 00:00:00 2001 From: rrueth Date: Sun, 12 Apr 2020 04:19:56 -0700 Subject: [PATCH 038/141] Fix resolve method parameters bullet list (#1178) The current documentation shows all of the resolve parameters on a single line as opposed to the bullet list that was intended. --- docs/types/objecttypes.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/types/objecttypes.rst b/docs/types/objecttypes.rst index e37c3030a..29d3954c6 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -52,6 +52,7 @@ Resolvers are lazily executed, so if a field is not included in a query, its res Each field on an *ObjectType* in Graphene should have a corresponding resolver method to fetch data. 
This resolver method should match the field name. For example, in the ``Person`` type above, the ``full_name`` field is resolved by the method ``resolve_full_name``. Each resolver method takes the parameters: + * :ref:`ResolverParamParent` for the value object use to resolve most fields * :ref:`ResolverParamInfo` for query and schema meta information and per-request context * :ref:`ResolverParamGraphQLArguments` as defined on the **Field**. From 49fcf9f2e6ffd10253a44c1db5a52cd8bd0ff2f9 Mon Sep 17 00:00:00 2001 From: Syrus Akbary Date: Sun, 12 Apr 2020 17:45:46 -0700 Subject: [PATCH 039/141] Allow fast ObjectType creation based on dataclasses (#1157) * Allow fast ObjectType creation based on dataclasses * Fixed Python 3.8 integration * Added repr and eq methods to ObjectType containers * Reformatted code * Fixed mypy issue * Removed unused __init__ for ObjectType containers * Use black in dataclasses * Use latest black verison on precommit --- graphene/pyutils/dataclasses.py | 1260 +++++++++++++++++++++++ graphene/types/base.py | 5 +- graphene/types/objecttype.py | 77 +- graphene/types/tests/test_objecttype.py | 26 +- 4 files changed, 1321 insertions(+), 47 deletions(-) create mode 100644 graphene/pyutils/dataclasses.py diff --git a/graphene/pyutils/dataclasses.py b/graphene/pyutils/dataclasses.py new file mode 100644 index 000000000..61f0ea388 --- /dev/null +++ b/graphene/pyutils/dataclasses.py @@ -0,0 +1,1260 @@ +# This is a polyfill for dataclasses +# https://docs.python.org/3/library/dataclasses.html +# Original PEP proposal: PEP 557 +# https://www.python.org/dev/peps/pep-0557/ +import re +import sys +import copy +import types +import inspect +import keyword + +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + # Helper functions. + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] + +# Conditions for adding methods. 
The boxes indicate what action the +# dataclass decorator takes. For all of these tables, when I talk +# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm +# referring to the arguments to the @dataclass decorator. When +# checking if a dunder method already exists, I mean check for an +# entry in the class's __dict__. I never check to see if an attribute +# is defined in a base class. + +# Key: +# +=========+=========================================+ +# + Value | Meaning | +# +=========+=========================================+ +# | | No action: no method is added. | +# +---------+-----------------------------------------+ +# | add | Generated method is added. | +# +---------+-----------------------------------------+ +# | raise | TypeError is raised. | +# +---------+-----------------------------------------+ +# | None | Attribute is set to None. | +# +=========+=========================================+ + +# __init__ +# +# +--- init= parameter +# | +# v | | | +# | no | yes | <--- class has __init__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + +# __repr__ +# +# +--- repr= parameter +# | +# v | | | +# | no | yes | <--- class has __repr__ in __dict__? +# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + + +# __setattr__ +# __delattr__ +# +# +--- frozen= parameter +# | +# v | | | +# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__? +# +=======+=======+=======+ +# | False | | | <- the default +# +-------+-------+-------+ +# | True | add | raise | +# +=======+=======+=======+ +# Raise because not adding these methods would break the "frozen-ness" +# of the class. + +# __eq__ +# +# +--- eq= parameter +# | +# v | | | +# | no | yes | <--- class has __eq__ in __dict__? 
+# +=======+=======+=======+ +# | False | | | +# +-------+-------+-------+ +# | True | add | | <- the default +# +=======+=======+=======+ + +# __lt__ +# __le__ +# __gt__ +# __ge__ +# +# +--- order= parameter +# | +# v | | | +# | no | yes | <--- class has any comparison method in __dict__? +# +=======+=======+=======+ +# | False | | | <- the default +# +-------+-------+-------+ +# | True | add | raise | +# +=======+=======+=======+ +# Raise because to allow this case would interfere with using +# functools.total_ordering. + +# __hash__ + +# +------------------- unsafe_hash= parameter +# | +----------- eq= parameter +# | | +--- frozen= parameter +# | | | +# v v v | | | +# | no | yes | <--- class has explicitly defined __hash__ +# +=======+=======+=======+========+========+ +# | False | False | False | | | No __eq__, use the base class __hash__ +# +-------+-------+-------+--------+--------+ +# | False | False | True | | | No __eq__, use the base class __hash__ +# +-------+-------+-------+--------+--------+ +# | False | True | False | None | | <-- the default, not hashable +# +-------+-------+-------+--------+--------+ +# | False | True | True | add | | Frozen, so hashable, allows override +# +-------+-------+-------+--------+--------+ +# | True | False | False | add | raise | Has no __eq__, but hashable +# +-------+-------+-------+--------+--------+ +# | True | False | True | add | raise | Has no __eq__, but hashable +# +-------+-------+-------+--------+--------+ +# | True | True | False | add | raise | Not frozen, but hashable +# +-------+-------+-------+--------+--------+ +# | True | True | True | add | raise | Frozen, so hashable +# +=======+=======+=======+========+========+ +# For boxes that are blank, __hash__ is untouched and therefore +# inherited from the base class. If the base is object, then +# id-based hashing is used. 
+# +# Note that a class may already have __hash__=None if it specified an +# __eq__ method in the class body (not one that was created by +# @dataclass). +# +# See _hash_action (below) for a coded version of this table. + + +# Raised when an attempt is made to modify a frozen class. +class FrozenInstanceError(AttributeError): + pass + + +# A sentinel object for default values to signal that a default +# factory will be used. This is given a nice repr() which will appear +# in the function signature of dataclasses' constructors. +class _HAS_DEFAULT_FACTORY_CLASS: + def __repr__(self): + return "" + + +_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS() + +# A sentinel object to detect if a parameter is supplied or not. Use +# a class to give it a better repr. +class _MISSING_TYPE: + pass + + +MISSING = _MISSING_TYPE() + +# Since most per-field metadata will be unused, create an empty +# read-only proxy that can be shared among all fields. +_EMPTY_METADATA = types.MappingProxyType({}) + +# Markers for the various kinds of fields and pseudo-fields. +class _FIELD_BASE: + def __init__(self, name): + self.name = name + + def __repr__(self): + return self.name + + +_FIELD = _FIELD_BASE("_FIELD") +_FIELD_CLASSVAR = _FIELD_BASE("_FIELD_CLASSVAR") +_FIELD_INITVAR = _FIELD_BASE("_FIELD_INITVAR") + +# The name of an attribute on the class where we store the Field +# objects. Also used to check if a class is a Data Class. +_FIELDS = "__dataclass_fields__" + +# The name of an attribute on the class that stores the parameters to +# @dataclass. +_PARAMS = "__dataclass_params__" + +# The name of the function, that if it exists, is called at the end of +# __init__. +_POST_INIT_NAME = "__post_init__" + +# String regex that string annotations for ClassVar or InitVar must match. +# Allows "identifier.identifier[" or "identifier[". +# https://bugs.python.org/issue33453 for details. 
+_MODULE_IDENTIFIER_RE = re.compile(r"^(?:\s*(\w+)\s*\.)?\s*(\w+)") + + +class _InitVarMeta(type): + def __getitem__(self, params): + return self + + +class InitVar(metaclass=_InitVarMeta): + pass + + +# Instances of Field are only ever created from within this module, +# and only from the field() function, although Field instances are +# exposed externally as (conceptually) read-only objects. +# +# name and type are filled in after the fact, not in __init__. +# They're not known at the time this class is instantiated, but it's +# convenient if they're available later. +# +# When cls._FIELDS is filled in with a list of Field objects, the name +# and type fields will have been populated. +class Field: + __slots__ = ( + "name", + "type", + "default", + "default_factory", + "repr", + "hash", + "init", + "compare", + "metadata", + "_field_type", # Private: not to be used by user code. + ) + + def __init__(self, default, default_factory, init, repr, hash, compare, metadata): + self.name = None + self.type = None + self.default = default + self.default_factory = default_factory + self.init = init + self.repr = repr + self.hash = hash + self.compare = compare + self.metadata = ( + _EMPTY_METADATA + if metadata is None or len(metadata) == 0 + else types.MappingProxyType(metadata) + ) + self._field_type = None + + def __repr__(self): + return ( + "Field(" + f"name={self.name!r}," + f"type={self.type!r}," + f"default={self.default!r}," + f"default_factory={self.default_factory!r}," + f"init={self.init!r}," + f"repr={self.repr!r}," + f"hash={self.hash!r}," + f"compare={self.compare!r}," + f"metadata={self.metadata!r}," + f"_field_type={self._field_type}" + ")" + ) + + # This is used to support the PEP 487 __set_name__ protocol in the + # case where we're using a field that contains a descriptor as a + # defaul value. For details on __set_name__, see + # https://www.python.org/dev/peps/pep-0487/#implementation-details. 
+ # + # Note that in _process_class, this Field object is overwritten + # with the default value, so the end result is a descriptor that + # had __set_name__ called on it at the right time. + def __set_name__(self, owner, name): + func = getattr(type(self.default), "__set_name__", None) + if func: + # There is a __set_name__ method on the descriptor, call + # it. + func(self.default, owner, name) + + +class _DataclassParams: + __slots__ = ( + "init", + "repr", + "eq", + "order", + "unsafe_hash", + "frozen", + ) + + def __init__(self, init, repr, eq, order, unsafe_hash, frozen): + self.init = init + self.repr = repr + self.eq = eq + self.order = order + self.unsafe_hash = unsafe_hash + self.frozen = frozen + + def __repr__(self): + return ( + "_DataclassParams(" + f"init={self.init!r}," + f"repr={self.repr!r}," + f"eq={self.eq!r}," + f"order={self.order!r}," + f"unsafe_hash={self.unsafe_hash!r}," + f"frozen={self.frozen!r}" + ")" + ) + + +# This function is used instead of exposing Field creation directly, +# so that a type checker can be told (via overloads) that this is a +# function whose type depends on its parameters. +def field( + *, + default=MISSING, + default_factory=MISSING, + init=True, + repr=True, + hash=None, + compare=True, + metadata=None, +): + """Return an object to identify dataclass fields. + + default is the default value of the field. default_factory is a + 0-argument function called to initialize a field's value. If init + is True, the field will be a parameter to the class's __init__() + function. If repr is True, the field will be included in the + object's repr(). If hash is True, the field will be included in + the object's hash(). If compare is True, the field will be used + in comparison functions. metadata, if specified, must be a + mapping which is stored but not otherwise examined by dataclass. + + It is an error to specify both default and default_factory. 
+ """ + + if default is not MISSING and default_factory is not MISSING: + raise ValueError("cannot specify both default and default_factory") + return Field(default, default_factory, init, repr, hash, compare, metadata) + + +def _tuple_str(obj_name, fields): + # Return a string representing each field of obj_name as a tuple + # member. So, if fields is ['x', 'y'] and obj_name is "self", + # return "(self.x,self.y)". + + # Special case for the 0-tuple. + if not fields: + return "()" + # Note the trailing comma, needed if this turns out to be a 1-tuple. + return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)' + + +def _create_fn(name, args, body, *, globals=None, locals=None, return_type=MISSING): + # Note that we mutate locals when exec() is called. Caller + # beware! The only callers are internal to this module, so no + # worries about external callers. + if locals is None: + locals = {} + return_annotation = "" + if return_type is not MISSING: + locals["_return_type"] = return_type + return_annotation = "->_return_type" + args = ",".join(args) + body = "\n".join(f" {b}" for b in body) + + # Compute the text of the entire function. + txt = f"def {name}({args}){return_annotation}:\n{body}" + + exec(txt, globals, locals) + return locals[name] + + +def _field_assign(frozen, name, value, self_name): + # If we're a frozen class, then assign to our fields in __init__ + # via object.__setattr__. Otherwise, just use a simple + # assignment. + # + # self_name is what "self" is called in this function: don't + # hard-code "self", since that might be a field name. + if frozen: + return f"object.__setattr__({self_name},{name!r},{value})" + return f"{self_name}.{name}={value}" + + +def _field_init(f, frozen, globals, self_name): + # Return the text of the line in the body of __init__ that will + # initialize this field. + + default_name = f"_dflt_{f.name}" + if f.default_factory is not MISSING: + if f.init: + # This field has a default factory. 
If a parameter is + # given, use it. If not, call the factory. + globals[default_name] = f.default_factory + value = ( + f"{default_name}() " + f"if {f.name} is _HAS_DEFAULT_FACTORY " + f"else {f.name}" + ) + else: + # This is a field that's not in the __init__ params, but + # has a default factory function. It needs to be + # initialized here by calling the factory function, + # because there's no other way to initialize it. + + # For a field initialized with a default=defaultvalue, the + # class dict just has the default value + # (cls.fieldname=defaultvalue). But that won't work for a + # default factory, the factory must be called in __init__ + # and we must assign that to self.fieldname. We can't + # fall back to the class dict's value, both because it's + # not set, and because it might be different per-class + # (which, after all, is why we have a factory function!). + + globals[default_name] = f.default_factory + value = f"{default_name}()" + else: + # No default factory. + if f.init: + if f.default is MISSING: + # There's no default, just do an assignment. + value = f.name + elif f.default is not MISSING: + globals[default_name] = f.default + value = f.name + else: + # This field does not need initialization. Signify that + # to the caller by returning None. + return None + + # Only test this now, so that we can create variables for the + # default. However, return None to signify that we're not going + # to actually do the assignment statement for InitVars. + if f._field_type == _FIELD_INITVAR: + return None + + # Now, actually generate the field assignment. + return _field_assign(frozen, f.name, value, self_name) + + +def _init_param(f): + # Return the __init__ parameter string for this field. For + # example, the equivalent of 'x:int=3' (except instead of 'int', + # reference a variable set to int, and instead of '3', reference a + # variable set to 3). 
+ if f.default is MISSING and f.default_factory is MISSING: + # There's no default, and no default_factory, just output the + # variable name and type. + default = "" + elif f.default is not MISSING: + # There's a default, this will be the name that's used to look + # it up. + default = f"=_dflt_{f.name}" + elif f.default_factory is not MISSING: + # There's a factory function. Set a marker. + default = "=_HAS_DEFAULT_FACTORY" + return f"{f.name}:_type_{f.name}{default}" + + +def _init_fn(fields, frozen, has_post_init, self_name): + # fields contains both real fields and InitVar pseudo-fields. + + # Make sure we don't have fields without defaults following fields + # with defaults. This actually would be caught when exec-ing the + # function source code, but catching it here gives a better error + # message, and future-proofs us in case we build up the function + # using ast. + seen_default = False + for f in fields: + # Only consider fields in the __init__ call. + if f.init: + if not (f.default is MISSING and f.default_factory is MISSING): + seen_default = True + elif seen_default: + raise TypeError( + f"non-default argument {f.name!r} " "follows default argument" + ) + + globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY} + + body_lines = [] + for f in fields: + line = _field_init(f, frozen, globals, self_name) + # line is None means that this field doesn't require + # initialization (it's a pseudo-field). Just skip it. + if line: + body_lines.append(line) + + # Does this class have a post-init function? + if has_post_init: + params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR) + body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})") + + # If no body lines, use 'pass'. 
+ if not body_lines: + body_lines = ["pass"] + + locals = {f"_type_{f.name}": f.type for f in fields} + return _create_fn( + "__init__", + [self_name] + [_init_param(f) for f in fields if f.init], + body_lines, + locals=locals, + globals=globals, + return_type=None, + ) + + +def _repr_fn(fields): + return _create_fn( + "__repr__", + ("self",), + [ + 'return self.__class__.__qualname__ + f"(' + + ", ".join([f"{f.name}={{self.{f.name}!r}}" for f in fields]) + + ')"' + ], + ) + + +def _frozen_get_del_attr(cls, fields): + # XXX: globals is modified on the first call to _create_fn, then + # the modified version is used in the second call. Is this okay? + globals = {"cls": cls, "FrozenInstanceError": FrozenInstanceError} + if fields: + fields_str = "(" + ",".join(repr(f.name) for f in fields) + ",)" + else: + # Special case for the zero-length tuple. + fields_str = "()" + return ( + _create_fn( + "__setattr__", + ("self", "name", "value"), + ( + f"if type(self) is cls or name in {fields_str}:", + ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', + f"super(cls, self).__setattr__(name, value)", + ), + globals=globals, + ), + _create_fn( + "__delattr__", + ("self", "name"), + ( + f"if type(self) is cls or name in {fields_str}:", + ' raise FrozenInstanceError(f"cannot delete field {name!r}")', + f"super(cls, self).__delattr__(name)", + ), + globals=globals, + ), + ) + + +def _cmp_fn(name, op, self_tuple, other_tuple): + # Create a comparison function. If the fields in the object are + # named 'x' and 'y', then self_tuple is the string + # '(self.x,self.y)' and other_tuple is the string + # '(other.x,other.y)'. 
+ + return _create_fn( + name, + ("self", "other"), + [ + "if other.__class__ is self.__class__:", + f" return {self_tuple}{op}{other_tuple}", + "return NotImplemented", + ], + ) + + +def _hash_fn(fields): + self_tuple = _tuple_str("self", fields) + return _create_fn("__hash__", ("self",), [f"return hash({self_tuple})"]) + + +def _is_classvar(a_type, typing): + # This test uses a typing internal class, but it's the best way to + # test if this is a ClassVar. + return type(a_type) is typing._ClassVar + + +def _is_initvar(a_type, dataclasses): + # The module we're checking against is the module we're + # currently in (dataclasses.py). + return a_type is dataclasses.InitVar + + +def _is_type(annotation, cls, a_module, a_type, is_type_predicate): + # Given a type annotation string, does it refer to a_type in + # a_module? For example, when checking that annotation denotes a + # ClassVar, then a_module is typing, and a_type is + # typing.ClassVar. + + # It's possible to look up a_module given a_type, but it involves + # looking in sys.modules (again!), and seems like a waste since + # the caller already knows a_module. + + # - annotation is a string type annotation + # - cls is the class that this annotation was found in + # - a_module is the module we want to match + # - a_type is the type in that module we want to match + # - is_type_predicate is a function called with (obj, a_module) + # that determines if obj is of the desired type. + + # Since this test does not do a local namespace lookup (and + # instead only a module (global) lookup), there are some things it + # gets wrong. 
+ + # With string annotations, cv0 will be detected as a ClassVar: + # CV = ClassVar + # @dataclass + # class C0: + # cv0: CV + + # But in this example cv1 will not be detected as a ClassVar: + # @dataclass + # class C1: + # CV = ClassVar + # cv1: CV + + # In C1, the code in this function (_is_type) will look up "CV" in + # the module and not find it, so it will not consider cv1 as a + # ClassVar. This is a fairly obscure corner case, and the best + # way to fix it would be to eval() the string "CV" with the + # correct global and local namespaces. However that would involve + # a eval() penalty for every single field of every dataclass + # that's defined. It was judged not worth it. + + match = _MODULE_IDENTIFIER_RE.match(annotation) + if match: + ns = None + module_name = match.group(1) + if not module_name: + # No module name, assume the class's module did + # "from dataclasses import InitVar". + ns = sys.modules.get(cls.__module__).__dict__ + else: + # Look up module_name in the class's module. + module = sys.modules.get(cls.__module__) + if module and module.__dict__.get(module_name) is a_module: + ns = sys.modules.get(a_type.__module__).__dict__ + if ns and is_type_predicate(ns.get(match.group(2)), a_module): + return True + return False + + +def _get_field(cls, a_name, a_type): + # Return a Field object for this field name and type. ClassVars + # and InitVars are also returned, but marked as such (see + # f._field_type). + + # If the default value isn't derived from Field, then it's only a + # normal default value. Convert it to a Field(). + default = getattr(cls, a_name, MISSING) + if isinstance(default, Field): + f = default + else: + if isinstance(default, types.MemberDescriptorType): + # This is a field in __slots__, so it has no default value. + default = MISSING + f = field(default=default) + + # Only at this point do we know the name and the type. Set them. + f.name = a_name + f.type = a_type + + # Assume it's a normal field until proven otherwise. 
We're next + # going to decide if it's a ClassVar or InitVar, everything else + # is just a normal field. + f._field_type = _FIELD + + # In addition to checking for actual types here, also check for + # string annotations. get_type_hints() won't always work for us + # (see https://github.com/python/typing/issues/508 for example), + # plus it's expensive and would require an eval for every string + # annotation. So, make a best effort to see if this is a ClassVar + # or InitVar using regex's and checking that the thing referenced + # is actually of the correct type. + + # For the complete discussion, see https://bugs.python.org/issue33453 + + # If typing has not been imported, then it's impossible for any + # annotation to be a ClassVar. So, only look for ClassVar if + # typing has been imported by any module (not necessarily cls's + # module). + typing = sys.modules.get("typing") + if typing: + if _is_classvar(a_type, typing) or ( + isinstance(f.type, str) + and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) + ): + f._field_type = _FIELD_CLASSVAR + + # If the type is InitVar, or if it's a matching string annotation, + # then it's an InitVar. + if f._field_type is _FIELD: + # The module we're checking against is the module we're + # currently in (dataclasses.py). + dataclasses = sys.modules[__name__] + if _is_initvar(a_type, dataclasses) or ( + isinstance(f.type, str) + and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) + ): + f._field_type = _FIELD_INITVAR + + # Validations for individual fields. This is delayed until now, + # instead of in the Field() constructor, since only here do we + # know the field name, which allows for better error reporting. + + # Special restrictions for ClassVar and InitVar. + if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR): + if f.default_factory is not MISSING: + raise TypeError(f"field {f.name} cannot have a " "default factory") + # Should I check for other field settings?
default_factory + # seems the most serious to check for. Maybe add others. For + # example, how about init=False (or really, + # init=)? It makes no sense for + # ClassVar and InitVar to specify init=. + + # For real fields, disallow mutable defaults for known types. + if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): + raise ValueError( + f"mutable default {type(f.default)} for field " + f"{f.name} is not allowed: use default_factory" + ) + + return f + + +def _set_new_attribute(cls, name, value): + # Never overwrites an existing attribute. Returns True if the + # attribute already exists. + if name in cls.__dict__: + return True + setattr(cls, name, value) + return False + + +# Decide if/how we're going to create a hash function. Key is +# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to +# take. The common case is to do nothing, so instead of providing a +# function that is a no-op, use None to signify that. + + +def _hash_set_none(cls, fields): + return None + + +def _hash_add(cls, fields): + flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] + return _hash_fn(flds) + + +def _hash_exception(cls, fields): + # Raise an exception. + raise TypeError(f"Cannot overwrite attribute __hash__ " f"in class {cls.__name__}") + + +# +# +-------------------------------------- unsafe_hash? +# | +------------------------------- eq? +# | | +------------------------ frozen? +# | | | +---------------- has-explicit-hash? 
+# | | | | +# | | | | +------- action +# | | | | | +# v v v v v +_hash_action = { + (False, False, False, False): None, + (False, False, False, True): None, + (False, False, True, False): None, + (False, False, True, True): None, + (False, True, False, False): _hash_set_none, + (False, True, False, True): None, + (False, True, True, False): _hash_add, + (False, True, True, True): None, + (True, False, False, False): _hash_add, + (True, False, False, True): _hash_exception, + (True, False, True, False): _hash_add, + (True, False, True, True): _hash_exception, + (True, True, False, False): _hash_add, + (True, True, False, True): _hash_exception, + (True, True, True, False): _hash_add, + (True, True, True, True): _hash_exception, +} +# See https://bugs.python.org/issue32929#msg312829 for an if-statement +# version of this table. + + +def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): + # Now that dicts retain insertion order, there's no reason to use + # an ordered dict. I am leveraging that ordering here, because + # derived class fields overwrite base class fields, but the order + # is defined by the base class, which is found first. + fields = {} + + setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order, unsafe_hash, frozen)) + + # Find our base classes in reverse MRO order, and exclude + # ourselves. In reversed order so that more derived classes + # override earlier field definitions in base classes. As long as + # we're iterating over them, see if any are frozen. + any_frozen_base = False + has_dataclass_bases = False + for b in cls.__mro__[-1:0:-1]: + # Only process classes that have been processed by our + # decorator. That is, they have a _FIELDS attribute. + base_fields = getattr(b, _FIELDS, None) + if base_fields: + has_dataclass_bases = True + for f in base_fields.values(): + fields[f.name] = f + if getattr(b, _PARAMS).frozen: + any_frozen_base = True + + # Annotations that are defined in this class (not in base + # classes). 
If __annotations__ isn't present, then this class + # adds no new annotations. We use this to compute fields that are + # added by this class. + # + # Fields are found from cls_annotations, which is guaranteed to be + # ordered. Default values are from class attributes, if a field + # has a default. If the default value is a Field(), then it + # contains additional info beyond (and possibly including) the + # actual default value. Pseudo-fields ClassVars and InitVars are + # included, despite the fact that they're not real fields. That's + # dealt with later. + cls_annotations = cls.__dict__.get("__annotations__", {}) + + # Now find fields in our class. While doing so, validate some + # things, and set the default values (as class attributes) where + # we can. + cls_fields = [_get_field(cls, name, type) for name, type in cls_annotations.items()] + for f in cls_fields: + fields[f.name] = f + + # If the class attribute (which is the default value for this + # field) exists and is of type 'Field', replace it with the + # real default. This is so that normal class introspection + # sees a real default value, not a Field. + if isinstance(getattr(cls, f.name, None), Field): + if f.default is MISSING: + # If there's no default, delete the class attribute. + # This happens if we specify field(repr=False), for + # example (that is, we specified a field object, but + # no default value). Also if we're using a default + # factory. The class attribute should not be set at + # all in the post-processed class. + delattr(cls, f.name) + else: + setattr(cls, f.name, f.default) + + # Do we have any Field members that don't also have annotations? + for name, value in cls.__dict__.items(): + if isinstance(value, Field) and not name in cls_annotations: + raise TypeError(f"{name!r} is a field but has no type annotation") + + # Check rules that apply if we are derived from any dataclasses. + if has_dataclass_bases: + # Raise an exception if any of our bases are frozen, but we're not. 
+ if any_frozen_base and not frozen: + raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one") + + # Raise an exception if we're frozen, but none of our bases are. + if not any_frozen_base and frozen: + raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one") + + # Remember all of the fields on our class (including bases). This + # also marks this class as being a dataclass. + setattr(cls, _FIELDS, fields) + + # Was this class defined with an explicit __hash__? Note that if + # __eq__ is defined in this class, then python will automatically + # set __hash__ to None. This is a heuristic, as it's possible + # that such a __hash__ == None was not auto-generated, but it's + # close enough. + class_hash = cls.__dict__.get("__hash__", MISSING) + has_explicit_hash = not ( + class_hash is MISSING or (class_hash is None and "__eq__" in cls.__dict__) + ) + + # If we're generating ordering methods, we must be generating the + # eq methods. + if order and not eq: + raise ValueError("eq must be true if order is true") + + if init: + # Does this class have a post-init function? + has_post_init = hasattr(cls, _POST_INIT_NAME) + + # Include InitVars and regular fields (so, not ClassVars). + flds = [f for f in fields.values() if f._field_type in (_FIELD, _FIELD_INITVAR)] + _set_new_attribute( + cls, + "__init__", + _init_fn( + flds, + frozen, + has_post_init, + # The name to use for the "self" + # param in __init__. Use "self" + # if possible. + "__dataclass_self__" if "self" in fields else "self", + ), + ) + + # Get the fields as a list, and include only real fields. This is + # used in all of the following methods. + field_list = [f for f in fields.values() if f._field_type is _FIELD] + + if repr: + flds = [f for f in field_list if f.repr] + _set_new_attribute(cls, "__repr__", _repr_fn(flds)) + + if eq: + # Create __eq__ method. There's no need for a __ne__ method, + # since python will call __eq__ and negate it.
+ flds = [f for f in field_list if f.compare] + self_tuple = _tuple_str("self", flds) + other_tuple = _tuple_str("other", flds) + _set_new_attribute( + cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple) + ) + + if order: + # Create and set the ordering methods. + flds = [f for f in field_list if f.compare] + self_tuple = _tuple_str("self", flds) + other_tuple = _tuple_str("other", flds) + for name, op in [ + ("__lt__", "<"), + ("__le__", "<="), + ("__gt__", ">"), + ("__ge__", ">="), + ]: + if _set_new_attribute( + cls, name, _cmp_fn(name, op, self_tuple, other_tuple) + ): + raise TypeError( + f"Cannot overwrite attribute {name} " + f"in class {cls.__name__}. Consider using " + "functools.total_ordering" + ) + + if frozen: + for fn in _frozen_get_del_attr(cls, field_list): + if _set_new_attribute(cls, fn.__name__, fn): + raise TypeError( + f"Cannot overwrite attribute {fn.__name__} " + f"in class {cls.__name__}" + ) + + # Decide if/how we're going to create a hash function. + hash_action = _hash_action[ + bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash + ] + if hash_action: + # No need to call _set_new_attribute here, since by the time + # we're here the overwriting is unconditional. + cls.__hash__ = hash_action(cls, field_list) + + if not getattr(cls, "__doc__"): + # Create a class doc-string. + cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "") + + return cls + + +# _cls should never be specified by keyword, so start it with an +# underscore. The presence of _cls is used to detect if this +# decorator is being called with parameters or not. +def dataclass( + _cls=None, + *, + init=True, + repr=True, + eq=True, + order=False, + unsafe_hash=False, + frozen=False, +): + """Returns the same class as was passed in, with dunder methods + added based on the fields defined in the class. + + Examines PEP 526 __annotations__ to determine fields. + + If init is true, an __init__() method is added to the class. 
If + repr is true, a __repr__() method is added. If order is true, rich + comparison dunder methods are added. If unsafe_hash is true, a + __hash__() method function is added. If frozen is true, fields may + not be assigned to after instance creation. + """ + + def wrap(cls): + return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen) + + # See if we're being called as @dataclass or @dataclass(). + if _cls is None: + # We're called with parens. + return wrap + + # We're called as @dataclass without parens. + return wrap(_cls) + + +def fields(class_or_instance): + """Return a tuple describing the fields of this dataclass. + + Accepts a dataclass or an instance of one. Tuple elements are of + type Field. + """ + + # Might it be worth caching this, per class? + try: + fields = getattr(class_or_instance, _FIELDS) + except AttributeError: + raise TypeError("must be called with a dataclass type or instance") + + # Exclude pseudo-fields. Note that fields is sorted by insertion + # order, so the order of the tuple is as the fields were defined. + return tuple(f for f in fields.values() if f._field_type is _FIELD) + + +def _is_dataclass_instance(obj): + """Returns True if obj is an instance of a dataclass.""" + return not isinstance(obj, type) and hasattr(obj, _FIELDS) + + +def is_dataclass(obj): + """Returns True if obj is a dataclass or an instance of a + dataclass.""" + return hasattr(obj, _FIELDS) + + +def asdict(obj, *, dict_factory=dict): + """Return the fields of a dataclass instance as a new dictionary mapping + field names to field values. + + Example usage: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert asdict(c) == {'x': 1, 'y': 2} + + If given, 'dict_factory' will be used instead of built-in dict. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. 
+ """ + if not _is_dataclass_instance(obj): + raise TypeError("asdict() should be called on dataclass instances") + return _asdict_inner(obj, dict_factory) + + +def _asdict_inner(obj, dict_factory): + if _is_dataclass_instance(obj): + result = [] + for f in fields(obj): + value = _asdict_inner(getattr(obj, f.name), dict_factory) + result.append((f.name, value)) + return dict_factory(result) + elif isinstance(obj, (list, tuple)): + return type(obj)(_asdict_inner(v, dict_factory) for v in obj) + elif isinstance(obj, dict): + return type(obj)( + (_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory)) + for k, v in obj.items() + ) + else: + return copy.deepcopy(obj) + + +def astuple(obj, *, tuple_factory=tuple): + """Return the fields of a dataclass instance as a new tuple of field values. + + Example usage:: + + @dataclass + class C: + x: int + y: int + + c = C(1, 2) + assert astuple(c) == (1, 2) + + If given, 'tuple_factory' will be used instead of built-in tuple. + The function applies recursively to field values that are + dataclass instances. This will also look into built-in containers: + tuples, lists, and dicts. 
+ """ + + if not _is_dataclass_instance(obj): + raise TypeError("astuple() should be called on dataclass instances") + return _astuple_inner(obj, tuple_factory) + + +def _astuple_inner(obj, tuple_factory): + if _is_dataclass_instance(obj): + result = [] + for f in fields(obj): + value = _astuple_inner(getattr(obj, f.name), tuple_factory) + result.append(value) + return tuple_factory(result) + elif isinstance(obj, (list, tuple)): + return type(obj)(_astuple_inner(v, tuple_factory) for v in obj) + elif isinstance(obj, dict): + return type(obj)( + (_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory)) + for k, v in obj.items() + ) + else: + return copy.deepcopy(obj) + + +def make_dataclass( + cls_name, + fields, + *, + bases=(), + namespace=None, + init=True, + repr=True, + eq=True, + order=False, + unsafe_hash=False, + frozen=False, +): + """Return a new dynamically created dataclass. + + The dataclass name will be 'cls_name'. 'fields' is an iterable + of either (name), (name, type) or (name, type, Field) objects. If type is + omitted, use the string 'typing.Any'. Field objects are created by + the equivalent of calling 'field(name, type [, Field-info])'. + + C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,)) + + is equivalent to: + + @dataclass + class C(Base): + x: 'typing.Any' + y: int + z: int = field(init=False) + + For the bases and namespace parameters, see the builtin type() function. + + The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to + dataclass(). + """ + + if namespace is None: + namespace = {} + else: + # Copy namespace since we're going to mutate it. + namespace = namespace.copy() + + # While we're looking through the field names, validate that they + # are identifiers, are not keywords, and not duplicates. 
+ seen = set() + anns = {} + for item in fields: + if isinstance(item, str): + name = item + tp = "typing.Any" + elif len(item) == 2: + name, tp, = item + elif len(item) == 3: + name, tp, spec = item + namespace[name] = spec + else: + raise TypeError(f"Invalid field: {item!r}") + + if not isinstance(name, str) or not name.isidentifier(): + raise TypeError(f"Field names must be valid identifers: {name!r}") + if keyword.iskeyword(name): + raise TypeError(f"Field names must not be keywords: {name!r}") + if name in seen: + raise TypeError(f"Field name duplicated: {name!r}") + + seen.add(name) + anns[name] = tp + + namespace["__annotations__"] = anns + # We use `types.new_class()` instead of simply `type()` to allow dynamic creation + # of generic dataclasses. + cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace)) + return dataclass( + cls, + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + ) + + +def replace(obj, **changes): + """Return a new object replacing specified fields with new values. + + This is especially useful for frozen classes. Example usage: + + @dataclass(frozen=True) + class C: + x: int + y: int + + c = C(1, 2) + c1 = replace(c, x=3) + assert c1.x == 3 and c1.y == 2 + """ + + # We're going to mutate 'changes', but that's okay because it's a + # new dict, even if called with 'replace(obj, **my_changes)'. + + if not _is_dataclass_instance(obj): + raise TypeError("replace() should be called on dataclass instances") + + # It's an error to have init=False fields in 'changes'. + # If a field is not in 'changes', read its value from the provided obj. + + for f in getattr(obj, _FIELDS).values(): + if not f.init: + # Error if this field is specified in changes.
+ if f.name in changes: + raise ValueError( + f"field {f.name} is declared with " + "init=False, it cannot be specified with " + "replace()" + ) + continue + + if f.name not in changes: + changes[f.name] = getattr(obj, f.name) + + # Create the new object, which calls __init__() and + # __post_init__() (if defined), using all of the init fields we've + # added and/or left in 'changes'. If there are values supplied in + # changes that aren't fields, this will correctly raise a + # TypeError. + return obj.__class__(**changes) diff --git a/graphene/types/base.py b/graphene/types/base.py index 29d60fef1..129834b0d 100644 --- a/graphene/types/base.py +++ b/graphene/types/base.py @@ -1,6 +1,6 @@ from typing import Type -from ..utils.subclass_with_meta import SubclassWithMeta +from ..utils.subclass_with_meta import SubclassWithMeta, SubclassWithMeta_Meta from ..utils.trim_docstring import trim_docstring @@ -26,6 +26,9 @@ def __repr__(self): return f"<{self.__class__.__name__} name={repr(self.name)}>" +BaseTypeMeta = SubclassWithMeta_Meta + + class BaseType(SubclassWithMeta): @classmethod def create_type(cls, class_name, **options): diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index 9b6c7d7df..c16f50b8f 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -1,8 +1,13 @@ -from .base import BaseOptions, BaseType +from .base import BaseOptions, BaseType, BaseTypeMeta from .field import Field from .interface import Interface from .utils import yank_fields_from_attrs +try: + from dataclasses import make_dataclass, field +except ImportError: + from ..pyutils.dataclasses import make_dataclass, field # type: ignore + # For static type checking with Mypy MYPY = False if MYPY: @@ -14,7 +19,34 @@ class ObjectTypeOptions(BaseOptions): interfaces = () # type: Iterable[Type[Interface]] -class ObjectType(BaseType): +class ObjectTypeMeta(BaseTypeMeta): + def __new__(cls, name, bases, namespace): + # We create this type, to then overload 
it with the dataclass attrs + class InterObjectType: + pass + + base_cls = super().__new__(cls, name, (InterObjectType,) + bases, namespace) + if base_cls._meta: + fields = [ + ( + key, + "typing.Any", + field( + default=field_value.default_value + if isinstance(field_value, Field) + else None + ), + ) + for key, field_value in base_cls._meta.fields.items() + ] + dataclass = make_dataclass(name, fields, bases=()) + InterObjectType.__init__ = dataclass.__init__ + InterObjectType.__eq__ = dataclass.__eq__ + InterObjectType.__repr__ = dataclass.__repr__ + return base_cls + + +class ObjectType(BaseType, metaclass=ObjectTypeMeta): """ Object Type Definition @@ -127,44 +159,3 @@ def __init_subclass_with_meta__( super(ObjectType, cls).__init_subclass_with_meta__(_meta=_meta, **options) is_type_of = None - - def __init__(self, *args, **kwargs): - # ObjectType acting as container - args_len = len(args) - fields = self._meta.fields.items() - if args_len > len(fields): - # Daft, but matches old exception sans the err msg. 
- raise IndexError("Number of args exceeds number of fields") - fields_iter = iter(fields) - - if not kwargs: - for val, (name, field) in zip(args, fields_iter): - setattr(self, name, val) - else: - for val, (name, field) in zip(args, fields_iter): - setattr(self, name, val) - kwargs.pop(name, None) - - for name, field in fields_iter: - try: - val = kwargs.pop( - name, field.default_value if isinstance(field, Field) else None - ) - setattr(self, name, val) - except KeyError: - pass - - if kwargs: - for prop in list(kwargs): - try: - if isinstance( - getattr(self.__class__, prop), property - ) or prop.startswith("_"): - setattr(self, prop, kwargs.pop(prop)) - except AttributeError: - pass - if kwargs: - raise TypeError( - f"'{list(kwargs)[0]}' is an invalid keyword argument" - f" for {self.__class__.__name__}" - ) diff --git a/graphene/types/tests/test_objecttype.py b/graphene/types/tests/test_objecttype.py index 25025e4d1..85a46867e 100644 --- a/graphene/types/tests/test_objecttype.py +++ b/graphene/types/tests/test_objecttype.py @@ -83,6 +83,10 @@ class MyObjectType(ObjectType): def test_generate_objecttype_with_private_attributes(): class MyObjectType(ObjectType): + def __init__(self, _private_state=None, **kwargs): + self._private_state = _private_state + super().__init__(**kwargs) + _private_state = None assert "_private_state" not in MyObjectType._meta.fields @@ -155,6 +159,20 @@ def test_objecttype_as_container_only_args(): assert container.field2 == "2" +def test_objecttype_repr(): + container = Container("1", "2") + assert repr(container) == "Container(field1='1', field2='2')" + + +def test_objecttype_eq(): + container1 = Container("1", "2") + container2 = Container("1", "2") + container3 = Container("2", "3") + assert container1 == container1 + assert container1 == container2 + assert container2 != container3 + + def test_objecttype_as_container_args_kwargs(): container = Container("1", field2="2") assert container.field1 == "1" @@ -173,17 +191,19 @@ def 
test_objecttype_as_container_all_kwargs(): def test_objecttype_as_container_extra_args(): - with raises(IndexError) as excinfo: + with raises(TypeError) as excinfo: Container("1", "2", "3") - assert "Number of args exceeds number of fields" == str(excinfo.value) + assert "__init__() takes from 1 to 3 positional arguments but 4 were given" == str( + excinfo.value + ) def test_objecttype_as_container_invalid_kwargs(): with raises(TypeError) as excinfo: Container(unexisting_field="3") - assert "'unexisting_field' is an invalid keyword argument for Container" == str( + assert "__init__() got an unexpected keyword argument 'unexisting_field'" == str( excinfo.value ) From 7a1e9d7798f6ce2c7742799ed83cba2b24e03afa Mon Sep 17 00:00:00 2001 From: Kimball Leavitt Date: Tue, 14 Apr 2020 03:25:10 -0600 Subject: [PATCH 040/141] added graphene import to READMEs (#1183) it's nice to just be able to copy/paste the entire example without having to remember the import --- README.md | 2 ++ README.rst | 2 ++ 2 files changed, 4 insertions(+) diff --git a/README.md b/README.md index e7bc5a60f..85849a3dc 100644 --- a/README.md +++ b/README.md @@ -45,6 +45,8 @@ pip install "graphene>=2.0" Here is one example for you to get started: ```python +import graphene + class Query(graphene.ObjectType): hello = graphene.String(description='A typical hello world') diff --git a/README.rst b/README.rst index bb8fecb28..243215e48 100644 --- a/README.rst +++ b/README.rst @@ -67,6 +67,8 @@ Here is one example for you to get started: .. 
code:: python + import graphene + class Query(graphene.ObjectType): hello = graphene.String(description='A typical hello world') From 133a831ab923aec1744862811b21177cc1672e65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Kowalski?= Date: Fri, 17 Apr 2020 14:27:22 +0200 Subject: [PATCH 041/141] =?UTF-8?q?Update=20excluded=20packages=20list=20t?= =?UTF-8?q?o=20properly=20exclude=20examples=20pack=E2=80=A6=20(#1187)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * examples package will not be installed with graphene --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4b3369895..ec63b12f4 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ def run_tests(self): "Programming Language :: Python :: 3.8", ], keywords="api graphql protocol rest relay graphene", - packages=find_packages(exclude=["tests", "tests.*", "examples"]), + packages=find_packages(exclude=["tests", "tests.*", "examples*"]), install_requires=[ "graphql-core>=3.1.0b1,<4", "graphql-relay>=3.0,<4", From 12ec8dc007a922a49330e4512737b5beb8eb87b8 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Sun, 26 Apr 2020 11:44:16 +0100 Subject: [PATCH 042/141] Don't exclude tests from distribution --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ec63b12f4..d924f9f46 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ def run_tests(self): "Programming Language :: Python :: 3.8", ], keywords="api graphql protocol rest relay graphene", - packages=find_packages(exclude=["tests", "tests.*", "examples*"]), + packages=find_packages(exclude=["examples*"]), install_requires=[ "graphql-core>=3.1.0b1,<4", "graphql-relay>=3.0,<4", From 396b278affd76cb3a5acae9182959b5f87b5244e Mon Sep 17 00:00:00 2001 From: Kevin Harvey Date: Wed, 29 Apr 2020 07:38:56 -0500 Subject: [PATCH 043/141] Fix typos (#1192) --- docs/types/schema.rst | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/docs/types/schema.rst b/docs/types/schema.rst index 1af5c2947..0cf777ae4 100644 --- a/docs/types/schema.rst +++ b/docs/types/schema.rst @@ -18,8 +18,8 @@ Schema will collect all type definitions related to the root operations and then A Root Query is just a special :ref:`ObjectType` that :ref:`defines the fields ` that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types: * Query fetches data -* Mutation to changes data and retrieve the changes -* Subscription to sends changes to clients in real time +* Mutation changes data and retrieves the changes +* Subscription sends changes to clients in real time Review the `GraphQL documentation on Schema`_ for a brief overview of fields, schema and operations. @@ -56,7 +56,7 @@ In this case, we need to use the ``types`` argument when creating the Schema. .. _SchemaAutoCamelCase: -Auto CamelCase field names +Auto camelCase field names -------------------------- By default all field and argument names (that are not From df67e691298fd06568e22215cfb5901c883ed4e1 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Sat, 9 May 2020 13:04:05 +0100 Subject: [PATCH 044/141] v3.0b2 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index f99f589d0..56abf96e1 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -42,7 +42,7 @@ from .utils.module_loading import lazy_import -VERSION = (3, 0, 0, "beta", 1) +VERSION = (3, 0, 0, "beta", 2) __version__ = get_version(VERSION) From 0723dd1d6c76d7287a9a452cb92072a2d1674668 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Sat, 9 May 2020 13:31:55 +0100 Subject: [PATCH 045/141] Update issue templates --- .github/ISSUE_TEMPLATE/bug_report.md | 34 +++++++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 20 +++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 
.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..64573d243 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,34 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: "\U0001F41B bug" +assignees: '' + +--- + +**Note: for support questions, please use stackoverflow**. This repository's issues are reserved for feature requests and bug reports. + +* **What is the current behavior?** + + + +* **If the current behavior is a bug, please provide the steps to reproduce and if possible a minimal demo of the problem** via +a github repo, https://repl.it or similar. + + + +* **What is the expected behavior?** + + + +* **What is the motivation / use case for changing the behavior?** + + + +* **Please tell us about your environment:** + + - Version: + - Platform: + +* **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. stackoverflow) diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..c0d97a697 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: "✨ enhancement" +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
From d6acfc6eae730581bd012eee675b5833efe770d7 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Sat, 9 May 2020 13:32:38 +0100 Subject: [PATCH 046/141] Create config.yml --- .github/ISSUE_TEMPLATE/config.yml | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/ISSUE_TEMPLATE/config.yml diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..3ba13e0ce --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1 @@ +blank_issues_enabled: false From 9b756bf12c2f32bfae50f02adcaebab4b50594ee Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Sat, 9 May 2020 13:32:52 +0100 Subject: [PATCH 047/141] Delete CODEOWNERS --- CODEOWNERS | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 CODEOWNERS diff --git a/CODEOWNERS b/CODEOWNERS deleted file mode 100644 index 7137418ef..000000000 --- a/CODEOWNERS +++ /dev/null @@ -1,3 +0,0 @@ -* @ekampf @dan98765 @projectcheshire @jkimbo -/docs/ @dvndrsn @phalt @changeling -/examples/ @dvndrsn @phalt @changeling From b0c8a17ec712ff8b332eb5e4227ce48dc8f439ea Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Wed, 20 May 2020 06:12:41 +0100 Subject: [PATCH 048/141] Fix issue with trailing whitespace (#1197) --- .github/ISSUE_TEMPLATE/bug_report.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 64573d243..c37bc75a9 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -27,8 +27,8 @@ a github repo, https://repl.it or similar. * **Please tell us about your environment:** - - - Version: - - Platform: + + - Version: + - Platform: * **Other information** (e.g. detailed explanation, stacktraces, related issues, suggestions how to fix, links for us to have context, eg. 
stackoverflow) From 966aba06cd7a203eae9643cf20cbc53cbf7556fc Mon Sep 17 00:00:00 2001 From: dbgb <1745377+dbgb@users.noreply.github.com> Date: Thu, 28 May 2020 14:41:38 +0100 Subject: [PATCH 049/141] Fix typo in quickstart document (#1201) --- docs/quickstart.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart.rst b/docs/quickstart.rst index d2ac83bed..62d11949a 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -103,7 +103,7 @@ For each **Field** in our **Schema**, we write a **Resolver** method to fetch da Schema Definition Language (SDL) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In the `GraphQL Schema Definition Language`_, we could describe the fields defined by our example code as show below. +In the `GraphQL Schema Definition Language`_, we could describe the fields defined by our example code as shown below. .. _GraphQL Schema Definition Language: https://graphql.org/learn/schema/ From 47c63f3dd79dd90e81869cd815674e532933bce5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 5 Jun 2020 06:30:23 +0200 Subject: [PATCH 050/141] Fix DateTime Scalar parse_literal methods (#1199) (#1200) --- graphene/types/datetime.py | 6 ++-- graphene/types/tests/test_datetime.py | 51 +++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 3 deletions(-) diff --git a/graphene/types/datetime.py b/graphene/types/datetime.py index 92234ba67..d4f74470b 100644 --- a/graphene/types/datetime.py +++ b/graphene/types/datetime.py @@ -25,7 +25,7 @@ def serialize(date): return date.isoformat() @classmethod - def parse_literal(cls, node): + def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"Date cannot represent non-string value: {print_ast(node)}" @@ -58,7 +58,7 @@ def serialize(dt): return dt.isoformat() @classmethod - def parse_literal(cls, node): + def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"DateTime cannot 
represent non-string value: {print_ast(node)}" @@ -93,7 +93,7 @@ def serialize(time): return time.isoformat() @classmethod - def parse_literal(cls, node): + def parse_literal(cls, node, _variables=None): if not isinstance(node, StringValueNode): raise GraphQLError( f"Time cannot represent non-string value: {print_ast(node)}" diff --git a/graphene/types/tests/test_datetime.py b/graphene/types/tests/test_datetime.py index 6a3241a17..74f88bd88 100644 --- a/graphene/types/tests/test_datetime.py +++ b/graphene/types/tests/test_datetime.py @@ -60,6 +60,23 @@ def test_datetime_query(sample_datetime): assert result.data == {"datetime": isoformat} +def test_datetime_query_with_variables(sample_datetime): + isoformat = sample_datetime.isoformat() + + result = schema.execute( + """ + query GetDate($datetime: DateTime) { + literal: datetime(in: "%s") + value: datetime(in: $datetime) + } + """ + % isoformat, + variable_values={"datetime": isoformat}, + ) + assert not result.errors + assert result.data == {"literal": isoformat, "value": isoformat} + + def test_date_query(sample_date): isoformat = sample_date.isoformat() @@ -68,6 +85,23 @@ def test_date_query(sample_date): assert result.data == {"date": isoformat} +def test_date_query_with_variables(sample_date): + isoformat = sample_date.isoformat() + + result = schema.execute( + """ + query GetDate($date: Date) { + literal: date(in: "%s") + value: date(in: $date) + } + """ + % isoformat, + variable_values={"date": isoformat}, + ) + assert not result.errors + assert result.data == {"literal": isoformat, "value": isoformat} + + def test_time_query(sample_time): isoformat = sample_time.isoformat() @@ -76,6 +110,23 @@ def test_time_query(sample_time): assert result.data == {"time": isoformat} +def test_time_query_with_variables(sample_time): + isoformat = sample_time.isoformat() + + result = schema.execute( + """ + query GetTime($time: Time) { + literal: time(at: "%s") + value: time(at: $time) + } + """ + % isoformat, + 
variable_values={"time": isoformat}, + ) + assert not result.errors + assert result.data == {"literal": isoformat, "value": isoformat} + + def test_bad_datetime_query(): not_a_date = "Some string that's not a datetime" From 4b701860319fc46c8bdf8f936908ba1dff991cc4 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 25 Jun 2020 03:18:59 +0100 Subject: [PATCH 051/141] Remove @staticmethod decorator in mutations doc (#1206) --- docs/types/mutations.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/types/mutations.rst b/docs/types/mutations.rst index d63ada3ed..f8c76f350 100644 --- a/docs/types/mutations.rst +++ b/docs/types/mutations.rst @@ -104,7 +104,6 @@ To use an InputField you define an InputObjectType that specifies the structure person = graphene.Field(Person) - @staticmethod def mutate(root, info, person_data=None): person = Person( name=person_data.name, From a1fc3688aa56cdf01e25d15e3be9c5f2a50694ac Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 25 Jun 2020 03:21:40 +0100 Subject: [PATCH 052/141] Remove to_const function (#1212) --- graphene/utils/str_converters.py | 5 ----- graphene/utils/tests/test_str_converters.py | 10 +--------- setup.py | 1 - 3 files changed, 1 insertion(+), 15 deletions(-) diff --git a/graphene/utils/str_converters.py b/graphene/utils/str_converters.py index 9ac8461f3..2a214f064 100644 --- a/graphene/utils/str_converters.py +++ b/graphene/utils/str_converters.py @@ -1,5 +1,4 @@ import re -from unidecode import unidecode # Adapted from this response in Stackoverflow @@ -16,7 +15,3 @@ def to_camel_case(snake_str): def to_snake_case(name): s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() - - -def to_const(string): - return re.sub(r"[\W|^]+", "_", unidecode(string)).upper() diff --git a/graphene/utils/tests/test_str_converters.py b/graphene/utils/tests/test_str_converters.py index d765906c8..307d68771 100644 --- a/graphene/utils/tests/test_str_converters.py +++ 
b/graphene/utils/tests/test_str_converters.py @@ -1,5 +1,5 @@ # coding: utf-8 -from ..str_converters import to_camel_case, to_const, to_snake_case +from ..str_converters import to_camel_case, to_snake_case def test_snake_case(): @@ -17,11 +17,3 @@ def test_camel_case(): assert to_camel_case("snakes_on_a__plane") == "snakesOnA_Plane" assert to_camel_case("i_phone_hysteria") == "iPhoneHysteria" assert to_camel_case("field_i18n") == "fieldI18n" - - -def test_to_const(): - assert to_const('snakes $1. on a "#plane') == "SNAKES_1_ON_A_PLANE" - - -def test_to_const_unicode(): - assert to_const("Skoða þetta unicode stöff") == "SKODA_THETTA_UNICODE_STOFF" diff --git a/setup.py b/setup.py index d924f9f46..34ee7dba1 100644 --- a/setup.py +++ b/setup.py @@ -85,7 +85,6 @@ def run_tests(self): "graphql-core>=3.1.0b1,<4", "graphql-relay>=3.0,<4", "aniso8601>=8,<9", - "unidecode>=1.1.1,<2", ], tests_require=tests_require, extras_require={"test": tests_require, "dev": dev_requires}, From a9625dac0ed07d5fb35adc83cbc41387755f080f Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 25 Jun 2020 03:22:22 +0100 Subject: [PATCH 053/141] Update requirement for Query type in mutation docs (#1213) --- docs/types/mutations.rst | 6 +----- graphene/types/schema.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/docs/types/mutations.rst b/docs/types/mutations.rst index f8c76f350..543f39b45 100644 --- a/docs/types/mutations.rst +++ b/docs/types/mutations.rst @@ -48,11 +48,7 @@ So, we can finish our schema like this: class MyMutations(graphene.ObjectType): create_person = CreatePerson.Field() - # We must define a query for our schema - class Query(graphene.ObjectType): - person = graphene.Field(Person) - - schema = graphene.Schema(query=Query, mutation=MyMutations) + schema = graphene.Schema(mutation=MyMutations) Executing the Mutation ---------------------- diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 29ead4a70..0b6e9dac0 100644 --- 
a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -381,7 +381,7 @@ class Schema: questions about the types through introspection. Args: - query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read* + query (Optional[Type[ObjectType]]): Root query *ObjectType*. Describes entry point for fields to *read* data in your Schema. mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for fields to *create, update or delete* data in your API. From 05d96a983374f368bb1a2124be5878545ba3d2a9 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 25 Jun 2020 17:57:42 +0100 Subject: [PATCH 054/141] v3.0.0b3 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index 56abf96e1..a59347dde 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -42,7 +42,7 @@ from .utils.module_loading import lazy_import -VERSION = (3, 0, 0, "beta", 2) +VERSION = (3, 0, 0, "beta", 3) __version__ = get_version(VERSION) From bf034ca85fe67ff35b99f41a10f0f65e242eab38 Mon Sep 17 00:00:00 2001 From: Jonathan Ehwald Date: Sat, 27 Jun 2020 12:18:11 +0200 Subject: [PATCH 055/141] Rename variables called type to type_ (#1216) Co-authored-by: Daniel Gallagher --- docs/relay/nodes.rst | 12 ++++++------ graphene/pyutils/dataclasses.py | 4 +++- graphene/relay/connection.py | 14 +++++++------- graphene/relay/node.py | 10 +++++----- graphene/relay/tests/test_node_custom.py | 2 +- graphene/types/argument.py | 6 +++--- graphene/types/dynamic.py | 6 +++--- graphene/types/field.py | 6 +++--- graphene/types/inputfield.py | 6 +++--- graphene/types/tests/test_definition.py | 8 ++++---- 10 files changed, 38 insertions(+), 36 deletions(-) diff --git a/docs/relay/nodes.rst b/docs/relay/nodes.rst index ce9bc7d8c..285dbb20d 100644 --- a/docs/relay/nodes.rst +++ b/docs/relay/nodes.rst @@ -51,20 +51,20 @@ Example of a custom node: name = 'Node' @staticmethod - 
def to_global_id(type, id): - return f"{type}:{id}" + def to_global_id(type_, id): + return f"{type_}:{id}" @staticmethod def get_node_from_global_id(info, global_id, only_type=None): - type, id = global_id.split(':') + type_, id = global_id.split(':') if only_type: # We assure that the node type that we want to retrieve # is the same that was indicated in the field type - assert type == only_type._meta.name, 'Received not compatible node.' + assert type_ == only_type._meta.name, 'Received not compatible node.' - if type == 'User': + if type_ == 'User': return get_user(id) - elif type == 'Photo': + elif type_ == 'Photo': return get_photo(id) diff --git a/graphene/pyutils/dataclasses.py b/graphene/pyutils/dataclasses.py index 61f0ea388..19530eff1 100644 --- a/graphene/pyutils/dataclasses.py +++ b/graphene/pyutils/dataclasses.py @@ -845,7 +845,9 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # Now find fields in our class. While doing so, validate some # things, and set the default values (as class attributes) where # we can. 
- cls_fields = [_get_field(cls, name, type) for name, type in cls_annotations.items()] + cls_fields = [ + _get_field(cls, name, type_) for name, type_ in cls_annotations.items() + ] for f in cls_fields: fields[f.name] = f diff --git a/graphene/relay/connection.py b/graphene/relay/connection.py index 90b558a1b..cfb6cb633 100644 --- a/graphene/relay/connection.py +++ b/graphene/relay/connection.py @@ -117,19 +117,19 @@ def connection_adapter(cls, edges, pageInfo): class IterableConnectionField(Field): - def __init__(self, type, *args, **kwargs): + def __init__(self, type_, *args, **kwargs): kwargs.setdefault("before", String()) kwargs.setdefault("after", String()) kwargs.setdefault("first", Int()) kwargs.setdefault("last", Int()) - super(IterableConnectionField, self).__init__(type, *args, **kwargs) + super(IterableConnectionField, self).__init__(type_, *args, **kwargs) @property def type(self): - type = super(IterableConnectionField, self).type - connection_type = type - if isinstance(type, NonNull): - connection_type = type.of_type + type_ = super(IterableConnectionField, self).type + connection_type = type_ + if isinstance(type_, NonNull): + connection_type = type_.of_type if is_node(connection_type): raise Exception( @@ -140,7 +140,7 @@ def type(self): assert issubclass( connection_type, Connection ), f'{self.__class__.__name__} type has to be a subclass of Connection. Received "{connection_type}".' 
- return type + return type_ @classmethod def resolve_connection(cls, connection_type, args, resolved): diff --git a/graphene/relay/node.py b/graphene/relay/node.py index a9d36adc0..13fb8cea2 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -47,15 +47,15 @@ def get_resolver(self, parent_resolver): class NodeField(Field): - def __init__(self, node, type=False, **kwargs): + def __init__(self, node, type_=False, **kwargs): assert issubclass(node, Node), "NodeField can only operate in Nodes" self.node_type = node - self.field_type = type + self.field_type = type_ super(NodeField, self).__init__( # If we don's specify a type, the field type will be the node # interface - type or node, + type_ or node, id=ID(required=True, description="The ID of the object"), **kwargs, ) @@ -125,5 +125,5 @@ def from_global_id(cls, global_id): return from_global_id(global_id) @classmethod - def to_global_id(cls, type, id): - return to_global_id(type, id) + def to_global_id(cls, type_, id): + return to_global_id(type_, id) diff --git a/graphene/relay/tests/test_node_custom.py b/graphene/relay/tests/test_node_custom.py index cba7366b0..30d62e7ba 100644 --- a/graphene/relay/tests/test_node_custom.py +++ b/graphene/relay/tests/test_node_custom.py @@ -11,7 +11,7 @@ class Meta: name = "Node" @staticmethod - def to_global_id(type, id): + def to_global_id(type_, id): return id @staticmethod diff --git a/graphene/types/argument.py b/graphene/types/argument.py index 897b7ecd2..71026d45b 100644 --- a/graphene/types/argument.py +++ b/graphene/types/argument.py @@ -40,7 +40,7 @@ class Argument(MountedType): def __init__( self, - type, + type_, default_value=None, description=None, name=None, @@ -50,10 +50,10 @@ def __init__( super(Argument, self).__init__(_creation_counter=_creation_counter) if required: - type = NonNull(type) + type_ = NonNull(type_) self.name = name - self._type = type + self._type = type_ self.default_value = default_value self.description = description diff 
--git a/graphene/types/dynamic.py b/graphene/types/dynamic.py index 588c53bbf..3bb2b0fde 100644 --- a/graphene/types/dynamic.py +++ b/graphene/types/dynamic.py @@ -10,10 +10,10 @@ class Dynamic(MountedType): the schema. So we can have lazy fields. """ - def __init__(self, type, with_schema=False, _creation_counter=None): + def __init__(self, type_, with_schema=False, _creation_counter=None): super(Dynamic, self).__init__(_creation_counter=_creation_counter) - assert inspect.isfunction(type) or isinstance(type, partial) - self.type = type + assert inspect.isfunction(type_) or isinstance(type_, partial) + self.type = type_ self.with_schema = with_schema def get_type(self, schema=None): diff --git a/graphene/types/field.py b/graphene/types/field.py index f0a28eb32..1a1ccf93b 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -64,7 +64,7 @@ class Person(ObjectType): def __init__( self, - type, + type_, args=None, resolver=None, source=None, @@ -88,7 +88,7 @@ def __init__( ), f'The default value can not be a function but received "{base_type(default_value)}".' 
if required: - type = NonNull(type) + type_ = NonNull(type_) # Check if name is actually an argument of the field if isinstance(name, (Argument, UnmountedType)): @@ -101,7 +101,7 @@ def __init__( source = None self.name = name - self._type = type + self._type = type_ self.args = to_arguments(args or {}, extra_args) if source: resolver = partial(source_resolver, source) diff --git a/graphene/types/inputfield.py b/graphene/types/inputfield.py index 24d84b6c4..791ca6a48 100644 --- a/graphene/types/inputfield.py +++ b/graphene/types/inputfield.py @@ -48,7 +48,7 @@ class Person(InputObjectType): def __init__( self, - type, + type_, name=None, default_value=Undefined, deprecation_reason=None, @@ -60,8 +60,8 @@ def __init__( super(InputField, self).__init__(_creation_counter=_creation_counter) self.name = name if required: - type = NonNull(type) - self._type = type + type_ = NonNull(type_) + self._type = type_ self.deprecation_reason = deprecation_reason self.default_value = default_value self.description = description diff --git a/graphene/types/tests/test_definition.py b/graphene/types/tests/test_definition.py index b3b480af0..0d8a95dfa 100644 --- a/graphene/types/tests/test_definition.py +++ b/graphene/types/tests/test_definition.py @@ -234,10 +234,10 @@ def test_stringifies_simple_types(): # (InputObjectType, True) # ) -# for type, answer in expected: -# assert is_input_type(type) == answer -# assert is_input_type(GraphQLList(type)) == answer -# assert is_input_type(GraphQLNonNull(type)) == answer +# for type_, answer in expected: +# assert is_input_type(type_) == answer +# assert is_input_type(GraphQLList(type_)) == answer +# assert is_input_type(GraphQLNonNull(type_)) == answer # def test_identifies_output_types(): From 324df19d3d92bad33b6261ae678b49caaeb0ea96 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 29 Jun 2020 05:54:36 +0100 Subject: [PATCH 056/141] Set min version of graphql-core to v3.1.1 (#1215) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/setup.py b/setup.py index 34ee7dba1..24bddcf90 100644 --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ def run_tests(self): keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["examples*"]), install_requires=[ - "graphql-core>=3.1.0b1,<4", + "graphql-core>=3.1.1,<4", "graphql-relay>=3.0,<4", "aniso8601>=8,<9", ], From ecd11ccc1e53d965e355742e9fef19bc9286522e Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 29 Jun 2020 15:53:53 +0100 Subject: [PATCH 057/141] Revert 1213 update mutation docs (#1214) * Revert "Update requirement for Query type in mutation docs (#1213)" This reverts commit a9625dac0ed07d5fb35adc83cbc41387755f080f. * Add test to check that Query type must be defined --- docs/types/mutations.rst | 6 +++++- graphene/types/schema.py | 2 +- graphene/types/tests/test_schema.py | 9 +++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/docs/types/mutations.rst b/docs/types/mutations.rst index 543f39b45..f8c76f350 100644 --- a/docs/types/mutations.rst +++ b/docs/types/mutations.rst @@ -48,7 +48,11 @@ So, we can finish our schema like this: class MyMutations(graphene.ObjectType): create_person = CreatePerson.Field() - schema = graphene.Schema(mutation=MyMutations) + # We must define a query for our schema + class Query(graphene.ObjectType): + person = graphene.Field(Person) + + schema = graphene.Schema(query=Query, mutation=MyMutations) Executing the Mutation ---------------------- diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 0b6e9dac0..29ead4a70 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -381,7 +381,7 @@ class Schema: questions about the types through introspection. Args: - query (Optional[Type[ObjectType]]): Root query *ObjectType*. Describes entry point for fields to *read* + query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read* data in your Schema. 
mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for fields to *create, update or delete* data in your API. diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index 7a1c299a3..0c85e1708 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -59,3 +59,12 @@ def test_schema_str(): def test_schema_introspect(): schema = Schema(Query) assert "__schema" in schema.introspect() + + +def test_schema_requires_query_type(): + schema = Schema() + result = schema.execute("query {}") + + assert len(result.errors) == 1 + error = result.errors[0] + assert error.message == "Query root type must be provided." From 5b2eb1109a58dfddfbf9ee40f35f28435bf14ac6 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 29 Jun 2020 23:26:08 +0100 Subject: [PATCH 058/141] ObjectType meta arguments (#1219) * Pass extra kwargs down the meta chain * Rename name argument to allow custom name * Reword error message * Explicitly define kwargs * Revert change to explicit kwargs * name -> name_ for Enum __new__ function --- graphene/types/base.py | 2 +- graphene/types/enum.py | 4 ++-- graphene/types/objecttype.py | 10 +++++++--- graphene/types/tests/test_objecttype.py | 18 ++++++++++++++++++ 4 files changed, 28 insertions(+), 6 deletions(-) diff --git a/graphene/types/base.py b/graphene/types/base.py index 129834b0d..84cb377a2 100644 --- a/graphene/types/base.py +++ b/graphene/types/base.py @@ -38,7 +38,7 @@ def create_type(cls, class_name, **options): def __init_subclass_with_meta__( cls, name=None, description=None, _meta=None, **_kwargs ): - assert "_meta" not in cls.__dict__, "Can't assign directly meta" + assert "_meta" not in cls.__dict__, "Can't assign meta directly" if not _meta: return _meta.name = name or cls.__name__ diff --git a/graphene/types/enum.py b/graphene/types/enum.py index 1d290a20e..70e8ee8e0 100644 --- a/graphene/types/enum.py +++ b/graphene/types/enum.py @@ -21,14 
+21,14 @@ class EnumOptions(BaseOptions): class EnumMeta(SubclassWithMeta_Meta): - def __new__(cls, name, bases, classdict, **options): + def __new__(cls, name_, bases, classdict, **options): enum_members = dict(classdict, __eq__=eq_enum) # We remove the Meta attribute from the class to not collide # with the enum values. enum_members.pop("Meta", None) enum = PyEnum(cls.__name__, enum_members) return SubclassWithMeta_Meta.__new__( - cls, name, bases, dict(classdict, __enum__=enum), **options + cls, name_, bases, dict(classdict, __enum__=enum), **options ) def get(cls, value): diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index c16f50b8f..f4a0f5a0e 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -20,12 +20,16 @@ class ObjectTypeOptions(BaseOptions): class ObjectTypeMeta(BaseTypeMeta): - def __new__(cls, name, bases, namespace): + def __new__(cls, name_, bases, namespace, **options): + # Note: it's safe to pass options as keyword arguments as they are still type-checked by ObjectTypeOptions. 
+ # We create this type, to then overload it with the dataclass attrs class InterObjectType: pass - base_cls = super().__new__(cls, name, (InterObjectType,) + bases, namespace) + base_cls = super().__new__( + cls, name_, (InterObjectType,) + bases, namespace, **options, + ) if base_cls._meta: fields = [ ( @@ -39,7 +43,7 @@ class InterObjectType: ) for key, field_value in base_cls._meta.fields.items() ] - dataclass = make_dataclass(name, fields, bases=()) + dataclass = make_dataclass(name_, fields, bases=()) InterObjectType.__init__ = dataclass.__init__ InterObjectType.__eq__ = dataclass.__eq__ InterObjectType.__repr__ = dataclass.__repr__ diff --git a/graphene/types/tests/test_objecttype.py b/graphene/types/tests/test_objecttype.py index 85a46867e..1ff8fc8f7 100644 --- a/graphene/types/tests/test_objecttype.py +++ b/graphene/types/tests/test_objecttype.py @@ -295,3 +295,21 @@ def resolve_hello(self, info): schema = Schema(query=Query) assert schema is not None + + +def test_objecttype_meta_arguments(): + class MyInterface(Interface): + foo = String() + + class MyType(ObjectType, interfaces=[MyInterface]): + bar = String() + + assert MyType._meta.interfaces == [MyInterface] + assert list(MyType._meta.fields.keys()) == ["foo", "bar"] + + +def test_objecttype_type_name(): + class MyObjectType(ObjectType, name="FooType"): + pass + + assert MyObjectType._meta.name == "FooType" From c61f0f736aab9391fcc21b346d4b6d9db4b34a92 Mon Sep 17 00:00:00 2001 From: Eric Rodrigues Pires Date: Thu, 2 Jul 2020 14:52:44 -0300 Subject: [PATCH 059/141] Add Base64 scalar (#1221) --- graphene/types/base64.py | 43 ++++++++++++ graphene/types/tests/test_base64.py | 101 ++++++++++++++++++++++++++++ 2 files changed, 144 insertions(+) create mode 100644 graphene/types/base64.py create mode 100644 graphene/types/tests/test_base64.py diff --git a/graphene/types/base64.py b/graphene/types/base64.py new file mode 100644 index 000000000..baedabebe --- /dev/null +++ b/graphene/types/base64.py @@ -0,0 
+1,43 @@ +from binascii import Error as _Error +from base64 import b64decode, b64encode + +from graphql.error import GraphQLError +from graphql.language import StringValueNode, print_ast + +from .scalars import Scalar + + +class Base64(Scalar): + """ + The `Base64` scalar type represents a base64-encoded String. + """ + + @staticmethod + def serialize(value): + if not isinstance(value, bytes): + if isinstance(value, str): + value = value.encode("utf-8") + else: + value = str(value).encode("utf-8") + return b64encode(value).decode("utf-8") + + @classmethod + def parse_literal(cls, node): + if not isinstance(node, StringValueNode): + raise GraphQLError( + f"Base64 cannot represent non-string value: {print_ast(node)}" + ) + return cls.parse_value(node.value) + + @staticmethod + def parse_value(value): + if not isinstance(value, bytes): + if not isinstance(value, str): + raise GraphQLError( + f"Base64 cannot represent non-string value: {repr(value)}" + ) + value = value.encode("utf-8") + try: + return b64decode(value, validate=True).decode("utf-8") + except _Error: + raise GraphQLError(f"Base64 cannot decode value: {repr(value)}") diff --git a/graphene/types/tests/test_base64.py b/graphene/types/tests/test_base64.py new file mode 100644 index 000000000..b096dcbc8 --- /dev/null +++ b/graphene/types/tests/test_base64.py @@ -0,0 +1,101 @@ +import base64 + +from graphql import GraphQLError + +from ..objecttype import ObjectType +from ..scalars import String +from ..schema import Schema +from ..base64 import Base64 + + +class Query(ObjectType): + base64 = Base64(_in=Base64(name="input"), _match=String(name="match")) + bytes_as_base64 = Base64() + string_as_base64 = Base64() + number_as_base64 = Base64() + + def resolve_base64(self, info, _in=None, _match=None): + if _match: + assert _in == _match + return _in + + def resolve_bytes_as_base64(self, info): + return b"Hello world" + + def resolve_string_as_base64(self, info): + return "Spam and eggs" + + def 
resolve_number_as_base64(self, info): + return 42 + + +schema = Schema(query=Query) + + +def test_base64_query(): + base64_value = base64.b64encode(b"Random string").decode("utf-8") + result = schema.execute( + """{{ base64(input: "{}", match: "Random string") }}""".format(base64_value) + ) + assert not result.errors + assert result.data == {"base64": base64_value} + + +def test_base64_query_with_variable(): + base64_value = base64.b64encode(b"Another string").decode("utf-8") + + # test datetime variable in string representation + result = schema.execute( + """ + query GetBase64($base64: Base64) { + base64(input: $base64, match: "Another string") + } + """, + variables={"base64": base64_value}, + ) + assert not result.errors + assert result.data == {"base64": base64_value} + + +def test_base64_query_none(): + result = schema.execute("""{ base64 }""") + assert not result.errors + assert result.data == {"base64": None} + + +def test_base64_query_invalid(): + bad_inputs = [ + dict(), + 123, + "This is not valid base64", + ] + + for input_ in bad_inputs: + result = schema.execute( + """{ base64(input: $input) }""", variables={"input": input_}, + ) + assert isinstance(result.errors, list) + assert len(result.errors) == 1 + assert isinstance(result.errors[0], GraphQLError) + assert result.data is None + + +def test_base64_from_bytes(): + base64_value = base64.b64encode(b"Hello world").decode("utf-8") + result = schema.execute("""{ bytesAsBase64 }""") + assert not result.errors + assert result.data == {"bytesAsBase64": base64_value} + + +def test_base64_from_string(): + base64_value = base64.b64encode(b"Spam and eggs").decode("utf-8") + result = schema.execute("""{ stringAsBase64 }""") + assert not result.errors + assert result.data == {"stringAsBase64": base64_value} + + +def test_base64_from_number(): + base64_value = base64.b64encode(b"42").decode("utf-8") + result = schema.execute("""{ numberAsBase64 }""") + assert not result.errors + assert result.data == 
{"numberAsBase64": base64_value} From d042d5e95a86d9ad9e77abc3dc6f6a37dea99a57 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 9 Jul 2020 17:55:27 +0100 Subject: [PATCH 060/141] Expose Base64 type and add custom scalar examples (#1223) --- docs/api/index.rst | 2 + docs/types/scalars.rst | 185 +++++++++++++++++++++++++++++++++++-- graphene/__init__.py | 112 +++++++++++----------- graphene/types/__init__.py | 63 ++++++------- 4 files changed, 267 insertions(+), 95 deletions(-) diff --git a/docs/api/index.rst b/docs/api/index.rst index 0da427e4d..c5e3b6e1c 100644 --- a/docs/api/index.rst +++ b/docs/api/index.rst @@ -64,6 +64,8 @@ Graphene Scalars .. autoclass:: graphene.JSONString() +.. autoclass:: graphene.Base64() + Enum ---- diff --git a/docs/types/scalars.rst b/docs/types/scalars.rst index 740024839..db20a522d 100644 --- a/docs/types/scalars.rst +++ b/docs/types/scalars.rst @@ -3,6 +3,11 @@ Scalars ======= +Scalar types represent concrete values at the leaves of a query. There are +several built in types that Graphene provides out of the box which represent common +values in Python. You can also create your own Scalar types to better express +values that you might have in your data model. + All Scalar types accept the following arguments. All are optional: ``name``: *string* @@ -27,34 +32,39 @@ All Scalar types accept the following arguments. All are optional: -Base scalars ------------- +Built in scalars +---------------- -Graphene defines the following base Scalar Types: +Graphene defines the following base Scalar Types that match the default `GraphQL types `_: ``graphene.String`` +^^^^^^^^^^^^^^^^^^^ Represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. ``graphene.Int`` +^^^^^^^^^^^^^^^^ Represents non-fractional signed whole numeric values. 
Int is a signed 32‐bit integer per the `GraphQL spec `_ ``graphene.Float`` +^^^^^^^^^^^^^^^^^^ Represents signed double-precision fractional values as specified by `IEEE 754 `_. ``graphene.Boolean`` +^^^^^^^^^^^^^^^^^^^^ Represents `true` or `false`. ``graphene.ID`` +^^^^^^^^^^^^^^^ Represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON @@ -62,24 +72,183 @@ Graphene defines the following base Scalar Types: When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID. -Graphene also provides custom scalars for Dates, Times, and JSON: +---- -``graphene.types.datetime.Date`` +Graphene also provides custom scalars for common values: + +``graphene.Date`` +^^^^^^^^^^^^^^^^^ Represents a Date value as specified by `iso8601 `_. -``graphene.types.datetime.DateTime`` +.. code:: python + + import datetime + from graphene import Schema, ObjectType, Date + + class Query(ObjectType): + one_week_from = Date(required=True, date_input=Date(required=True)) + + def resolve_one_week_from(root, info, date_input): + assert date_input == datetime.date(2006, 1, 2) + return date_input + datetime.timedelta(weeks=1) + + schema = Schema(query=Query) + + results = schema.execute(""" + query { + oneWeekFrom(dateInput: "2006-01-02") + } + """) + + assert results.data == {"oneWeekFrom": "2006-01-09"} + + +``graphene.DateTime`` +^^^^^^^^^^^^^^^^^^^^^ Represents a DateTime value as specified by `iso8601 `_. -``graphene.types.datetime.Time`` +.. 
code:: python + + import datetime + from graphene import Schema, ObjectType, DateTime + + class Query(ObjectType): + one_hour_from = DateTime(required=True, datetime_input=DateTime(required=True)) + + def resolve_one_hour_from(root, info, datetime_input): + assert datetime_input == datetime.datetime(2006, 1, 2, 15, 4, 5) + return datetime_input + datetime.timedelta(hours=1) + + schema = Schema(query=Query) + + results = schema.execute(""" + query { + oneHourFrom(datetimeInput: "2006-01-02T15:04:05") + } + """) + + assert results.data == {"oneHourFrom": "2006-01-02T16:04:05"} + +``graphene.Time`` +^^^^^^^^^^^^^^^^^ Represents a Time value as specified by `iso8601 `_. -``graphene.types.json.JSONString`` +.. code:: python + + import datetime + from graphene import Schema, ObjectType, Time + + class Query(ObjectType): + one_hour_from = Time(required=True, time_input=Time(required=True)) + + def resolve_one_hour_from(root, info, time_input): + assert time_input == datetime.time(15, 4, 5) + tmp_time_input = datetime.datetime.combine(datetime.date(1, 1, 1), time_input) + return (tmp_time_input + datetime.timedelta(hours=1)).time() + + schema = Schema(query=Query) + + results = schema.execute(""" + query { + oneHourFrom(timeInput: "15:04:05") + } + """) + + assert results.data == {"oneHourFrom": "16:04:05"} + +``graphene.Decimal`` +^^^^^^^^^^^^^^^^^^^^ + + Represents a Python Decimal value. + +.. 
code:: python + + import decimal + from graphene import Schema, ObjectType, Decimal + + class Query(ObjectType): + add_one_to = Decimal(required=True, decimal_input=Decimal(required=True)) + + def resolve_add_one_to(root, info, decimal_input): + assert decimal_input == decimal.Decimal("10.50") + return decimal_input + decimal.Decimal("1") + + schema = Schema(query=Query) + + results = schema.execute(""" + query { + addOneTo(decimalInput: "10.50") + } + """) + + assert results.data == {"addOneTo": "11.50"} + +``graphene.JSONString`` +^^^^^^^^^^^^^^^^^^^^^^^ Represents a JSON string. +.. code:: python + + from graphene import Schema, ObjectType, JSONString, String + + class Query(ObjectType): + update_json_key = JSONString( + required=True, + json_input=JSONString(required=True), + key=String(required=True), + value=String(required=True) + ) + + def resolve_update_json_key(root, info, json_input, key, value): + assert json_input == {"name": "Jane"} + json_input[key] = value + return json_input + + schema = Schema(query=Query) + + results = schema.execute(""" + query { + updateJsonKey(jsonInput: "{\\"name\\": \\"Jane\\"}", key: "name", value: "Beth") + } + """) + + assert results.data == {"updateJsonKey": "{\"name\": \"Beth\"}"} + + +``graphene.Base64`` +^^^^^^^^^^^^^^^^^^^ + + Represents a Base64 encoded string. + +.. 
code:: python + + from graphene import Schema, ObjectType, Base64 + + class Query(ObjectType): + increment_encoded_id = Base64( + required=True, + base64_input=Base64(required=True), + ) + + def resolve_increment_encoded_id(root, info, base64_input): + assert base64_input == "4" + return int(base64_input) + 1 + + schema = Schema(query=Query) + + results = schema.execute(""" + query { + incrementEncodedId(base64Input: "NA==") + } + """) + + assert results.data == {"incrementEncodedId": "NQ=="} + + Custom scalars -------------- diff --git a/graphene/__init__.py b/graphene/__init__.py index a59347dde..13eb375d3 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -1,46 +1,45 @@ from .pyutils.version import get_version - +from .relay import ( + ClientIDMutation, + Connection, + ConnectionField, + GlobalID, + Node, + PageInfo, + is_node, +) from .types import ( - ObjectType, - InputObjectType, - Interface, - Mutation, - Field, - InputField, - Schema, - Scalar, - String, ID, - Int, - Float, + UUID, + Argument, + Base64, Boolean, + Context, Date, DateTime, - Time, Decimal, + Dynamic, + Enum, + Field, + Float, + InputField, + InputObjectType, + Int, + Interface, JSONString, - UUID, List, + Mutation, NonNull, - Enum, - Argument, - Dynamic, - Union, - Context, + ObjectType, ResolveInfo, + Scalar, + Schema, + String, + Time, + Union, ) -from .relay import ( - Node, - is_node, - GlobalID, - ClientIDMutation, - Connection, - ConnectionField, - PageInfo, -) -from .utils.resolve_only_args import resolve_only_args from .utils.module_loading import lazy_import - +from .utils.resolve_only_args import resolve_only_args VERSION = (3, 0, 0, "beta", 3) @@ -49,40 +48,41 @@ __all__ = [ "__version__", - "ObjectType", - "InputObjectType", - "Interface", - "Mutation", - "Field", - "InputField", - "Schema", - "Scalar", - "String", - "ID", - "Int", - "Float", - "Enum", + "Argument", + "Base64", "Boolean", + "ClientIDMutation", + "Connection", + "ConnectionField", + "Context", 
"Date", "DateTime", - "Time", "Decimal", + "Dynamic", + "Enum", + "Field", + "Float", + "GlobalID", + "ID", + "InputField", + "InputObjectType", + "Int", + "Interface", "JSONString", - "UUID", "List", + "Mutation", + "Node", "NonNull", - "Argument", - "Dynamic", + "ObjectType", + "PageInfo", + "ResolveInfo", + "Scalar", + "Schema", + "String", + "Time", + "UUID", "Union", - "resolve_only_args", - "Node", "is_node", - "GlobalID", - "ClientIDMutation", - "Connection", - "ConnectionField", - "PageInfo", "lazy_import", - "Context", - "ResolveInfo", + "resolve_only_args", ] diff --git a/graphene/types/__init__.py b/graphene/types/__init__.py index 680149a35..2641dd539 100644 --- a/graphene/types/__init__.py +++ b/graphene/types/__init__.py @@ -1,52 +1,53 @@ # flake8: noqa from graphql import GraphQLResolveInfo as ResolveInfo -from .objecttype import ObjectType -from .interface import Interface -from .mutation import Mutation -from .scalars import Scalar, String, ID, Int, Float, Boolean +from .argument import Argument +from .base64 import Base64 +from .context import Context from .datetime import Date, DateTime, Time from .decimal import Decimal -from .json import JSONString -from .uuid import UUID -from .schema import Schema -from .structures import List, NonNull +from .dynamic import Dynamic from .enum import Enum from .field import Field from .inputfield import InputField -from .argument import Argument from .inputobjecttype import InputObjectType -from .dynamic import Dynamic +from .interface import Interface +from .json import JSONString +from .mutation import Mutation +from .objecttype import ObjectType +from .scalars import ID, Boolean, Float, Int, Scalar, String +from .schema import Schema +from .structures import List, NonNull from .union import Union -from .context import Context - +from .uuid import UUID __all__ = [ - "ObjectType", - "InputObjectType", - "Interface", - "Mutation", + "Argument", + "Base64", + "Boolean", + "Context", + "Date", + "DateTime", + 
"Decimal", + "Dynamic", "Enum", "Field", - "InputField", - "Schema", - "Scalar", - "String", + "Float", "ID", + "InputField", + "InputObjectType", "Int", - "Float", - "Date", - "DateTime", - "Time", - "Decimal", + "Interface", "JSONString", - "UUID", - "Boolean", "List", + "Mutation", "NonNull", - "Argument", - "Dynamic", - "Union", - "Context", + "ObjectType", "ResolveInfo", + "Scalar", + "Schema", + "String", + "Time", + "UUID", + "Union", ] From 81fff0f1b5f70a75b149b2ea26d656377560e656 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Mon, 13 Jul 2020 23:40:57 +0100 Subject: [PATCH 061/141] Improve enum compatibility (#1153) * Improve enum compatibility by supporting return enum as well as values and names * Handle invalid enum values * Rough implementation of compat middleware * Move enum middleware into compat module * Fix tests * Tweak enum examples * Add some tests for the middleware * Clean up tests * Add missing imports * Remove enum compat middleware * Use custom dedent function and pin graphql-core to >3.1.2 --- docs/types/enums.rst | 5 +- graphene/relay/tests/test_node.py | 2 +- graphene/relay/tests/test_node_custom.py | 3 +- graphene/tests/utils.py | 9 + graphene/types/definitions.py | 17 +- graphene/types/schema.py | 2 +- graphene/types/tests/test_enum.py | 247 +++++++++++++++++++++++ graphene/types/tests/test_schema.py | 4 +- graphene/types/utils.py | 7 + setup.py | 2 +- 10 files changed, 290 insertions(+), 8 deletions(-) create mode 100644 graphene/tests/utils.py diff --git a/docs/types/enums.rst b/docs/types/enums.rst index 02cc267c6..a3215cada 100644 --- a/docs/types/enums.rst +++ b/docs/types/enums.rst @@ -61,7 +61,8 @@ you can add description etc. 
to your enum without changing the original: graphene.Enum.from_enum( AlreadyExistingPyEnum, - description=lambda v: return 'foo' if v == AlreadyExistingPyEnum.Foo else 'bar') + description=lambda v: return 'foo' if v == AlreadyExistingPyEnum.Foo else 'bar' + ) Notes @@ -76,6 +77,7 @@ In the Python ``Enum`` implementation you can access a member by initing the Enu .. code:: python from enum import Enum + class Color(Enum): RED = 1 GREEN = 2 @@ -89,6 +91,7 @@ However, in Graphene ``Enum`` you need to call get to have the same effect: .. code:: python from graphene import Enum + class Color(Enum): RED = 1 GREEN = 2 diff --git a/graphene/relay/tests/test_node.py b/graphene/relay/tests/test_node.py index 92d851054..d46838acd 100644 --- a/graphene/relay/tests/test_node.py +++ b/graphene/relay/tests/test_node.py @@ -1,7 +1,7 @@ import re from graphql_relay import to_global_id -from graphql.pyutils import dedent +from graphene.tests.utils import dedent from ...types import ObjectType, Schema, String from ..node import Node, is_node diff --git a/graphene/relay/tests/test_node_custom.py b/graphene/relay/tests/test_node_custom.py index 30d62e7ba..76a2cad36 100644 --- a/graphene/relay/tests/test_node_custom.py +++ b/graphene/relay/tests/test_node_custom.py @@ -1,5 +1,6 @@ from graphql import graphql_sync -from graphql.pyutils import dedent + +from graphene.tests.utils import dedent from ...types import Interface, ObjectType, Schema from ...types.scalars import Int, String diff --git a/graphene/tests/utils.py b/graphene/tests/utils.py new file mode 100644 index 000000000..b9804d9be --- /dev/null +++ b/graphene/tests/utils.py @@ -0,0 +1,9 @@ +from textwrap import dedent as _dedent + + +def dedent(text: str) -> str: + """Fix indentation of given text by removing leading spaces and tabs. + Also removes leading newlines and trailing spaces and tabs, but keeps trailing + newlines. 
+ """ + return _dedent(text.lstrip("\n").rstrip(" \t")) diff --git a/graphene/types/definitions.py b/graphene/types/definitions.py index 009169201..908cc7c86 100644 --- a/graphene/types/definitions.py +++ b/graphene/types/definitions.py @@ -1,3 +1,5 @@ +from enum import Enum as PyEnum + from graphql import ( GraphQLEnumType, GraphQLInputObjectType, @@ -5,6 +7,7 @@ GraphQLObjectType, GraphQLScalarType, GraphQLUnionType, + Undefined, ) @@ -36,7 +39,19 @@ class GrapheneScalarType(GrapheneGraphQLType, GraphQLScalarType): class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType): - pass + def serialize(self, value): + if not isinstance(value, PyEnum): + enum = self.graphene_type._meta.enum + try: + # Try and get enum by value + value = enum(value) + except ValueError: + # Try and get enum by name + try: + value = enum[value] + except KeyError: + return Undefined + return super(GrapheneEnumType, self).serialize(value) class GrapheneInputObjectType(GrapheneGraphQLType, GraphQLInputObjectType): diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 29ead4a70..ce0c74398 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -172,7 +172,7 @@ def create_enum(graphene_type): deprecation_reason = graphene_type._meta.deprecation_reason(value) values[name] = GraphQLEnumValue( - value=value.value, + value=value, description=description, deprecation_reason=deprecation_reason, ) diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 1b6181208..8d5e87af4 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -1,7 +1,12 @@ +from textwrap import dedent + from ..argument import Argument from ..enum import Enum, PyEnum from ..field import Field from ..inputfield import InputField +from ..inputobjecttype import InputObjectType +from ..mutation import Mutation +from ..scalars import String from ..schema import ObjectType, Schema @@ -224,3 +229,245 @@ class Meta: "GREEN": RGB1.GREEN, 
"BLUE": RGB1.BLUE, } + + +def test_enum_types(): + from enum import Enum as PyEnum + + class Color(PyEnum): + """Primary colors""" + + RED = 1 + YELLOW = 2 + BLUE = 3 + + GColor = Enum.from_enum(Color) + + class Query(ObjectType): + color = GColor(required=True) + + def resolve_color(_, info): + return Color.RED + + schema = Schema(query=Query) + + assert str(schema) == dedent( + '''\ + type Query { + color: Color! + } + + """Primary colors""" + enum Color { + RED + YELLOW + BLUE + } + ''' + ) + + +def test_enum_resolver(): + from enum import Enum as PyEnum + + class Color(PyEnum): + RED = 1 + GREEN = 2 + BLUE = 3 + + GColor = Enum.from_enum(Color) + + class Query(ObjectType): + color = GColor(required=True) + + def resolve_color(_, info): + return Color.RED + + schema = Schema(query=Query) + + results = schema.execute("query { color }") + assert not results.errors + + assert results.data["color"] == Color.RED.name + + +def test_enum_resolver_compat(): + from enum import Enum as PyEnum + + class Color(PyEnum): + RED = 1 + GREEN = 2 + BLUE = 3 + + GColor = Enum.from_enum(Color) + + class Query(ObjectType): + color = GColor(required=True) + color_by_name = GColor(required=True) + + def resolve_color(_, info): + return Color.RED.value + + def resolve_color_by_name(_, info): + return Color.RED.name + + schema = Schema(query=Query) + + results = schema.execute( + """query { + color + colorByName + }""" + ) + assert not results.errors + + assert results.data["color"] == Color.RED.name + assert results.data["colorByName"] == Color.RED.name + + +def test_enum_resolver_invalid(): + from enum import Enum as PyEnum + + class Color(PyEnum): + RED = 1 + GREEN = 2 + BLUE = 3 + + GColor = Enum.from_enum(Color) + + class Query(ObjectType): + color = GColor(required=True) + + def resolve_color(_, info): + return "BLACK" + + schema = Schema(query=Query) + + results = schema.execute("query { color }") + assert results.errors + assert ( + results.errors[0].message + == "Expected a 
value of type 'Color' but received: 'BLACK'" + ) + + +def test_field_enum_argument(): + class Color(Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + class Brick(ObjectType): + color = Color(required=True) + + color_filter = None + + class Query(ObjectType): + bricks_by_color = Field(Brick, color=Color(required=True)) + + def resolve_bricks_by_color(_, info, color): + nonlocal color_filter + color_filter = color + return Brick(color=color) + + schema = Schema(query=Query) + + results = schema.execute( + """ + query { + bricksByColor(color: RED) { + color + } + } + """ + ) + assert not results.errors + assert results.data == {"bricksByColor": {"color": "RED"}} + assert color_filter == Color.RED + + +def test_mutation_enum_input(): + class RGB(Enum): + """Available colors""" + + RED = 1 + GREEN = 2 + BLUE = 3 + + color_input = None + + class CreatePaint(Mutation): + class Arguments: + color = RGB(required=True) + + color = RGB(required=True) + + def mutate(_, info, color): + nonlocal color_input + color_input = color + return CreatePaint(color=color) + + class MyMutation(ObjectType): + create_paint = CreatePaint.Field() + + class Query(ObjectType): + a = String() + + schema = Schema(query=Query, mutation=MyMutation) + result = schema.execute( + """ mutation MyMutation { + createPaint(color: RED) { + color + } + } + """ + ) + assert not result.errors + assert result.data == {"createPaint": {"color": "RED"}} + + assert color_input == RGB.RED + + +def test_mutation_enum_input_type(): + class RGB(Enum): + """Available colors""" + + RED = 1 + GREEN = 2 + BLUE = 3 + + class ColorInput(InputObjectType): + color = RGB(required=True) + + color_input_value = None + + class CreatePaint(Mutation): + class Arguments: + color_input = ColorInput(required=True) + + color = RGB(required=True) + + def mutate(_, info, color_input): + nonlocal color_input_value + color_input_value = color_input.color + return CreatePaint(color=color_input.color) + + class MyMutation(ObjectType): + 
create_paint = CreatePaint.Field() + + class Query(ObjectType): + a = String() + + schema = Schema(query=Query, mutation=MyMutation) + result = schema.execute( + """ mutation MyMutation { + createPaint(colorInput: { color: RED }) { + color + } + } + """, + ) + assert not result.errors + assert result.data == {"createPaint": {"color": "RED"}} + + assert color_input_value == RGB.RED diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index 0c85e1708..fe4739c98 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -1,7 +1,7 @@ +from graphql.type import GraphQLObjectType, GraphQLSchema from pytest import raises -from graphql.type import GraphQLObjectType, GraphQLSchema -from graphql.pyutils import dedent +from graphene.tests.utils import dedent from ..field import Field from ..objecttype import ObjectType diff --git a/graphene/types/utils.py b/graphene/types/utils.py index 3b195d692..1976448aa 100644 --- a/graphene/types/utils.py +++ b/graphene/types/utils.py @@ -41,3 +41,10 @@ def get_type(_type): if inspect.isfunction(_type) or isinstance(_type, partial): return _type() return _type + + +def get_underlying_type(_type): + """Get the underlying type even if it is wrapped in structures like NonNull""" + while hasattr(_type, "of_type"): + _type = _type.of_type + return _type diff --git a/setup.py b/setup.py index 24bddcf90..48d7d285d 100644 --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ def run_tests(self): keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["examples*"]), install_requires=[ - "graphql-core>=3.1.1,<4", + "graphql-core>=3.1.2,<4", "graphql-relay>=3.0,<4", "aniso8601>=8,<9", ], From 64af43748c9340d432f6d949324867f426d4aee6 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Tue, 14 Jul 2020 14:31:54 +0100 Subject: [PATCH 062/141] v3.0.0b4 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py 
b/graphene/__init__.py index 13eb375d3..e252ba463 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -41,7 +41,7 @@ from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 0, 0, "beta", 3) +VERSION = (3, 0, 0, "beta", 4) __version__ = get_version(VERSION) From 21300054064fdaa79a75ebe9d81a19f9a8e738c7 Mon Sep 17 00:00:00 2001 From: Redowan Delowar Date: Tue, 28 Jul 2020 00:56:14 +0600 Subject: [PATCH 063/141] Minor grammatical fix in the schema docs (#1237) --- docs/types/schema.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/types/schema.rst b/docs/types/schema.rst index 0cf777ae4..a8c60226d 100644 --- a/docs/types/schema.rst +++ b/docs/types/schema.rst @@ -1,11 +1,11 @@ Schema ====== -A GraphQL **Schema** defines the types and relationship between **Fields** in your API. +A GraphQL **Schema** defines the types and relationships between **Fields** in your API. A Schema is created by supplying the root :ref:`ObjectType` of each operation, query (mandatory), mutation and subscription. -Schema will collect all type definitions related to the root operations and then supplied to the validator and executor. +Schema will collect all type definitions related to the root operations and then supply them to the validator and executor. .. code:: python @@ -15,11 +15,11 @@ Schema will collect all type definitions related to the root operations and then subscription=MyRootSubscription ) -A Root Query is just a special :ref:`ObjectType` that :ref:`defines the fields ` that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types: +A Root Query is just a special :ref:`ObjectType` that :ref:` defines the fields ` that are the entrypoint for your API. 
Root Mutation and Root Subscription are similar to Root Query, but for different operation types: * Query fetches data * Mutation changes data and retrieves the changes -* Subscription sends changes to clients in real time +* Subscription sends changes to clients in real-time Review the `GraphQL documentation on Schema`_ for a brief overview of fields, schema and operations. From d085c8852be40552428f74fdf891b20e54674540 Mon Sep 17 00:00:00 2001 From: Syrus Akbary Date: Tue, 28 Jul 2020 13:33:21 -0700 Subject: [PATCH 064/141] Subscription revamp (#1235) * Integrate async tests into main code * Added full support for subscriptions * Fixed syntax using black * Fixed typo --- Makefile | 6 +- graphene/relay/connection.py | 4 +- graphene/relay/node.py | 4 +- .../relay/tests/test_connection_async.py | 0 graphene/relay/tests/test_global_id.py | 4 +- .../relay/tests/test_mutation_async.py | 0 graphene/types/field.py | 22 +++++- graphene/types/schema.py | 73 ++++++++++++++----- graphene/types/tests/test_subscribe_async.py | 56 ++++++++++++++ tests_asyncio/test_subscribe.py | 33 --------- tox.ini | 2 +- 11 files changed, 140 insertions(+), 64 deletions(-) rename tests_asyncio/test_relay_connection.py => graphene/relay/tests/test_connection_async.py (100%) rename tests_asyncio/test_relay_mutation.py => graphene/relay/tests/test_mutation_async.py (100%) create mode 100644 graphene/types/tests/test_subscribe_async.py delete mode 100644 tests_asyncio/test_subscribe.py diff --git a/Makefile b/Makefile index df3b41181..c78e2b4fb 100644 --- a/Makefile +++ b/Makefile @@ -8,7 +8,7 @@ install-dev: pip install -e ".[dev]" test: - py.test graphene examples tests_asyncio + py.test graphene examples .PHONY: docs ## Generate docs docs: install-dev @@ -20,8 +20,8 @@ docs-live: install-dev .PHONY: format format: - black graphene examples setup.py tests_asyncio + black graphene examples setup.py .PHONY: lint lint: - flake8 graphene examples setup.py tests_asyncio + flake8 graphene examples 
setup.py diff --git a/graphene/relay/connection.py b/graphene/relay/connection.py index cfb6cb633..1a4684e56 100644 --- a/graphene/relay/connection.py +++ b/graphene/relay/connection.py @@ -171,8 +171,8 @@ def connection_resolver(cls, resolver, connection_type, root, info, **args): on_resolve = partial(cls.resolve_connection, connection_type, args) return maybe_thenable(resolved, on_resolve) - def get_resolver(self, parent_resolver): - resolver = super(IterableConnectionField, self).get_resolver(parent_resolver) + def wrap_resolve(self, parent_resolver): + resolver = super(IterableConnectionField, self).wrap_resolve(parent_resolver) return partial(self.connection_resolver, resolver, self.type) diff --git a/graphene/relay/node.py b/graphene/relay/node.py index 13fb8cea2..b189bc97b 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -37,7 +37,7 @@ def id_resolver(parent_resolver, node, root, info, parent_type_name=None, **args parent_type_name = parent_type_name or info.parent_type.name return node.to_global_id(parent_type_name, type_id) # root._meta.name - def get_resolver(self, parent_resolver): + def wrap_resolve(self, parent_resolver): return partial( self.id_resolver, parent_resolver, @@ -60,7 +60,7 @@ def __init__(self, node, type_=False, **kwargs): **kwargs, ) - def get_resolver(self, parent_resolver): + def wrap_resolve(self, parent_resolver): return partial(self.node_type.node_resolver, get_type(self.field_type)) diff --git a/tests_asyncio/test_relay_connection.py b/graphene/relay/tests/test_connection_async.py similarity index 100% rename from tests_asyncio/test_relay_connection.py rename to graphene/relay/tests/test_connection_async.py diff --git a/graphene/relay/tests/test_global_id.py b/graphene/relay/tests/test_global_id.py index 2fe813008..81860d9dc 100644 --- a/graphene/relay/tests/test_global_id.py +++ b/graphene/relay/tests/test_global_id.py @@ -45,7 +45,7 @@ def test_global_id_allows_overriding_of_node_and_required(): def 
test_global_id_defaults_to_info_parent_type(): my_id = "1" gid = GlobalID() - id_resolver = gid.get_resolver(lambda *_: my_id) + id_resolver = gid.wrap_resolve(lambda *_: my_id) my_global_id = id_resolver(None, Info(User)) assert my_global_id == to_global_id(User._meta.name, my_id) @@ -53,6 +53,6 @@ def test_global_id_defaults_to_info_parent_type(): def test_global_id_allows_setting_customer_parent_type(): my_id = "1" gid = GlobalID(parent_type=User) - id_resolver = gid.get_resolver(lambda *_: my_id) + id_resolver = gid.wrap_resolve(lambda *_: my_id) my_global_id = id_resolver(None, None) assert my_global_id == to_global_id(User._meta.name, my_id) diff --git a/tests_asyncio/test_relay_mutation.py b/graphene/relay/tests/test_mutation_async.py similarity index 100% rename from tests_asyncio/test_relay_mutation.py rename to graphene/relay/tests/test_mutation_async.py diff --git a/graphene/types/field.py b/graphene/types/field.py index 1a1ccf93b..dafb04b53 100644 --- a/graphene/types/field.py +++ b/graphene/types/field.py @@ -8,6 +8,7 @@ from .structures import NonNull from .unmountedtype import UnmountedType from .utils import get_type +from ..utils.deprecated import warn_deprecation base_type = type @@ -114,5 +115,24 @@ def __init__( def type(self): return get_type(self._type) - def get_resolver(self, parent_resolver): + get_resolver = None + + def wrap_resolve(self, parent_resolver): + """ + Wraps a function resolver, using the ObjectType resolve_{FIELD_NAME} + (parent_resolver) if the Field definition has no resolver. + """ + if self.get_resolver is not None: + warn_deprecation( + "The get_resolver method is being deprecated, please rename it to wrap_resolve." + ) + return self.get_resolver(parent_resolver) + return self.resolver or parent_resolver + + def wrap_subscribe(self, parent_subscribe): + """ + Wraps a function subscribe, using the ObjectType subscribe_{FIELD_NAME} + (parent_subscribe) if the Field definition has no subscribe. 
+ """ + return parent_subscribe diff --git a/graphene/types/schema.py b/graphene/types/schema.py index ce0c74398..5eb59e663 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -10,8 +10,11 @@ parse, print_schema, subscribe, + validate, + ExecutionResult, GraphQLArgument, GraphQLBoolean, + GraphQLError, GraphQLEnumValue, GraphQLField, GraphQLFloat, @@ -76,6 +79,11 @@ def is_type_of_from_possible_types(possible_types, root, _info): return isinstance(root, possible_types) +# We use this resolver for subscriptions +def identity_resolve(root, info): + return root + + class TypeMap(dict): def __init__( self, @@ -307,22 +315,39 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): if isinstance(arg.type, NonNull) else arg.default_value, ) + subscribe = field.wrap_subscribe( + self.get_function_for_type( + graphene_type, f"subscribe_{name}", name, field.default_value, + ) + ) + + # If we are in a subscription, we use (by default) an + # identity-based resolver for the root, rather than the + # default resolver for objects/dicts. 
+ if subscribe: + field_default_resolver = identity_resolve + elif issubclass(graphene_type, ObjectType): + default_resolver = ( + graphene_type._meta.default_resolver or get_default_resolver() + ) + field_default_resolver = partial( + default_resolver, name, field.default_value + ) + else: + field_default_resolver = None + + resolve = field.wrap_resolve( + self.get_function_for_type( + graphene_type, f"resolve_{name}", name, field.default_value + ) + or field_default_resolver + ) + _field = GraphQLField( field_type, args=args, - resolve=field.get_resolver( - self.get_resolver_for_type( - graphene_type, f"resolve_{name}", name, field.default_value - ) - ), - subscribe=field.get_resolver( - self.get_resolver_for_type( - graphene_type, - f"subscribe_{name}", - name, - field.default_value, - ) - ), + resolve=resolve, + subscribe=subscribe, deprecation_reason=field.deprecation_reason, description=field.description, ) @@ -330,7 +355,8 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): fields[field_name] = _field return fields - def get_resolver_for_type(self, graphene_type, func_name, name, default_value): + def get_function_for_type(self, graphene_type, func_name, name, default_value): + """Gets a resolve or subscribe function for a given ObjectType""" if not issubclass(graphene_type, ObjectType): return resolver = getattr(graphene_type, func_name, None) @@ -350,11 +376,6 @@ def get_resolver_for_type(self, graphene_type, func_name, name, default_value): if resolver: return get_unbound_function(resolver) - default_resolver = ( - graphene_type._meta.default_resolver or get_default_resolver() - ) - return partial(default_resolver, name, default_value) - def resolve_type(self, resolve_type_func, type_name, root, info, _type): type_ = resolve_type_func(root, info) @@ -476,7 +497,19 @@ async def execute_async(self, *args, **kwargs): return await graphql(self.graphql_schema, *args, **kwargs) async def subscribe(self, query, *args, **kwargs): - document = 
parse(query) + """Execute a GraphQL subscription on the schema asynchronously.""" + # Do parsing + try: + document = parse(query) + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) + + # Do validation + validation_errors = validate(self.graphql_schema, document) + if validation_errors: + return ExecutionResult(data=None, errors=validation_errors) + + # Execute the query kwargs = normalize_execute_kwargs(kwargs) return await subscribe(self.graphql_schema, document, *args, **kwargs) diff --git a/graphene/types/tests/test_subscribe_async.py b/graphene/types/tests/test_subscribe_async.py new file mode 100644 index 000000000..6f7ce4c65 --- /dev/null +++ b/graphene/types/tests/test_subscribe_async.py @@ -0,0 +1,56 @@ +from pytest import mark + +from graphene import ObjectType, Int, String, Schema, Field + + +class Query(ObjectType): + hello = String() + + def resolve_hello(root, info): + return "Hello, world!" + + +class Subscription(ObjectType): + count_to_ten = Field(Int) + + async def subscribe_count_to_ten(root, info): + count = 0 + while count < 10: + count += 1 + yield count + + +schema = Schema(query=Query, subscription=Subscription) + + +@mark.asyncio +async def test_subscription(): + subscription = "subscription { countToTen }" + result = await schema.subscribe(subscription) + count = 0 + async for item in result: + count = item.data["countToTen"] + assert count == 10 + + +@mark.asyncio +async def test_subscription_fails_with_invalid_query(): + # It fails if the provided query is invalid + subscription = "subscription { " + result = await schema.subscribe(subscription) + assert not result.data + assert result.errors + assert "Syntax Error: Expected Name, found " in str(result.errors[0]) + + +@mark.asyncio +async def test_subscription_fails_when_query_is_not_valid(): + # It can't subscribe to two fields at the same time, triggering a + # validation error. 
+ subscription = "subscription { countToTen, b: countToTen }" + result = await schema.subscribe(subscription) + assert not result.data + assert result.errors + assert "Anonymous Subscription must select only one top level field." in str( + result.errors[0] + ) diff --git a/tests_asyncio/test_subscribe.py b/tests_asyncio/test_subscribe.py deleted file mode 100644 index bf985d580..000000000 --- a/tests_asyncio/test_subscribe.py +++ /dev/null @@ -1,33 +0,0 @@ -from pytest import mark - -from graphene import ObjectType, Int, String, Schema, Field - - -class Query(ObjectType): - hello = String() - - def resolve_hello(root, info): - return "Hello, world!" - - -class Subscription(ObjectType): - count_to_ten = Field(Int) - - async def subscribe_count_to_ten(root, info): - count = 0 - while count < 10: - count += 1 - yield {"count_to_ten": count} - - -schema = Schema(query=Query, subscription=Subscription) - - -@mark.asyncio -async def test_subscription(): - subscription = "subscription { countToTen }" - result = await schema.subscribe(subscription) - count = 0 - async for item in result: - count = item.data["countToTen"] - assert count == 10 diff --git a/tox.ini b/tox.ini index 468f5fbc2..b0298feae 100644 --- a/tox.ini +++ b/tox.ini @@ -8,7 +8,7 @@ deps = setenv = PYTHONPATH = .:{envdir} commands = - py{36,37}: pytest --cov=graphene graphene examples tests_asyncio {posargs} + py{36,37}: pytest --cov=graphene graphene examples {posargs} [testenv:pre-commit] basepython=python3.7 From 29dd3f83916826eb302cf346dd7e73abe7f2517a Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Thu, 6 Aug 2020 17:19:02 +0100 Subject: [PATCH 065/141] v3.0.0b5 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index e252ba463..882128713 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -41,7 +41,7 @@ from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args 
-VERSION = (3, 0, 0, "beta", 4) +VERSION = (3, 0, 0, "beta", 5) __version__ = get_version(VERSION) From 86b904d327dd60538596d391948b7fcb85292217 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Fri, 7 Aug 2020 22:37:32 +0100 Subject: [PATCH 066/141] Split out the subscriptions documentation a separate file and fix it (#1245) --- docs/execution/execute.rst | 38 ------------------------------ docs/execution/index.rst | 1 + docs/execution/subscriptions.rst | 40 ++++++++++++++++++++++++++++++++ 3 files changed, 41 insertions(+), 38 deletions(-) create mode 100644 docs/execution/subscriptions.rst diff --git a/docs/execution/execute.rst b/docs/execution/execute.rst index cd29d72da..4bb5613ed 100644 --- a/docs/execution/execute.rst +++ b/docs/execution/execute.rst @@ -3,7 +3,6 @@ Executing a query ================= - For executing a query against a schema, you can directly call the ``execute`` method on it. @@ -17,43 +16,6 @@ For executing a query against a schema, you can directly call the ``execute`` me ``result`` represents the result of execution. ``result.data`` is the result of executing the query, ``result.errors`` is ``None`` if no errors occurred, and is a non-empty list if an error occurred. -For executing a subscription, you can directly call the ``subscribe`` method on it. -This method is async and must be awaited. - -.. code:: python - - import asyncio - from datetime import datetime - from graphene import ObjectType, String, Schema, Field - - # All schema require a query. - class Query(ObjectType): - hello = String() - - def resolve_hello(root, info): - return 'Hello, world!' 
- - class Subscription(ObjectType): - time_of_day = Field(String) - - async def subscribe_time_of_day(root, info): - while True: - yield { 'time_of_day': datetime.now().isoformat()} - await asyncio.sleep(1) - - SCHEMA = Schema(query=Query, subscription=Subscription) - - async def main(schema): - - subscription = 'subscription { timeOfDay }' - result = await schema.subscribe(subscription) - async for item in result: - print(item.data['timeOfDay']) - - asyncio.run(main(SCHEMA)) - -The ``result`` is an async iterator which yields items in the same manner as a query. - .. _SchemaExecuteContext: Context diff --git a/docs/execution/index.rst b/docs/execution/index.rst index 93a028456..dbfbfa726 100644 --- a/docs/execution/index.rst +++ b/docs/execution/index.rst @@ -9,3 +9,4 @@ Execution middleware dataloader fileuploading + subscriptions diff --git a/docs/execution/subscriptions.rst b/docs/execution/subscriptions.rst new file mode 100644 index 000000000..86ed78a1c --- /dev/null +++ b/docs/execution/subscriptions.rst @@ -0,0 +1,40 @@ +.. _SchemaSubscription: + +Subscriptions +============= + +To create a subscription, you can directly call the ``subscribe`` method on the +schema. This method is async and must be awaited. + +.. code:: python + + import asyncio + from datetime import datetime + from graphene import ObjectType, String, Schema, Field + + # Every schema requires a query. + class Query(ObjectType): + hello = String() + + def resolve_hello(root, info): + return "Hello, world!" 
+ + class Subscription(ObjectType): + time_of_day = String() + + async def subscribe_time_of_day(root, info): + while True: + yield datetime.now().isoformat() + await asyncio.sleep(1) + + schema = Schema(query=Query, subscription=Subscription) + + async def main(schema): + subscription = 'subscription { timeOfDay }' + result = await schema.subscribe(subscription) + async for item in result: + print(item.data['timeOfDay']) + + asyncio.run(main(schema)) + +The ``result`` is an async iterator which yields items in the same manner as a query. From 188ce9a6cb53d13565088f5279b04f6e1947e98f Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Wed, 12 Aug 2020 22:43:35 +0100 Subject: [PATCH 067/141] Fix subscribe with arguments (#1251) --- graphene/types/schema.py | 2 +- graphene/types/tests/test_subscribe_async.py | 24 ++++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 5eb59e663..55f0bf93b 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -80,7 +80,7 @@ def is_type_of_from_possible_types(possible_types, root, _info): # We use this resolver for subscriptions -def identity_resolve(root, info): +def identity_resolve(root, info, **arguments): return root diff --git a/graphene/types/tests/test_subscribe_async.py b/graphene/types/tests/test_subscribe_async.py index 6f7ce4c65..9b7a1f13b 100644 --- a/graphene/types/tests/test_subscribe_async.py +++ b/graphene/types/tests/test_subscribe_async.py @@ -54,3 +54,27 @@ async def test_subscription_fails_when_query_is_not_valid(): assert "Anonymous Subscription must select only one top level field." 
in str( result.errors[0] ) + + +@mark.asyncio +async def test_subscription_with_args(): + class Query(ObjectType): + hello = String() + + class Subscription(ObjectType): + count_upwards = Field(Int, limit=Int(required=True)) + + async def subscribe_count_upwards(root, info, limit): + count = 0 + while count < limit: + count += 1 + yield count + + schema = Schema(query=Query, subscription=Subscription) + + subscription = "subscription { countUpwards(limit: 5) }" + result = await schema.subscribe(subscription) + count = 0 + async for item in result: + count = item.data["countUpwards"] + assert count == 5 From 6918db1033269a79d7e2370d51e66da383302f4b Mon Sep 17 00:00:00 2001 From: "Daniel T. Plop" Date: Wed, 12 Aug 2020 23:44:00 +0200 Subject: [PATCH 068/141] Fix Typo in Docs (#1252) The example of executing a query by passing a root value had a typo for the trailing parenthesis, namely it was '}' instead of ')'. --- docs/execution/execute.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/execute.rst b/docs/execution/execute.rst index 4bb5613ed..23be0b420 100644 --- a/docs/execution/execute.rst +++ b/docs/execution/execute.rst @@ -85,7 +85,7 @@ Value used for :ref:`ResolverParamParent` in root queries and mutations can be o return {'id': root.id, 'firstName': root.name} schema = Schema(Query) - user_root = User(id=12, name='bob'} + user_root = User(id=12, name='bob') result = schema.execute( ''' query getUser { From b685e109f5d085e9861b73400b0802abe9b77545 Mon Sep 17 00:00:00 2001 From: Varun Dey Date: Mon, 24 Aug 2020 16:20:33 +0000 Subject: [PATCH 069/141] Fix typo in Schema docs (#1259) --- docs/types/schema.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/types/schema.rst b/docs/types/schema.rst index a8c60226d..08ff27d04 100644 --- a/docs/types/schema.rst +++ b/docs/types/schema.rst @@ -15,7 +15,7 @@ Schema will collect all type definitions related to the root operations and then 
subscription=MyRootSubscription ) -A Root Query is just a special :ref:`ObjectType` that :ref:` defines the fields ` that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types: +A Root Query is just a special :ref:`ObjectType` that defines the fields that are the entrypoint for your API. Root Mutation and Root Subscription are similar to Root Query, but for different operation types: * Query fetches data * Mutation changes data and retrieves the changes From 8c327fc4ed98ddf7fae0a67c48f3de24c530a06d Mon Sep 17 00:00:00 2001 From: Paul Bailey Date: Fri, 28 Aug 2020 10:55:46 -0500 Subject: [PATCH 070/141] add BigInt type (#1261) * add BigInt type * formatting * more Int tests --- graphene/types/scalars.py | 27 +++++++++++++++++++++++++++ graphene/types/tests/test_scalar.py | 22 +++++++++++++++++++++- 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/graphene/types/scalars.py b/graphene/types/scalars.py index 245fa570b..472f2d41e 100644 --- a/graphene/types/scalars.py +++ b/graphene/types/scalars.py @@ -82,6 +82,33 @@ def parse_literal(ast): return num +class BigInt(Scalar): + """ + The `BigInt` scalar type represents non-fractional whole numeric values. + `BigInt` is not constrained to 32-bit like the `Int` type and thus is a less + compatible type. 
+ """ + + @staticmethod + def coerce_int(value): + try: + num = int(value) + except ValueError: + try: + num = int(float(value)) + except ValueError: + return None + return num + + serialize = coerce_int + parse_value = coerce_int + + @staticmethod + def parse_literal(ast): + if isinstance(ast, IntValueNode): + return int(ast.value) + + class Float(Scalar): """ The `Float` scalar type represents signed double-precision fractional diff --git a/graphene/types/tests/test_scalar.py b/graphene/types/tests/test_scalar.py index 559c0ce68..2ff672082 100644 --- a/graphene/types/tests/test_scalar.py +++ b/graphene/types/tests/test_scalar.py @@ -1,4 +1,5 @@ -from ..scalars import Scalar +from ..scalars import Scalar, Int, BigInt +from graphql.language.ast import IntValueNode def test_scalar(): @@ -7,3 +8,22 @@ class JSONScalar(Scalar): assert JSONScalar._meta.name == "JSONScalar" assert JSONScalar._meta.description == "Documentation" + + +def test_ints(): + assert Int.parse_value(2 ** 31 - 1) is not None + assert Int.parse_value("2.0") is not None + assert Int.parse_value(2 ** 31) is None + + assert Int.parse_literal(IntValueNode(value=str(2 ** 31 - 1))) == 2 ** 31 - 1 + assert Int.parse_literal(IntValueNode(value=str(2 ** 31))) is None + + assert Int.parse_value(-(2 ** 31)) is not None + assert Int.parse_value(-(2 ** 31) - 1) is None + + assert BigInt.parse_value(2 ** 31) is not None + assert BigInt.parse_value("2.0") is not None + assert BigInt.parse_value(-(2 ** 31) - 1) is not None + + assert BigInt.parse_literal(IntValueNode(value=str(2 ** 31 - 1))) == 2 ** 31 - 1 + assert BigInt.parse_literal(IntValueNode(value=str(2 ** 31))) == 2 ** 31 From a53b782bf8ec5612d5cceb582fbde68eeba859aa Mon Sep 17 00:00:00 2001 From: Ali Reza Yahyapour Date: Tue, 22 Sep 2020 19:40:01 +0330 Subject: [PATCH 071/141] Syntax Error Fixed for Dictionary assert (#1267) --- docs/types/objecttypes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/types/objecttypes.rst 
b/docs/types/objecttypes.rst index 29d3954c6..3cc8d8302 100644 --- a/docs/types/objecttypes.rst +++ b/docs/types/objecttypes.rst @@ -102,7 +102,7 @@ When we execute a query against that schema. query_string = "{ me { fullName } }" result = schema.execute(query_string) - assert result.data["me"] == {"fullName": "Luke Skywalker") + assert result.data["me"] == {"fullName": "Luke Skywalker"} Then we go through the following steps to resolve this query: From e24ac547d670d90931cc2392d2330787747c041f Mon Sep 17 00:00:00 2001 From: Alec Rosenbaum Date: Wed, 21 Oct 2020 05:13:32 -0400 Subject: [PATCH 072/141] Add UnforgivingExecutionContext (#1255) --- graphene/types/schema.py | 101 +++++++++++++++++++++++- graphene/types/tests/test_schema.py | 117 +++++++++++++++++++++++++++- 2 files changed, 215 insertions(+), 3 deletions(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 55f0bf93b..4fd71769c 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -28,6 +28,8 @@ GraphQLString, Undefined, ) +from graphql.execution import ExecutionContext +from graphql.execution.values import get_argument_values from ..utils.str_converters import to_camel_case from ..utils.get_unbound_function import get_unbound_function @@ -317,7 +319,7 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): ) subscribe = field.wrap_subscribe( self.get_function_for_type( - graphene_type, f"subscribe_{name}", name, field.default_value, + graphene_type, f"subscribe_{name}", name, field.default_value ) ) @@ -394,6 +396,101 @@ def resolve_type(self, resolve_type_func, type_name, root, info, _type): return type_ +class UnforgivingExecutionContext(ExecutionContext): + """An execution context which doesn't swallow exceptions. + + The only difference between this execution context and the one it inherits from is + that ``except Exception`` is commented out within ``resolve_field_value_or_error``. 
+ By removing that exception handling, only ``GraphQLError``'s are caught. + """ + + def resolve_field_value_or_error( + self, field_def, field_nodes, resolve_fn, source, info + ): + """Resolve field to a value or an error. + + Isolates the "ReturnOrAbrupt" behavior to not de-opt the resolve_field() + method. Returns the result of resolveFn or the abrupt-return Error object. + + For internal use only. + """ + try: + # Build a dictionary of arguments from the field.arguments AST, using the + # variables scope to fulfill any variable references. + args = get_argument_values(field_def, field_nodes[0], self.variable_values) + + # Note that contrary to the JavaScript implementation, we pass the context + # value as part of the resolve info. + result = resolve_fn(source, info, **args) + if self.is_awaitable(result): + # noinspection PyShadowingNames + async def await_result(): + try: + return await result + except GraphQLError as error: + return error + # except Exception as error: + # return GraphQLError(str(error), original_error=error) + + # Yes, this is commented out code. It's been intentionally + # _not_ removed to show what has changed from the original + # implementation. + + return await_result() + return result + except GraphQLError as error: + return error + # except Exception as error: + # return GraphQLError(str(error), original_error=error) + + # Yes, this is commented out code. It's been intentionally _not_ + # removed to show what has changed from the original implementation. + + def complete_value_catching_error( + self, return_type, field_nodes, info, path, result + ): + """Complete a value while catching an error. + + This is a small wrapper around completeValue which detects and logs errors in + the execution context. 
+ """ + try: + if self.is_awaitable(result): + + async def await_result(): + value = self.complete_value( + return_type, field_nodes, info, path, await result + ) + if self.is_awaitable(value): + return await value + return value + + completed = await_result() + else: + completed = self.complete_value( + return_type, field_nodes, info, path, result + ) + if self.is_awaitable(completed): + # noinspection PyShadowingNames + async def await_completed(): + try: + return await completed + + # CHANGE WAS MADE HERE + # ``GraphQLError`` was swapped in for ``except Exception`` + except GraphQLError as error: + self.handle_field_error(error, field_nodes, path, return_type) + + return await_completed() + return completed + + # CHANGE WAS MADE HERE + # ``GraphQLError`` was swapped in for ``except Exception`` + except GraphQLError as error: + self.handle_field_error(error, field_nodes, path, return_type) + return None + + class Schema: """Schema Definition. @@ -481,6 +578,8 @@ def execute(self, *args, **kwargs): request_string, an operation name must be provided for the result to be provided. middleware (List[SupportsGraphQLMiddleware]): Supply request level middleware as defined in `graphql-core`. + execution_context_class (ExecutionContext, optional): The execution context class + to use when resolving queries and mutations. Returns: :obj:`ExecutionResult` containing any data and errors for the operation. 
diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index fe4739c98..54c48b4f2 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -1,12 +1,13 @@ from graphql.type import GraphQLObjectType, GraphQLSchema -from pytest import raises +from graphql import GraphQLError +from pytest import mark, raises, fixture from graphene.tests.utils import dedent from ..field import Field from ..objecttype import ObjectType from ..scalars import String -from ..schema import Schema +from ..schema import Schema, UnforgivingExecutionContext class MyOtherType(ObjectType): @@ -68,3 +69,115 @@ def test_schema_requires_query_type(): assert len(result.errors) == 1 error = result.errors[0] assert error.message == "Query root type must be provided." + + +class TestUnforgivingExecutionContext: + @fixture + def schema(self): + class ErrorFieldsMixin: + sanity_field = String() + expected_error_field = String() + unexpected_value_error_field = String() + unexpected_type_error_field = String() + unexpected_attribute_error_field = String() + unexpected_key_error_field = String() + + @staticmethod + def resolve_sanity_field(obj, info): + return "not an error" + + @staticmethod + def resolve_expected_error_field(obj, info): + raise GraphQLError("expected error") + + @staticmethod + def resolve_unexpected_value_error_field(obj, info): + raise ValueError("unexpected error") + + @staticmethod + def resolve_unexpected_type_error_field(obj, info): + raise TypeError("unexpected error") + + @staticmethod + def resolve_unexpected_attribute_error_field(obj, info): + raise AttributeError("unexpected error") + + @staticmethod + def resolve_unexpected_key_error_field(obj, info): + return {}["fails"] + + class NestedObject(ErrorFieldsMixin, ObjectType): + pass + + class MyQuery(ErrorFieldsMixin, ObjectType): + nested_object = Field(NestedObject) + nested_object_error = Field(NestedObject) + + @staticmethod + def resolve_nested_object(obj, 
info): + return object() + + @staticmethod + def resolve_nested_object_error(obj, info): + raise TypeError() + + schema = Schema(query=MyQuery) + return schema + + def test_sanity_check(self, schema): + # this should pass with no errors (sanity check) + result = schema.execute( + "query { sanityField }", + execution_context_class=UnforgivingExecutionContext, + ) + assert not result.errors + assert result.data == {"sanityField": "not an error"} + + def test_nested_sanity_check(self, schema): + # this should pass with no errors (sanity check) + result = schema.execute( + r"query { nestedObject { sanityField } }", + execution_context_class=UnforgivingExecutionContext, + ) + assert not result.errors + assert result.data == {"nestedObject": {"sanityField": "not an error"}} + + def test_graphql_error(self, schema): + result = schema.execute( + "query { expectedErrorField }", + execution_context_class=UnforgivingExecutionContext, + ) + assert len(result.errors) == 1 + assert result.errors[0].message == "expected error" + assert result.data == {"expectedErrorField": None} + + def test_nested_graphql_error(self, schema): + result = schema.execute( + r"query { nestedObject { expectedErrorField } }", + execution_context_class=UnforgivingExecutionContext, + ) + assert len(result.errors) == 1 + assert result.errors[0].message == "expected error" + assert result.data == {"nestedObject": {"expectedErrorField": None}} + + @mark.parametrize( + "field,exception", + [ + ("unexpectedValueErrorField", ValueError), + ("unexpectedTypeErrorField", TypeError), + ("unexpectedAttributeErrorField", AttributeError), + ("unexpectedKeyErrorField", KeyError), + ("nestedObject { unexpectedValueErrorField }", ValueError), + ("nestedObject { unexpectedTypeErrorField }", TypeError), + ("nestedObject { unexpectedAttributeErrorField }", AttributeError), + ("nestedObject { unexpectedKeyErrorField }", KeyError), + ("nestedObjectError { __typename }", TypeError), + ], + ) + def test_unexpected_error(self, 
field, exception, schema): + with raises(exception): + # no result, but the exception should be propagated + schema.execute( + f"query {{ {field} }}", + execution_context_class=UnforgivingExecutionContext, + ) From 84582eb3749a3f56968facfc4c7d41f5887a9b0a Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Wed, 21 Oct 2020 10:15:38 +0100 Subject: [PATCH 073/141] v3.0.0b6 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index 882128713..b88543bb1 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -41,7 +41,7 @@ from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 0, 0, "beta", 5) +VERSION = (3, 0, 0, "beta", 6) __version__ = get_version(VERSION) From 7d09e5b138695f8cdf56660564daa914ac04c9d3 Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Tue, 27 Oct 2020 08:51:51 +0000 Subject: [PATCH 074/141] Update stale.yml --- .github/stale.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/stale.yml b/.github/stale.yml index bb573c081..322a3edad 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -1,7 +1,7 @@ # Number of days of inactivity before an issue becomes stale -daysUntilStale: 90 +daysUntilStale: false # Number of days of inactivity before a stale issue is closed -daysUntilClose: 14 +daysUntilClose: false # Issues with these labels will never be considered stale exemptLabels: - pinned @@ -15,9 +15,10 @@ exemptLabels: # Label to use when marking an issue as stale staleLabel: wontfix # Comment to post when marking an issue as stale. Set to `false` to disable -markComment: > - This issue has been automatically marked as stale because it has not had - recent activity. It will be closed if no further activity occurs. Thank you - for your contributions. 
+markComment: false +# markComment: > + # This issue has been automatically marked as stale because it has not had + # recent activity. It will be closed if no further activity occurs. Thank you + # for your contributions. # Comment to post when closing a stale issue. Set to `false` to disable closeComment: false From e0d4bec2d80ce259e5091497f3491e3076eb06a6 Mon Sep 17 00:00:00 2001 From: Varun Dey Date: Tue, 17 Nov 2020 21:31:21 +0530 Subject: [PATCH 075/141] Remove Object Mutation dead link from Relay docs (#1272) The official Relay project has removed 'Relay input Object Mutation' in favour of general mutation spec from their docs in [this PR](https://github.com/facebook/relay/pull/2401/files#diff-98bee0595817d7a46cd52d86e6c3db70) and is unavailable on their [official website](https://relay.dev/docs/en/graphql-server-specification#mutations) as well --- docs/relay/index.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/relay/index.rst b/docs/relay/index.rst index 7eb418df5..8435ca2e5 100644 --- a/docs/relay/index.rst +++ b/docs/relay/index.rst @@ -26,4 +26,3 @@ Useful links .. _Getting started with Relay: https://facebook.github.io/relay/docs/en/quick-start-guide.html .. _Relay Global Identification Specification: https://facebook.github.io/relay/graphql/objectidentification.htm .. _Relay Cursor Connection Specification: https://facebook.github.io/relay/graphql/connections.htm -.. _Relay input Object Mutation: https://facebook.github.io/relay/graphql/mutations.htm From e5eeb9d831715825e4b6b15ff94c5f6e04feaee8 Mon Sep 17 00:00:00 2001 From: Jason Kraus Date: Wed, 6 Jan 2021 01:54:45 -0800 Subject: [PATCH 076/141] fix(Decimal): parse integers as decimal. 
(#1295) --- graphene/types/decimal.py | 4 ++-- graphene/types/tests/test_decimal.py | 8 ++++++++ tox.ini | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/graphene/types/decimal.py b/graphene/types/decimal.py index 028d6d289..b2acbe7e7 100644 --- a/graphene/types/decimal.py +++ b/graphene/types/decimal.py @@ -2,7 +2,7 @@ from decimal import Decimal as _Decimal -from graphql.language.ast import StringValueNode +from graphql.language.ast import StringValueNode, IntValueNode from .scalars import Scalar @@ -23,7 +23,7 @@ def serialize(dec): @classmethod def parse_literal(cls, node): - if isinstance(node, StringValueNode): + if isinstance(node, (StringValueNode, IntValueNode)): return cls.parse_value(node.value) @staticmethod diff --git a/graphene/types/tests/test_decimal.py b/graphene/types/tests/test_decimal.py index fd77f4821..9757e82ca 100644 --- a/graphene/types/tests/test_decimal.py +++ b/graphene/types/tests/test_decimal.py @@ -41,3 +41,11 @@ def test_bad_decimal_query(): result = schema.execute("""{ decimal(input: "%s") }""" % not_a_decimal) assert len(result.errors) == 1 assert result.data is None + + +def test_decimal_string_query_integer(): + decimal_value = 1 + result = schema.execute("""{ decimal(input: %s) }""" % decimal_value) + assert not result.errors + assert result.data == {"decimal": str(decimal_value)} + assert decimal.Decimal(result.data["decimal"]) == decimal_value diff --git a/tox.ini b/tox.ini index b0298feae..dd922c469 100644 --- a/tox.ini +++ b/tox.ini @@ -8,7 +8,7 @@ deps = setenv = PYTHONPATH = .:{envdir} commands = - py{36,37}: pytest --cov=graphene graphene examples {posargs} + py{36,37,38}: pytest --cov=graphene graphene examples {posargs} [testenv:pre-commit] basepython=python3.7 From 2e87ebe5fcd50349f9572a0b20117e9ae46b15de Mon Sep 17 00:00:00 2001 From: Jonathan Kim Date: Wed, 6 Jan 2021 09:58:19 +0000 Subject: [PATCH 077/141] v3.0.0b7 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/graphene/__init__.py b/graphene/__init__.py index b88543bb1..34729de0b 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -41,7 +41,7 @@ from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 0, 0, "beta", 6) +VERSION = (3, 0, 0, "beta", 7) __version__ = get_version(VERSION) From 6f9cdb4888a230d39d99ffc540395cdc379d746c Mon Sep 17 00:00:00 2001 From: bartenra <77667589+bartenra@users.noreply.github.com> Date: Wed, 24 Mar 2021 20:32:35 +0100 Subject: [PATCH 078/141] Fix links to Relay docs (#1318) --- docs/relay/index.rst | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/relay/index.rst b/docs/relay/index.rst index 8435ca2e5..2efde25fe 100644 --- a/docs/relay/index.rst +++ b/docs/relay/index.rst @@ -19,10 +19,8 @@ Useful links - `Getting started with Relay`_ - `Relay Global Identification Specification`_ - `Relay Cursor Connection Specification`_ -- `Relay input Object Mutation`_ -.. _Relay: https://facebook.github.io/relay/docs/en/graphql-server-specification.html -.. _Relay specification: https://facebook.github.io/relay/graphql/objectidentification.htm#sec-Node-root-field -.. _Getting started with Relay: https://facebook.github.io/relay/docs/en/quick-start-guide.html -.. _Relay Global Identification Specification: https://facebook.github.io/relay/graphql/objectidentification.htm -.. _Relay Cursor Connection Specification: https://facebook.github.io/relay/graphql/connections.htm +.. _Relay: https://relay.dev/docs/guides/graphql-server-specification/ +.. _Getting started with Relay: https://relay.dev/docs/getting-started/step-by-step-guide/ +.. _Relay Global Identification Specification: https://relay.dev/graphql/objectidentification.htm +.. 
_Relay Cursor Connection Specification: https://relay.dev/graphql/connections.htm From f622f1f53c2da2ca5c6a0a105dc91dae66778c3d Mon Sep 17 00:00:00 2001 From: shukryzablah Date: Wed, 24 Mar 2021 15:32:51 -0400 Subject: [PATCH 079/141] Update index.rst (#1313) --- docs/testing/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/testing/index.rst b/docs/testing/index.rst index 0103779cf..877879f6f 100644 --- a/docs/testing/index.rst +++ b/docs/testing/index.rst @@ -77,13 +77,13 @@ Snapshot testing As our APIs evolve, we need to know when our changes introduce any breaking changes that might break some of the clients of our GraphQL app. -However, writing tests and replicate the same response we expect from our GraphQL application can be +However, writing tests and replicating the same response we expect from our GraphQL application can be a tedious and repetitive task, and sometimes it's easier to skip this process. Because of that, we recommend the usage of `SnapshotTest `_. -SnapshotTest let us write all this tests in a breeze, as creates automatically the ``snapshots`` for us -the first time the test is executed. +SnapshotTest lets us write all these tests in a breeze, as it automatically creates the ``snapshots`` for us +the first time the test are executed. 
Here is a simple example on how our tests will look if we use ``pytest``: From c08379ed85b2759de32777eb5dd3dca143a6d69f Mon Sep 17 00:00:00 2001 From: Minh Tu Le Date: Mon, 19 Apr 2021 10:03:11 -0700 Subject: [PATCH 080/141] Use argument's `default_value` regardless if the input field is required (#1326) * Use argument's default value regardless if the input field is required * Add a test * Format code --- graphene/types/schema.py | 5 +---- graphene/types/tests/test_type_map.py | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 4fd71769c..995323542 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -26,7 +26,6 @@ GraphQLObjectType, GraphQLSchema, GraphQLString, - Undefined, ) from graphql.execution import ExecutionContext from graphql.execution.values import get_argument_values @@ -313,9 +312,7 @@ def create_fields_for_type(self, graphene_type, is_input_type=False): arg_type, out_name=arg_name, description=arg.description, - default_value=Undefined - if isinstance(arg.type, NonNull) - else arg.default_value, + default_value=arg.default_value, ) subscribe = field.wrap_subscribe( self.get_function_for_type( diff --git a/graphene/types/tests/test_type_map.py b/graphene/types/tests/test_type_map.py index 334eb2415..12e7a1f44 100644 --- a/graphene/types/tests/test_type_map.py +++ b/graphene/types/tests/test_type_map.py @@ -6,6 +6,7 @@ GraphQLInputField, GraphQLInputObjectType, GraphQLInterfaceType, + GraphQLNonNull, GraphQLObjectType, GraphQLString, ) @@ -94,6 +95,21 @@ def resolve_foo(self, bar): } +def test_required_argument_with_default_value(): + class MyObjectType(ObjectType): + foo = String(bar=String(required=True, default_value="x")) + + type_map = create_type_map([MyObjectType]) + + graphql_type = type_map["MyObjectType"] + foo_field = graphql_type.fields["foo"] + + bar_argument = foo_field.args["bar"] + assert bar_argument.default_value == "x" + 
assert isinstance(bar_argument.type, GraphQLNonNull) + assert bar_argument.type.of_type == GraphQLString + + def test_dynamic_objecttype(): class MyObjectType(ObjectType): """Description""" From 485b1ed325287fd721b13aac8b4ec872d6295c6a Mon Sep 17 00:00:00 2001 From: kevinr-electric <77303321+kevinr-electric@users.noreply.github.com> Date: Thu, 22 Apr 2021 23:28:05 -0400 Subject: [PATCH 081/141] fix field name in execute.rst example (#1327) fix field name in execute.rst 'Operation Name' example --- docs/execution/execute.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/execute.rst b/docs/execution/execute.rst index 23be0b420..1c0e2599a 100644 --- a/docs/execution/execute.rst +++ b/docs/execution/execute.rst @@ -110,7 +110,7 @@ If there are multiple operations defined in a query string, ``operation_name`` s from graphene import ObjectType, Field, Schema class Query(ObjectType): - me = Field(User) + user = Field(User) def resolve_user(root, info): return get_user_by_id(12) From 69b628686105e4269c389f1125d79742ccab2bc2 Mon Sep 17 00:00:00 2001 From: Sergey Fedoseev Date: Fri, 16 Jul 2021 22:10:53 +0500 Subject: [PATCH 082/141] Fix typo in docstring of ObjectType (#1343) --- graphene/types/objecttype.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index f4a0f5a0e..c69be937b 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -66,7 +66,7 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta): Methods starting with ``resolve_`` are bound as resolvers of the matching Field name. If no resolver is provided, the default resolver is used. - Ambiguous types with Interface and Union can be determined through``is_type_of`` method and + Ambiguous types with Interface and Union can be determined through ``is_type_of`` method and ``Meta.possible_types`` attribute. .. 
code:: python From 5290c9364c2479884eef2dbf06702381a4828fcb Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 16 Jul 2021 19:11:49 +0200 Subject: [PATCH 083/141] Allow later aniso8601 releases (#1331) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 48d7d285d..03001111d 100644 --- a/setup.py +++ b/setup.py @@ -84,7 +84,7 @@ def run_tests(self): install_requires=[ "graphql-core>=3.1.2,<4", "graphql-relay>=3.0,<4", - "aniso8601>=8,<9", + "aniso8601>=8,<10", ], tests_require=tests_require, extras_require={"test": tests_require, "dev": dev_requires}, From fce45ef5520c92f59d7988f36eddbb889e48e1fd Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 16 Jul 2021 19:12:09 +0200 Subject: [PATCH 084/141] Update pytz to 2021.1 (#1330) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 03001111d..1503c3c69 100644 --- a/setup.py +++ b/setup.py @@ -54,7 +54,7 @@ def run_tests(self): "coveralls>=1.11,<2", "promise>=2.3,<3", "mock>=4.0,<5", - "pytz==2019.3", + "pytz==2021.1", "iso8601>=0.1,<2", ] From aa11681048a6be67023627a4907e013d65dd13d1 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Fri, 13 Aug 2021 18:22:12 +0530 Subject: [PATCH 085/141] add depth limit validator --- docs/execution/index.rst | 1 + docs/execution/validators.rst | 35 +++ graphene/validators/__init__.py | 6 + graphene/validators/depth_limit_validator.py | 198 +++++++++++++ graphene/validators/tests/__init__.py | 0 .../tests/test_depth_limit_validator.py | 279 ++++++++++++++++++ 6 files changed, 519 insertions(+) create mode 100644 docs/execution/validators.rst create mode 100644 graphene/validators/__init__.py create mode 100644 graphene/validators/depth_limit_validator.py create mode 100644 graphene/validators/tests/__init__.py create mode 100644 graphene/validators/tests/test_depth_limit_validator.py diff --git a/docs/execution/index.rst 
b/docs/execution/index.rst index dbfbfa726..466526657 100644 --- a/docs/execution/index.rst +++ b/docs/execution/index.rst @@ -10,3 +10,4 @@ Execution dataloader fileuploading subscriptions + validators diff --git a/docs/execution/validators.rst b/docs/execution/validators.rst new file mode 100644 index 000000000..94bb7c2fc --- /dev/null +++ b/docs/execution/validators.rst @@ -0,0 +1,35 @@ +Middleware +========== + +Validation rules help validate a given GraphQL query, before executing it.To help with common use +cases, graphene provides a few validation rules out of the box. + + +Depth limit Validator +----------------- +The depth limit validator helps to prevent execution of malicious +queries. It takes in the following arguments. + +- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document. +- ``ignore`` Stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean +- ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation. + +Example +------- + +Here is how you would implement depth-limiting on your schema. + +.. code:: python + from graphene.validators import depth_limit_validator + + # The following schema doesn't execute queries + # which have a depth more than 20. 
+ + result = schema.execute( + 'THE QUERY', + validation_rules=[ + depth_limit_validator( + max_depth=20 + ) + ] + ) diff --git a/graphene/validators/__init__.py b/graphene/validators/__init__.py new file mode 100644 index 000000000..8bd8d884f --- /dev/null +++ b/graphene/validators/__init__.py @@ -0,0 +1,6 @@ +from .depth_limit_validator import depth_limit_validator + + +__all__ = [ + "depth_limit_validator" +] diff --git a/graphene/validators/depth_limit_validator.py b/graphene/validators/depth_limit_validator.py new file mode 100644 index 000000000..436152055 --- /dev/null +++ b/graphene/validators/depth_limit_validator.py @@ -0,0 +1,198 @@ +# This is a Python port of https://github.com/stems/graphql-depth-limit +# which is licensed under the terms of the MIT license, reproduced below. +# +# ----------- +# +# MIT License +# +# Copyright (c) 2017 Stem +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +import re +from typing import Callable, Dict, List, Optional, Union + +from graphql import GraphQLError, is_introspection_type +from graphql.language import ( + DefinitionNode, + FieldNode, + FragmentDefinitionNode, + FragmentSpreadNode, + InlineFragmentNode, + Node, + OperationDefinitionNode, +) +from graphql.validation import ValidationContext, ValidationRule + + +IgnoreType = Union[Callable[[str], bool], re.Pattern, str] + + +def depth_limit_validator( + max_depth: int, + ignore: Optional[List[IgnoreType]] = None, + callback: Callable[[Dict[str, int]], None] = None, +): + class DepthLimitValidator(ValidationRule): + def __init__(self, validation_context: ValidationContext): + document = validation_context.document + definitions = document.definitions + + fragments = get_fragments(definitions) + queries = get_queries_and_mutations(definitions) + query_depths = {} + + for name in queries: + query_depths[name] = determine_depth( + node=queries[name], + fragments=fragments, + depth_so_far=0, + max_depth=max_depth, + context=validation_context, + operation_name=name, + ignore=ignore, + ) + + if callable(callback): + callback(query_depths) + super().__init__(validation_context) + + return DepthLimitValidator + + +def get_fragments( + definitions: List[DefinitionNode], +) -> Dict[str, FragmentDefinitionNode]: + fragments = {} + for definition in definitions: + if isinstance(definition, FragmentDefinitionNode): + fragments[definition.name.value] = definition + + return fragments + + +# This will actually get both queries and mutations. 
+# We can basically treat those the same +def get_queries_and_mutations( + definitions: List[DefinitionNode], +) -> Dict[str, OperationDefinitionNode]: + operations = {} + + for definition in definitions: + if isinstance(definition, OperationDefinitionNode): + operation = definition.name.value if definition.name else "anonymous" + operations[operation] = definition + + return operations + + +def determine_depth( + node: Node, + fragments: Dict[str, FragmentDefinitionNode], + depth_so_far: int, + max_depth: int, + context: ValidationContext, + operation_name: str, + ignore: Optional[List[IgnoreType]] = None, +) -> int: + if depth_so_far > max_depth: + context.report_error( + GraphQLError( + f"'{operation_name}' exceeds maximum operation depth of {max_depth}", + [node], + ) + ) + return depth_so_far + + if isinstance(node, FieldNode): + # from: https://spec.graphql.org/June2018/#sec-Schema + # > All types and directives defined within a schema must not have a name which + # > begins with "__" (two underscores), as this is used exclusively + # > by GraphQL’s introspection system. 
+ should_ignore = str(node.name.value).startswith("__") or is_ignored( + node, ignore + ) + + if should_ignore or not node.selection_set: + return 0 + + return 1 + max( + map( + lambda selection: determine_depth( + node=selection, + fragments=fragments, + depth_so_far=depth_so_far + 1, + max_depth=max_depth, + context=context, + operation_name=operation_name, + ignore=ignore, + ), + node.selection_set.selections, + ) + ) + elif isinstance(node, FragmentSpreadNode): + return determine_depth( + node=fragments[node.name.value], + fragments=fragments, + depth_so_far=depth_so_far, + max_depth=max_depth, + context=context, + operation_name=operation_name, + ignore=ignore, + ) + elif isinstance( + node, (InlineFragmentNode, FragmentDefinitionNode, OperationDefinitionNode) + ): + return max( + map( + lambda selection: determine_depth( + node=selection, + fragments=fragments, + depth_so_far=depth_so_far, + max_depth=max_depth, + context=context, + operation_name=operation_name, + ignore=ignore, + ), + node.selection_set.selections, + ) + ) + else: + raise Exception(f"Depth crawler cannot handle: {node.kind}") # pragma: no cover + + +def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: + if ignore is None: + return False + + for rule in ignore: + field_name = node.name.value + if isinstance(rule, str): + if field_name == rule: + return True + elif isinstance(rule, re.Pattern): + if rule.match(field_name): + return True + elif callable(rule): + if rule(field_name): + return True + else: + raise ValueError(f"Invalid ignore option: {rule}") + + return False diff --git a/graphene/validators/tests/__init__.py b/graphene/validators/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/graphene/validators/tests/test_depth_limit_validator.py b/graphene/validators/tests/test_depth_limit_validator.py new file mode 100644 index 000000000..ea50c8d49 --- /dev/null +++ b/graphene/validators/tests/test_depth_limit_validator.py @@ -0,0 
+1,279 @@ +import re + +from pytest import raises +from graphql import parse, get_introspection_query, validate + +from ...types import Schema, ObjectType, Interface +from ...types import String, Int, List, Field +from ..depth_limit_validator import depth_limit_validator + + +class PetType(Interface): + name = String(required=True) + + class meta: + name = "Pet" + + +class CatType(ObjectType): + class meta: + name = "Cat" + interfaces = (PetType,) + + +class DogType(ObjectType): + class meta: + name = "Dog" + interfaces = (PetType,) + + +class AddressType(ObjectType): + street = String(required=True) + number = Int(required=True) + city = String(required=True) + country = String(required=True) + + class Meta: + name = "Address" + + +class HumanType(ObjectType): + name = String(required=True) + email = String(required=True) + address = Field(AddressType, required=True) + pets = List(PetType, required=True) + + class Meta: + name = "Human" + + +class Query(ObjectType): + user = Field( + HumanType, + required=True, + name=String() + ) + version = String( + required=True + ) + user1 = Field( + HumanType, + required=True + ) + user2 = Field( + HumanType, + required=True + ) + user3 = Field( + HumanType, + required=True + ) + + @staticmethod + def resolve_user(root, info, name=None): + pass + + +schema = Schema(query=Query) + + +def run_query(query: str, max_depth: int, ignore=None): + document = parse(query) + + result = None + + def callback(query_depths): + nonlocal result + result = query_depths + + errors = validate( + schema.graphql_schema, + document, + rules=( + depth_limit_validator( + max_depth=max_depth, + ignore=ignore, + callback=callback + ), + ), + ) + + return errors, result + + +def test_should_count_depth_without_fragment(): + query = """ + query read0 { + version + } + query read1 { + version + user { + name + } + } + query read2 { + matt: user(name: "matt") { + email + } + andy: user(name: "andy") { + email + address { + city + } + } + } + query read3 
{ + matt: user(name: "matt") { + email + } + andy: user(name: "andy") { + email + address { + city + } + pets { + name + owner { + name + } + } + } + } + """ + + expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} + + errors, result = run_query(query, 10) + assert not errors + assert result == expected + + +def test_should_count_with_fragments(): + query = """ + query read0 { + ... on Query { + version + } + } + query read1 { + version + user { + ... on Human { + name + } + } + } + fragment humanInfo on Human { + email + } + fragment petInfo on Pet { + name + owner { + name + } + } + query read2 { + matt: user(name: "matt") { + ...humanInfo + } + andy: user(name: "andy") { + ...humanInfo + address { + city + } + } + } + query read3 { + matt: user(name: "matt") { + ...humanInfo + } + andy: user(name: "andy") { + ... on Human { + email + } + address { + city + } + pets { + ...petInfo + } + } + } + """ + + expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} + + errors, result = run_query(query, 10) + assert not errors + assert result == expected + + +def test_should_ignore_the_introspection_query(): + errors, result = run_query(get_introspection_query(), 10) + assert not errors + assert result == {"IntrospectionQuery": 0} + + +def test_should_catch_very_deep_query(): + query = """{ + user { + pets { + owner { + pets { + owner { + pets { + name + } + } + } + } + } + } + } + """ + errors, result = run_query(query, 4) + + assert len(errors) == 1 + assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4" + + +def test_should_ignore_field(): + query = """ + query read1 { + user { address { city } } + } + query read2 { + user1 { address { city } } + user2 { address { city } } + user3 { address { city } } + } + """ + + errors, result = run_query( + query, + 10, + ignore=[ + "user1", + re.compile("user2"), + lambda field_name: field_name == "user3", + ], + ) + + expected = {"read1": 2, "read2": 0} + assert not errors + assert result == 
expected + + +def test_should_raise_invalid_ignore(): + query = """ + query read1 { + user { address { city } } + } + """ + with raises(ValueError, match="Invalid ignore option:"): + run_query( + query, + 10, + ignore=[True], + ) From fc2967e276bb78a4b388feaa091c2f9bc1f31ca2 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Fri, 13 Aug 2021 18:51:23 +0530 Subject: [PATCH 086/141] remove unused imports --- graphene/validators/depth_limit_validator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/validators/depth_limit_validator.py b/graphene/validators/depth_limit_validator.py index 436152055..d25897006 100644 --- a/graphene/validators/depth_limit_validator.py +++ b/graphene/validators/depth_limit_validator.py @@ -28,7 +28,7 @@ import re from typing import Callable, Dict, List, Optional, Union -from graphql import GraphQLError, is_introspection_type +from graphql import GraphQLError from graphql.language import ( DefinitionNode, FieldNode, From 4259502dc373c8d3a4d6463696441e4bd5a0cc68 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Fri, 13 Aug 2021 20:02:20 +0530 Subject: [PATCH 087/141] update docs --- docs/execution/validators.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/validators.rst b/docs/execution/validators.rst index 94bb7c2fc..f1cfac882 100644 --- a/docs/execution/validators.rst +++ b/docs/execution/validators.rst @@ -1,4 +1,4 @@ -Middleware +Validators ========== Validation rules help validate a given GraphQL query, before executing it.To help with common use From 5977b1648ce75730a3a494aeeef9df43fc5f2330 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Fri, 13 Aug 2021 20:04:42 +0530 Subject: [PATCH 088/141] fix typo --- docs/execution/validators.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/validators.rst 
b/docs/execution/validators.rst index f1cfac882..a37c80ab4 100644 --- a/docs/execution/validators.rst +++ b/docs/execution/validators.rst @@ -1,7 +1,7 @@ Validators ========== -Validation rules help validate a given GraphQL query, before executing it.To help with common use +Validation rules help validate a given GraphQL query, before executing it. To help with common use cases, graphene provides a few validation rules out of the box. From a784ef15e59851afa804162a200b9c80a11c200c Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Fri, 13 Aug 2021 20:24:53 +0530 Subject: [PATCH 089/141] add disable introspection --- docs/execution/validators.rst | 2 +- graphene/utils/is_introspection_key.py | 6 + graphene/validation/__init__.py | 8 + .../depth_limit.py} | 10 +- graphene/validation/disable_introspection.py | 22 ++ .../tests/__init__.py | 0 .../tests/test_disable_introspection.py | 33 +++ graphene/validators/__init__.py | 6 - .../tests/test_depth_limit_validator.py | 279 ------------------ 9 files changed, 74 insertions(+), 292 deletions(-) create mode 100644 graphene/utils/is_introspection_key.py create mode 100644 graphene/validation/__init__.py rename graphene/{validators/depth_limit_validator.py => validation/depth_limit.py} (94%) create mode 100644 graphene/validation/disable_introspection.py rename graphene/{validators => validation}/tests/__init__.py (100%) create mode 100644 graphene/validation/tests/test_disable_introspection.py delete mode 100644 graphene/validators/__init__.py delete mode 100644 graphene/validators/tests/test_depth_limit_validator.py diff --git a/docs/execution/validators.rst b/docs/execution/validators.rst index a37c80ab4..92b8ecd2b 100644 --- a/docs/execution/validators.rst +++ b/docs/execution/validators.rst @@ -20,7 +20,7 @@ Example Here is how you would implement depth-limiting on your schema. .. 
code:: python - from graphene.validators import depth_limit_validator + from graphene.validation import depth_limit_validator # The following schema doesn't execute queries # which have a depth more than 20. diff --git a/graphene/utils/is_introspection_key.py b/graphene/utils/is_introspection_key.py new file mode 100644 index 000000000..689519404 --- /dev/null +++ b/graphene/utils/is_introspection_key.py @@ -0,0 +1,6 @@ +def is_introspection_key(key): + # from: https://spec.graphql.org/June2018/#sec-Schema + # > All types and directives defined within a schema must not have a name which + # > begins with "__" (two underscores), as this is used exclusively + # > by GraphQL’s introspection system. + return str(node.name.value).startswith("__") diff --git a/graphene/validation/__init__.py b/graphene/validation/__init__.py new file mode 100644 index 000000000..03e4605c8 --- /dev/null +++ b/graphene/validation/__init__.py @@ -0,0 +1,8 @@ +from .depth_limit import depth_limit_validator +from .disable_introspection import disable_introspection + + +__all__ = [ + "depth_limit_validator", + "disable_introspection" +] diff --git a/graphene/validators/depth_limit_validator.py b/graphene/validation/depth_limit.py similarity index 94% rename from graphene/validators/depth_limit_validator.py rename to graphene/validation/depth_limit.py index d25897006..4136555d7 100644 --- a/graphene/validators/depth_limit_validator.py +++ b/graphene/validation/depth_limit.py @@ -29,6 +29,7 @@ from typing import Callable, Dict, List, Optional, Union from graphql import GraphQLError +from graphql.validation import ValidationContext, ValidationRule from graphql.language import ( DefinitionNode, FieldNode, @@ -38,7 +39,8 @@ Node, OperationDefinitionNode, ) -from graphql.validation import ValidationContext, ValidationRule + +from ..utils.is_introspection_key import is_introspection_key IgnoreType = Union[Callable[[str], bool], re.Pattern, str] @@ -121,11 +123,7 @@ def determine_depth( return 
depth_so_far if isinstance(node, FieldNode): - # from: https://spec.graphql.org/June2018/#sec-Schema - # > All types and directives defined within a schema must not have a name which - # > begins with "__" (two underscores), as this is used exclusively - # > by GraphQL’s introspection system. - should_ignore = str(node.name.value).startswith("__") or is_ignored( + should_ignore = is_introspection_key(node.name.value) or is_ignored( node, ignore ) diff --git a/graphene/validation/disable_introspection.py b/graphene/validation/disable_introspection.py new file mode 100644 index 000000000..eb24be554 --- /dev/null +++ b/graphene/validation/disable_introspection.py @@ -0,0 +1,22 @@ +from graphql import GraphQLError +from graphql.language import FieldNode +from graphql.validation import ValidationRule + +from ..utils.is_introspection_key import is_introspection_key + + +def disable_introspection(): + class DisableIntrospection(ValidationRule): + def enter_field(self, node: FieldNode, *_args): + field_name = node.name.value + if not is_introspection_key(field_name): + return + + self.report_error( + GraphQLError( + f"Cannot query '{field_name}': introspection is disabled.", + node, + ) + ) + + return DisableIntrospection diff --git a/graphene/validators/tests/__init__.py b/graphene/validation/tests/__init__.py similarity index 100% rename from graphene/validators/tests/__init__.py rename to graphene/validation/tests/__init__.py diff --git a/graphene/validation/tests/test_disable_introspection.py b/graphene/validation/tests/test_disable_introspection.py new file mode 100644 index 000000000..4d1faa7d2 --- /dev/null +++ b/graphene/validation/tests/test_disable_introspection.py @@ -0,0 +1,33 @@ +from graphql import parse, validate + +from ...types import Schema, ObjectType, String +from ..disable_introspection import disable_introspection + + +class Query(ObjectType): + name = String( + required=True + ) + + +schema = Schema(query=Query) + + +def run_query(query: str): + 
document = parse(query) + + result = None + + def callback(query_depths): + nonlocal result + result = query_depths + + errors = validate( + schema.graphql_schema, + document, + rules=( + disable_introspection(), + ), + ) + + return errors, result diff --git a/graphene/validators/__init__.py b/graphene/validators/__init__.py deleted file mode 100644 index 8bd8d884f..000000000 --- a/graphene/validators/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .depth_limit_validator import depth_limit_validator - - -__all__ = [ - "depth_limit_validator" -] diff --git a/graphene/validators/tests/test_depth_limit_validator.py b/graphene/validators/tests/test_depth_limit_validator.py deleted file mode 100644 index ea50c8d49..000000000 --- a/graphene/validators/tests/test_depth_limit_validator.py +++ /dev/null @@ -1,279 +0,0 @@ -import re - -from pytest import raises -from graphql import parse, get_introspection_query, validate - -from ...types import Schema, ObjectType, Interface -from ...types import String, Int, List, Field -from ..depth_limit_validator import depth_limit_validator - - -class PetType(Interface): - name = String(required=True) - - class meta: - name = "Pet" - - -class CatType(ObjectType): - class meta: - name = "Cat" - interfaces = (PetType,) - - -class DogType(ObjectType): - class meta: - name = "Dog" - interfaces = (PetType,) - - -class AddressType(ObjectType): - street = String(required=True) - number = Int(required=True) - city = String(required=True) - country = String(required=True) - - class Meta: - name = "Address" - - -class HumanType(ObjectType): - name = String(required=True) - email = String(required=True) - address = Field(AddressType, required=True) - pets = List(PetType, required=True) - - class Meta: - name = "Human" - - -class Query(ObjectType): - user = Field( - HumanType, - required=True, - name=String() - ) - version = String( - required=True - ) - user1 = Field( - HumanType, - required=True - ) - user2 = Field( - HumanType, - required=True - 
) - user3 = Field( - HumanType, - required=True - ) - - @staticmethod - def resolve_user(root, info, name=None): - pass - - -schema = Schema(query=Query) - - -def run_query(query: str, max_depth: int, ignore=None): - document = parse(query) - - result = None - - def callback(query_depths): - nonlocal result - result = query_depths - - errors = validate( - schema.graphql_schema, - document, - rules=( - depth_limit_validator( - max_depth=max_depth, - ignore=ignore, - callback=callback - ), - ), - ) - - return errors, result - - -def test_should_count_depth_without_fragment(): - query = """ - query read0 { - version - } - query read1 { - version - user { - name - } - } - query read2 { - matt: user(name: "matt") { - email - } - andy: user(name: "andy") { - email - address { - city - } - } - } - query read3 { - matt: user(name: "matt") { - email - } - andy: user(name: "andy") { - email - address { - city - } - pets { - name - owner { - name - } - } - } - } - """ - - expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} - - errors, result = run_query(query, 10) - assert not errors - assert result == expected - - -def test_should_count_with_fragments(): - query = """ - query read0 { - ... on Query { - version - } - } - query read1 { - version - user { - ... on Human { - name - } - } - } - fragment humanInfo on Human { - email - } - fragment petInfo on Pet { - name - owner { - name - } - } - query read2 { - matt: user(name: "matt") { - ...humanInfo - } - andy: user(name: "andy") { - ...humanInfo - address { - city - } - } - } - query read3 { - matt: user(name: "matt") { - ...humanInfo - } - andy: user(name: "andy") { - ... 
on Human { - email - } - address { - city - } - pets { - ...petInfo - } - } - } - """ - - expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} - - errors, result = run_query(query, 10) - assert not errors - assert result == expected - - -def test_should_ignore_the_introspection_query(): - errors, result = run_query(get_introspection_query(), 10) - assert not errors - assert result == {"IntrospectionQuery": 0} - - -def test_should_catch_very_deep_query(): - query = """{ - user { - pets { - owner { - pets { - owner { - pets { - name - } - } - } - } - } - } - } - """ - errors, result = run_query(query, 4) - - assert len(errors) == 1 - assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4" - - -def test_should_ignore_field(): - query = """ - query read1 { - user { address { city } } - } - query read2 { - user1 { address { city } } - user2 { address { city } } - user3 { address { city } } - } - """ - - errors, result = run_query( - query, - 10, - ignore=[ - "user1", - re.compile("user2"), - lambda field_name: field_name == "user3", - ], - ) - - expected = {"read1": 2, "read2": 0} - assert not errors - assert result == expected - - -def test_should_raise_invalid_ignore(): - query = """ - query read1 { - user { address { city } } - } - """ - with raises(ValueError, match="Invalid ignore option:"): - run_query( - query, - 10, - ignore=[True], - ) From d7b474751d59b9c94279283295057f224b7688a7 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Sat, 14 Aug 2021 07:45:34 +0530 Subject: [PATCH 090/141] add depth limit validator tests --- docs/execution/validators.rst | 24 +- graphene/utils/is_introspection_key.py | 2 +- .../tests/test_depth_limit_validator.py | 279 ++++++++++++++++++ .../tests/test_disable_introspection.py | 4 - 4 files changed, 297 insertions(+), 12 deletions(-) create mode 100644 graphene/validation/tests/test_depth_limit_validator.py diff --git a/docs/execution/validators.rst 
b/docs/execution/validators.rst index 92b8ecd2b..d7e1310b1 100644 --- a/docs/execution/validators.rst +++ b/docs/execution/validators.rst @@ -20,16 +20,26 @@ Example Here is how you would implement depth-limiting on your schema. .. code:: python + from graphql import validate + from graphene import ObjectType, Schema, String from graphene.validation import depth_limit_validator - # The following schema doesn't execute queries - # which have a depth more than 20. - result = schema.execute( - 'THE QUERY', - validation_rules=[ + class MyQuery(ObjectType): + name = String(required=True) + + + schema = Schema(query=MyQuery) + + # Queries which have a depth more than 20 + # will not be executed. + + validation_errors = validate( + schema=schema, + document='THE QUERY', + rules=( depth_limit_validator( max_depth=20 - ) - ] + ), + ) ) diff --git a/graphene/utils/is_introspection_key.py b/graphene/utils/is_introspection_key.py index 689519404..59d72b24c 100644 --- a/graphene/utils/is_introspection_key.py +++ b/graphene/utils/is_introspection_key.py @@ -3,4 +3,4 @@ def is_introspection_key(key): # > All types and directives defined within a schema must not have a name which # > begins with "__" (two underscores), as this is used exclusively # > by GraphQL’s introspection system. 
- return str(node.name.value).startswith("__") + return str(key).startswith("__") diff --git a/graphene/validation/tests/test_depth_limit_validator.py b/graphene/validation/tests/test_depth_limit_validator.py new file mode 100644 index 000000000..3eea3a32c --- /dev/null +++ b/graphene/validation/tests/test_depth_limit_validator.py @@ -0,0 +1,279 @@ +import re + +from pytest import raises +from graphql import parse, get_introspection_query, validate + +from ...types import Schema, ObjectType, Interface +from ...types import String, Int, List, Field +from ..depth_limit import depth_limit_validator + + +class PetType(Interface): + name = String(required=True) + + class meta: + name = "Pet" + + +class CatType(ObjectType): + class meta: + name = "Cat" + interfaces = (PetType,) + + +class DogType(ObjectType): + class meta: + name = "Dog" + interfaces = (PetType,) + + +class AddressType(ObjectType): + street = String(required=True) + number = Int(required=True) + city = String(required=True) + country = String(required=True) + + class Meta: + name = "Address" + + +class HumanType(ObjectType): + name = String(required=True) + email = String(required=True) + address = Field(AddressType, required=True) + pets = List(PetType, required=True) + + class Meta: + name = "Human" + + +class Query(ObjectType): + user = Field( + HumanType, + required=True, + name=String() + ) + version = String( + required=True + ) + user1 = Field( + HumanType, + required=True + ) + user2 = Field( + HumanType, + required=True + ) + user3 = Field( + HumanType, + required=True + ) + + @staticmethod + def resolve_user(root, info, name=None): + pass + + +schema = Schema(query=Query) + + +def run_query(query: str, max_depth: int, ignore=None): + document = parse(query) + + result = None + + def callback(query_depths): + nonlocal result + result = query_depths + + errors = validate( + schema.graphql_schema, + document, + rules=( + depth_limit_validator( + max_depth=max_depth, + ignore=ignore, + 
callback=callback + ), + ), + ) + + return errors, result + + +def test_should_count_depth_without_fragment(): + query = """ + query read0 { + version + } + query read1 { + version + user { + name + } + } + query read2 { + matt: user(name: "matt") { + email + } + andy: user(name: "andy") { + email + address { + city + } + } + } + query read3 { + matt: user(name: "matt") { + email + } + andy: user(name: "andy") { + email + address { + city + } + pets { + name + owner { + name + } + } + } + } + """ + + expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} + + errors, result = run_query(query, 10) + assert not errors + assert result == expected + + +def test_should_count_with_fragments(): + query = """ + query read0 { + ... on Query { + version + } + } + query read1 { + version + user { + ... on Human { + name + } + } + } + fragment humanInfo on Human { + email + } + fragment petInfo on Pet { + name + owner { + name + } + } + query read2 { + matt: user(name: "matt") { + ...humanInfo + } + andy: user(name: "andy") { + ...humanInfo + address { + city + } + } + } + query read3 { + matt: user(name: "matt") { + ...humanInfo + } + andy: user(name: "andy") { + ... 
on Human { + email + } + address { + city + } + pets { + ...petInfo + } + } + } + """ + + expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3} + + errors, result = run_query(query, 10) + assert not errors + assert result == expected + + +def test_should_ignore_the_introspection_query(): + errors, result = run_query(get_introspection_query(), 10) + assert not errors + assert result == {"IntrospectionQuery": 0} + + +def test_should_catch_very_deep_query(): + query = """{ + user { + pets { + owner { + pets { + owner { + pets { + name + } + } + } + } + } + } + } + """ + errors, result = run_query(query, 4) + + assert len(errors) == 1 + assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4" + + +def test_should_ignore_field(): + query = """ + query read1 { + user { address { city } } + } + query read2 { + user1 { address { city } } + user2 { address { city } } + user3 { address { city } } + } + """ + + errors, result = run_query( + query, + 10, + ignore=[ + "user1", + re.compile("user2"), + lambda field_name: field_name == "user3", + ], + ) + + expected = {"read1": 2, "read2": 0} + assert not errors + assert result == expected + + +def test_should_raise_invalid_ignore(): + query = """ + query read1 { + user { address { city } } + } + """ + with raises(ValueError, match="Invalid ignore option:"): + run_query( + query, + 10, + ignore=[True], + ) diff --git a/graphene/validation/tests/test_disable_introspection.py b/graphene/validation/tests/test_disable_introspection.py index 4d1faa7d2..c13786ed8 100644 --- a/graphene/validation/tests/test_disable_introspection.py +++ b/graphene/validation/tests/test_disable_introspection.py @@ -18,10 +18,6 @@ def run_query(query: str): result = None - def callback(query_depths): - nonlocal result - result = query_depths - errors = validate( schema.graphql_schema, document, From 7be4bd6bc6a916f2c4f2ecc7cf184064dc2c8f19 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> 
Date: Sat, 14 Aug 2021 07:49:09 +0530 Subject: [PATCH 091/141] update docs --- docs/execution/index.rst | 2 +- docs/execution/{validators.rst => validation.rst} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename docs/execution/{validators.rst => validation.rst} (99%) diff --git a/docs/execution/index.rst b/docs/execution/index.rst index 466526657..f775cc007 100644 --- a/docs/execution/index.rst +++ b/docs/execution/index.rst @@ -10,4 +10,4 @@ Execution dataloader fileuploading subscriptions - validators + validation diff --git a/docs/execution/validators.rst b/docs/execution/validation.rst similarity index 99% rename from docs/execution/validators.rst rename to docs/execution/validation.rst index d7e1310b1..ac27ec438 100644 --- a/docs/execution/validators.rst +++ b/docs/execution/validation.rst @@ -1,4 +1,4 @@ -Validators +Validation ========== Validation rules help validate a given GraphQL query, before executing it. To help with common use From ac5dd90f5fc610a37af7fc27efd022d15ebe821f Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Sat, 14 Aug 2021 07:54:58 +0530 Subject: [PATCH 092/141] fix typo in docs --- docs/execution/validation.rst | 4 ++-- graphene/validation/tests/test_depth_limit_validator.py | 4 ++-- graphene/validation/tests/test_disable_introspection.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/execution/validation.rst b/docs/execution/validation.rst index ac27ec438..7be4fd664 100644 --- a/docs/execution/validation.rst +++ b/docs/execution/validation.rst @@ -20,7 +20,7 @@ Example Here is how you would implement depth-limiting on your schema. .. code:: python - from graphql import validate + from graphql import validate, parse from graphene import ObjectType, Schema, String from graphene.validation import depth_limit_validator @@ -36,7 +36,7 @@ Here is how you would implement depth-limiting on your schema. 
validation_errors = validate( schema=schema, - document='THE QUERY', + document_ast=parse('THE QUERY'), rules=( depth_limit_validator( max_depth=20 diff --git a/graphene/validation/tests/test_depth_limit_validator.py b/graphene/validation/tests/test_depth_limit_validator.py index 3eea3a32c..ea62f9999 100644 --- a/graphene/validation/tests/test_depth_limit_validator.py +++ b/graphene/validation/tests/test_depth_limit_validator.py @@ -87,8 +87,8 @@ def callback(query_depths): result = query_depths errors = validate( - schema.graphql_schema, - document, + schema=schema.graphql_schema, + document_ast=document, rules=( depth_limit_validator( max_depth=max_depth, diff --git a/graphene/validation/tests/test_disable_introspection.py b/graphene/validation/tests/test_disable_introspection.py index c13786ed8..b7f0b83fe 100644 --- a/graphene/validation/tests/test_disable_introspection.py +++ b/graphene/validation/tests/test_disable_introspection.py @@ -19,8 +19,8 @@ def run_query(query: str): result = None errors = validate( - schema.graphql_schema, - document, + schema=schema.graphql_schema, + document_ast=document, rules=( disable_introspection(), ), From c68071952da8da3e6ca9d125587e626a8d02d8cd Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Sat, 14 Aug 2021 08:20:46 +0530 Subject: [PATCH 093/141] mention how to implement custom validators --- docs/execution/index.rst | 2 +- docs/execution/queryvalidation.rst | 87 ++++++++++++++++++++++++++++++ docs/execution/validation.rst | 45 ---------------- 3 files changed, 88 insertions(+), 46 deletions(-) create mode 100644 docs/execution/queryvalidation.rst delete mode 100644 docs/execution/validation.rst diff --git a/docs/execution/index.rst b/docs/execution/index.rst index f775cc007..f26259d36 100644 --- a/docs/execution/index.rst +++ b/docs/execution/index.rst @@ -10,4 +10,4 @@ Execution dataloader fileuploading subscriptions - validation + queryvalidation diff --git 
a/docs/execution/queryvalidation.rst b/docs/execution/queryvalidation.rst new file mode 100644 index 000000000..35f3577e2 --- /dev/null +++ b/docs/execution/queryvalidation.rst @@ -0,0 +1,87 @@ +Query Validation +================ +GraphQL uses query validators to check if Query AST is valid and can be executed. Every GraphQL server implements +standard query validators. For example, there is a validator that tests if a queried field exists on the queried type, and +makes the query fail with "Cannot query field on type" error if it doesn't. + +To help with common use cases, graphene provides a few validation rules out of the box. + + +Depth limit Validator +--------------------- +The depth limit validator helps to prevent execution of malicious +queries. It takes in the following arguments. + +- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document. +- ``ignore`` Stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean +- ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation. + +Example +------- + +Here is how you would implement depth-limiting on your schema. + +.. code:: python + from graphql import validate, parse + from graphene import ObjectType, Schema, String + from graphene.validation import depth_limit_validator + + + class MyQuery(ObjectType): + name = String(required=True) + + + schema = Schema(query=MyQuery) + + # Queries which have a depth more than 20 + # will not be executed. + + validation_errors = validate( + schema=schema, + document_ast=parse('THE QUERY'), + rules=( + depth_limit_validator( + max_depth=20 + ), + ) + ) + + +Implementing custom validators +------------------------------ +All custom query validators should extend the `ValidationRule `_ +base class importable from the graphql.validation.rules module. Query validators are visitor classes.
They are +instantiated at the time of query validation with one required argument (context: ASTValidationContext). In order to +perform validation, your validator class should define one or more of enter_* and leave_* methods. For possible +enter/leave items as well as details on function documentation, please see contents of the visitor module. To make +validation fail, you should call validator's report_error method with the instance of GraphQLError describing failure +reason. Here is an example query validator that visits field definitions in GraphQL query and fails query validation +if any of those fields are introspection fields: + +.. code:: python + from graphql import GraphQLError + from graphql.language import FieldNode + from graphql.validation import ValidationRule + + + my_blacklist = ( + "disallowed_field", + ) + + + def is_blacklisted_field(field_name: str): + return key.lower() in my_blacklist + + + class BlackListRule(ValidationRule): + def enter_field(self, node: FieldNode, *_args): + field_name = node.name.value + if not is_blacklisted_field(field_name): + return + + self.report_error( + GraphQLError( + f"Cannot query '{field_name}': field is blacklisted.", node, + ) + ) + diff --git a/docs/execution/validation.rst b/docs/execution/validation.rst deleted file mode 100644 index 7be4fd664..000000000 --- a/docs/execution/validation.rst +++ /dev/null @@ -1,45 +0,0 @@ -Validation -========== - -Validation rules help validate a given GraphQL query, before executing it. To help with common use -cases, graphene provides a few validation rules out of the box. - - -Depth limit Validator ------------------ -The depth limit validator helps to prevent execution of malicious -queries. It takes in the following arguments. - -- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document. -- ``ignore`` Stops recursive depth checking based on a field name. 
Either a string or regexp to match the name, or a function that returns a boolean -- ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation. - -Example -------- - -Here is how you would implement depth-limiting on your schema. - -.. code:: python - from graphql import validate, parse - from graphene import ObjectType, Schema, String - from graphene.validation import depth_limit_validator - - - class MyQuery(ObjectType): - name = String(required=True) - - - schema = Schema(query=MyQuery) - - # Queries which have a depth more than 20 - # will not be executed. - - validation_errors = validate( - schema=schema, - document_ast=parse('THE QUERY'), - rules=( - depth_limit_validator( - max_depth=20 - ), - ) - ) From ec982ac50b2c79dc956f4e59fde5cf40092af0d8 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Sat, 14 Aug 2021 08:22:04 +0530 Subject: [PATCH 094/141] update docs typo --- docs/execution/queryvalidation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/execution/queryvalidation.rst b/docs/execution/queryvalidation.rst index 35f3577e2..2d58f7ab3 100644 --- a/docs/execution/queryvalidation.rst +++ b/docs/execution/queryvalidation.rst @@ -56,7 +56,7 @@ perform validation, your validator class should define one or more of enter_* an enter/leave items as well as details on function documentation, please see contents of the visitor module. To make validation fail, you should call validator's report_error method with the instance of GraphQLError describing failure reason. Here is an example query validator that visits field definitions in GraphQL query and fails query validation -if any of those fields are introspection fields: +if any of those fields are blacklisted fields: .. 
code:: python from graphql import GraphQLError From 4e32dac25118e8b043601d61229a1cacfda9cbf6 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Sat, 14 Aug 2021 08:41:24 +0530 Subject: [PATCH 095/141] add tests and docs for disable introspection rule --- docs/execution/queryvalidation.rst | 41 +++++++++++++++++-- graphene/validation/__init__.py | 6 +-- graphene/validation/depth_limit.py | 6 +-- graphene/validation/disable_introspection.py | 23 +++++------ .../tests/test_depth_limit_validator.py | 2 +- .../tests/test_disable_introspection.py | 24 ++++++++--- 6 files changed, 73 insertions(+), 29 deletions(-) diff --git a/docs/execution/queryvalidation.rst b/docs/execution/queryvalidation.rst index 2d58f7ab3..8402b9ea9 100644 --- a/docs/execution/queryvalidation.rst +++ b/docs/execution/queryvalidation.rst @@ -16,7 +16,7 @@ queries. It takes in the following arguments. - ``ignore`` Stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean - ``callback`` Called each time validation runs. Receives an Object which is a map of the depths for each operation. -Example +Usage ------- Here is how you would implement depth-limiting on your schema. @@ -33,7 +33,7 @@ Here is how you would implement depth-limiting on your schema. schema = Schema(query=MyQuery) - # Queries which have a depth more than 20 + # queries which have a depth more than 20 # will not be executed. validation_errors = validate( @@ -47,6 +47,39 @@ Here is how you would implement depth-limiting on your schema. ) +Disable Introspection +--------------------- +the disable introspection validation rule ensures that your schema cannot be introspected. +This is a useful security measure in production environments. + +Usage +------- + +Here is how you would disable introspection for your schema. + +.. 
code:: python + from graphql import validate, parse + from graphene import ObjectType, Schema, String + from graphene.validation import DisableIntrospection + + + class MyQuery(ObjectType): + name = String(required=True) + + + schema = Schema(query=MyQuery) + + # introspection queries will not be executed. + + validation_errors = validate( + schema=schema, + document_ast=parse('THE QUERY'), + rules=( + DisableIntrospection, + ) + ) + + Implementing custom validators ------------------------------ All custom query validators should extend the `ValidationRule `_ @@ -56,7 +89,7 @@ perform validation, your validator class should define one or more of enter_* an enter/leave items as well as details on function documentation, please see contents of the visitor module. To make validation fail, you should call validator's report_error method with the instance of GraphQLError describing failure reason. Here is an example query validator that visits field definitions in GraphQL query and fails query validation -if any of those fields are blacklisted fields: +if any of those fields are blacklisted: .. 
code:: python from graphql import GraphQLError @@ -70,7 +103,7 @@ if any of those fields are blacklisted fields: def is_blacklisted_field(field_name: str): - return key.lower() in my_blacklist + return field_name.lower() in my_blacklist class BlackListRule(ValidationRule): diff --git a/graphene/validation/__init__.py b/graphene/validation/__init__.py index 03e4605c8..f338e2d0d 100644 --- a/graphene/validation/__init__.py +++ b/graphene/validation/__init__.py @@ -1,8 +1,8 @@ from .depth_limit import depth_limit_validator -from .disable_introspection import disable_introspection +from .disable_introspection import DisableIntrospection __all__ = [ - "depth_limit_validator", - "disable_introspection" + "DisableIntrospection", + "depth_limit_validator" ] diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index 4136555d7..8363a6c9c 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -116,7 +116,7 @@ def determine_depth( if depth_so_far > max_depth: context.report_error( GraphQLError( - f"'{operation_name}' exceeds maximum operation depth of {max_depth}", + f"'{operation_name}' exceeds maximum operation depth of {max_depth}.", [node], ) ) @@ -172,7 +172,7 @@ def determine_depth( ) ) else: - raise Exception(f"Depth crawler cannot handle: {node.kind}") # pragma: no cover + raise Exception(f"Depth crawler cannot handle: {node.kind}.") # pragma: no cover def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: @@ -191,6 +191,6 @@ def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bo if rule(field_name): return True else: - raise ValueError(f"Invalid ignore option: {rule}") + raise ValueError(f"Invalid ignore option: {rule}.") return False diff --git a/graphene/validation/disable_introspection.py b/graphene/validation/disable_introspection.py index eb24be554..4c83050e6 100644 --- a/graphene/validation/disable_introspection.py +++ 
b/graphene/validation/disable_introspection.py @@ -5,18 +5,15 @@ from ..utils.is_introspection_key import is_introspection_key -def disable_introspection(): - class DisableIntrospection(ValidationRule): - def enter_field(self, node: FieldNode, *_args): - field_name = node.name.value - if not is_introspection_key(field_name): - return +class DisableIntrospection(ValidationRule): + def enter_field(self, node: FieldNode, *_args): + field_name = node.name.value + if not is_introspection_key(field_name): + return - self.report_error( - GraphQLError( - f"Cannot query '{field_name}': introspection is disabled.", - node, - ) + self.report_error( + GraphQLError( + f"Cannot query '{field_name}': introspection is disabled.", + node, ) - - return DisableIntrospection + ) diff --git a/graphene/validation/tests/test_depth_limit_validator.py b/graphene/validation/tests/test_depth_limit_validator.py index ea62f9999..499adbcce 100644 --- a/graphene/validation/tests/test_depth_limit_validator.py +++ b/graphene/validation/tests/test_depth_limit_validator.py @@ -235,7 +235,7 @@ def test_should_catch_very_deep_query(): errors, result = run_query(query, 4) assert len(errors) == 1 - assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4" + assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4." 
def test_should_ignore_field(): diff --git a/graphene/validation/tests/test_disable_introspection.py b/graphene/validation/tests/test_disable_introspection.py index b7f0b83fe..060199001 100644 --- a/graphene/validation/tests/test_disable_introspection.py +++ b/graphene/validation/tests/test_disable_introspection.py @@ -1,7 +1,7 @@ from graphql import parse, validate from ...types import Schema, ObjectType, String -from ..disable_introspection import disable_introspection +from ..disable_introspection import DisableIntrospection class Query(ObjectType): @@ -9,6 +9,10 @@ class Query(ObjectType): required=True ) + @staticmethod + def resolve_name(root, info): + return "Hello world!" + schema = Schema(query=Query) @@ -16,14 +20,24 @@ class Query(ObjectType): def run_query(query: str): document = parse(query) - result = None - errors = validate( schema=schema.graphql_schema, document_ast=document, rules=( - disable_introspection(), + DisableIntrospection, ), ) - return errors, result + return errors + + +def test_disallows_introspection_queries(): + errors = run_query("{ __schema { queryType { name } } }") + + assert len(errors) == 1 + assert errors[0].message == "Cannot query '__schema': introspection is disabled." 
+ + +def test_allows_non_introspection_queries(): + errors = run_query("{ name }") + assert len(errors) == 0 From b4be4a686bd2d5279433dc77346b279f13d3f1e3 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Thu, 19 Aug 2021 10:59:58 +0530 Subject: [PATCH 096/141] add notice to failing tests --- graphene/types/tests/test_schema.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index 54c48b4f2..f84d2e204 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -175,6 +175,11 @@ def test_nested_graphql_error(self, schema): ], ) def test_unexpected_error(self, field, exception, schema): + # FIXME: tests are failing currently because no exception + # is being raised below. Instead, the errors are being propagated + # to the `errors` array of the response. If this is intended + # behaviour, we need to check if the error exists in the `errors` + # array rather than checking if an exception is raised. 
with raises(exception): # no result, but the exception should be propagated schema.execute( From 467b1f8e8d30d59a1422bbfe2805e20145f0a7fd Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Thu, 19 Aug 2021 12:03:27 +0530 Subject: [PATCH 097/141] add workflow: tests --- .github/{ => workflows}/stale.yml | 0 .github/workflows/tests.yml | 85 +++++++++++++++++++ ...mmit-config.yaml => .pre-commit-config.yml | 0 .travis.yml | 42 --------- 4 files changed, 85 insertions(+), 42 deletions(-) rename .github/{ => workflows}/stale.yml (100%) create mode 100644 .github/workflows/tests.yml rename .pre-commit-config.yaml => .pre-commit-config.yml (100%) delete mode 100644 .travis.yml diff --git a/.github/stale.yml b/.github/workflows/stale.yml similarity index 100% rename from .github/stale.yml rename to .github/workflows/stale.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000..cf8b87190 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,85 @@ +name: 📄 Tests +on: + push: + branches: + - master + - '*.x' + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' + pull_request: + branches: + - master + - '*.x' + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' +jobs: + tests: + # runs the test suite + name: ${{ matrix.name }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38} + - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37} + - {name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36} + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + + - name: update pip + run: | + pip install -U wheel + pip install -U setuptools + python -m pip install -U pip + + - name: get pip cache dir + id: pip-cache + run: echo "::set-output name=dir::$(pip cache dir)" + + - name: cache pip + uses: actions/cache@v2 + with: 
+ path: ${{ steps.pip-cache.outputs.dir }} + key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }} + + - run: pip install tox + - run: tox -e ${{ matrix.tox }} + + coveralls: + # check coverage increase/decrease + needs: tests + runs-on: ${{ matrix.os }} + steps: + - name: Coveralls Finished + uses: AndreMiras/coveralls-python-action@develop + with: + parallel-finished: true + + deploy: + # builds and publishes to PyPi + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yml similarity index 100% rename from .pre-commit-config.yaml rename to .pre-commit-config.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index e1e551198..000000000 --- a/.travis.yml +++ /dev/null @@ -1,42 +0,0 @@ -language: python -dist: xenial - -python: - - "3.6" - - "3.7" - - "3.8" - -install: - - pip install tox tox-travis -script: tox -after_success: - - pip install coveralls - - coveralls -cache: - directories: - - $HOME/.cache/pip - - $HOME/.cache/pre-commit - -stages: - - test - - name: deploy - if: tag IS present - -jobs: - fast_finish: true - include: - - env: TOXENV=pre-commit - python: 3.7 - - env: TOXENV=mypy - python: 3.7 - - stage: deploy - python: 3.7 - after_success: true - deploy: - provider: pypi - user: syrusakbary - on: - tags: true - password: - secure: 
LHOp9DvYR+70vj4YVY8+JRNCKUOfYZREEUY3+4lMUpY7Zy5QwDfgEMXG64ybREH9dFldpUqVXRj53eeU3spfudSfh8NHkgqW7qihez2AhSnRc4dK6ooNfB+kLcSoJ4nUFGxdYImABc4V1hJvflGaUkTwDNYVxJF938bPaO797IvSbuI86llwqkvuK2Vegv9q/fy9sVGaF9VZIs4JgXwR5AyDR7FBArl+S84vWww4vTFD33hoE88VR4QvFY3/71BwRtQrnCMm7AOm31P9u29yi3bpzQpiOR2rHsgrsYdm597QzFKVxYwsmf9uAx2bpbSPy2WibunLePIvOFwm8xcfwnz4/J4ONBc5PSFmUytTWpzEnxb0bfUNLuYloIS24V6OZ8BfAhiYZ1AwySeJCQDM4Vk1V8IF6trTtyx5EW/uV9jsHCZ3LFsAD7UnFRTosIgN3SAK3ZWCEk5oF2IvjecsolEfkRXB3q9EjMkkuXRUeFDH2lWJLgNE27BzY6myvZVzPmfwZUsPBlPD/6w+WLSp97Rjgr9zS3T1d4ddqFM4ZYu04f2i7a/UUQqG+itzzuX5DWLPvzuNt37JB45mB9IsvxPyXZ6SkAcLl48NGyKok1f3vQnvphkfkl4lni29woKhaau8xlsuEDrcwOoeAsVcZXiItg+l+z2SlIwM0A06EvQ= - distributions: "sdist bdist_wheel" From c0ddbbfaf4fd4a777834dec7662f8ce9d85ffb50 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Thu, 19 Aug 2021 12:13:46 +0530 Subject: [PATCH 098/141] update workflow matrix --- .github/workflows/tests.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cf8b87190..b7cb3fdb4 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -57,6 +57,10 @@ jobs: # check coverage increase/decrease needs: tests runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - { os: ubuntu-latest } steps: - name: Coveralls Finished uses: AndreMiras/coveralls-python-action@develop @@ -66,12 +70,16 @@ jobs: deploy: # builds and publishes to PyPi runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - { python: '3.7', os: ubuntu-latest } steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.7' + python-version: ${{ matrix.python }} - name: Install dependencies run: | python -m pip install --upgrade pip From 8ae436915575f2efefdef2e289cd04d667c9c6a8 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Thu, 19 Aug 
2021 12:16:13 +0530 Subject: [PATCH 099/141] remove build matrix wherever not needed --- .github/workflows/tests.yml | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index b7cb3fdb4..39f15ef40 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -56,11 +56,7 @@ jobs: coveralls: # check coverage increase/decrease needs: tests - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - { os: ubuntu-latest } + runs-on: "ubuntu-latest" steps: - name: Coveralls Finished uses: AndreMiras/coveralls-python-action@develop @@ -69,17 +65,13 @@ jobs: deploy: # builds and publishes to PyPi - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - { python: '3.7', os: ubuntu-latest } + runs-on: "ubuntu-latest" steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python }} + python-version: "3.7" - name: Install dependencies run: | python -m pip install --upgrade pip From 0e4c14b0767627c504cc0f0adee9a21824fd05a6 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Thu, 19 Aug 2021 15:00:09 +0530 Subject: [PATCH 100/141] update workflow: tests --- .github/workflows/tests.yml | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 39f15ef40..6de43f373 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -44,7 +44,7 @@ jobs: id: pip-cache run: echo "::set-output name=dir::$(pip cache dir)" - - name: cache pip + - name: cache pip dependencies uses: actions/cache@v2 with: path: ${{ steps.pip-cache.outputs.dir }} @@ -53,25 +53,23 @@ jobs: - run: pip install tox - run: tox -e ${{ matrix.tox }} - coveralls: + coveralls_finish: # check coverage increase/decrease needs: tests - runs-on: "ubuntu-latest" + runs-on: ubuntu-latest steps: - name: 
Coveralls Finished uses: AndreMiras/coveralls-python-action@develop - with: - parallel-finished: true deploy: # builds and publishes to PyPi - runs-on: "ubuntu-latest" + runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: - python-version: "3.7" + python-version: '3.7' - name: Install dependencies run: | python -m pip install --upgrade pip From 7d890bf91521a3e7905e95f442a4e934a68603fb Mon Sep 17 00:00:00 2001 From: Syrus Akbary Date: Thu, 19 Aug 2021 14:02:45 -0500 Subject: [PATCH 101/141] Update graphene/validation/disable_introspection.py --- graphene/validation/disable_introspection.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/graphene/validation/disable_introspection.py b/graphene/validation/disable_introspection.py index 4c83050e6..be25a2871 100644 --- a/graphene/validation/disable_introspection.py +++ b/graphene/validation/disable_introspection.py @@ -8,12 +8,10 @@ class DisableIntrospection(ValidationRule): def enter_field(self, node: FieldNode, *_args): field_name = node.name.value - if not is_introspection_key(field_name): - return - - self.report_error( - GraphQLError( - f"Cannot query '{field_name}': introspection is disabled.", - node, + if is_introspection_key(field_name): + self.report_error( + GraphQLError( + f"Cannot query '{field_name}': introspection is disabled.", + node, + ) ) - ) From 946c2a3807d8970deee4f51eed07144349f1dde3 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Fri, 20 Aug 2021 15:58:43 +0530 Subject: [PATCH 102/141] Update schema.py --- graphene/types/schema.py | 236 --------------------------------------- 1 file changed, 236 deletions(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 995323542..9d3c8be50 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -391,239 +391,3 @@ def resolve_type(self, resolve_type_func, type_name, root, info, 
_type): return graphql_type return type_ - - -class UnforgivingExecutionContext(ExecutionContext): - """An execution context which doesn't swallow exceptions. - - The only difference between this execution context and the one it inherits from is - that ``except Exception`` is commented out within ``resolve_field_value_or_error``. - By removing that exception handling, only ``GraphQLError``'s are caught. - """ - - def resolve_field_value_or_error( - self, field_def, field_nodes, resolve_fn, source, info - ): - """Resolve field to a value or an error. - - Isolates the "ReturnOrAbrupt" behavior to not de-opt the resolve_field() - method. Returns the result of resolveFn or the abrupt-return Error object. - - For internal use only. - """ - try: - # Build a dictionary of arguments from the field.arguments AST, using the - # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_nodes[0], self.variable_values) - - # Note that contrary to the JavaScript implementation, we pass the context - # value as part of the resolve info. - result = resolve_fn(source, info, **args) - if self.is_awaitable(result): - # noinspection PyShadowingNames - async def await_result(): - try: - return await result - except GraphQLError as error: - return error - # except Exception as error: - # return GraphQLError(str(error), original_error=error) - - # Yes, this is commented out code. It's been intentionally - # _not_ removed to show what has changed from the original - # implementation. - - return await_result() - return result - except GraphQLError as error: - return error - # except Exception as error: - # return GraphQLError(str(error), original_error=error) - - # Yes, this is commented out code. It's been intentionally _not_ - # removed to show what has changed from the original implementation. - - def complete_value_catching_error( - self, return_type, field_nodes, info, path, result - ): - """Complete a value while catching an error. 
- - This is a small wrapper around completeValue which detects and logs errors in - the execution context. - """ - try: - if self.is_awaitable(result): - - async def await_result(): - value = self.complete_value( - return_type, field_nodes, info, path, await result - ) - if self.is_awaitable(value): - return await value - return value - - completed = await_result() - else: - completed = self.complete_value( - return_type, field_nodes, info, path, result - ) - if self.is_awaitable(completed): - # noinspection PyShadowingNames - async def await_completed(): - try: - return await completed - - # CHANGE WAS MADE HERE - # ``GraphQLError`` was swapped in for ``except Exception`` - except GraphQLError as error: - self.handle_field_error(error, field_nodes, path, return_type) - - return await_completed() - return completed - - # CHANGE WAS MADE HERE - # ``GraphQLError`` was swapped in for ``except Exception`` - except GraphQLError as error: - self.handle_field_error(error, field_nodes, path, return_type) - return None - - -class Schema: - """Schema Definition. - - A Graphene Schema can execute operations (query, mutation, subscription) against the defined - types. For advanced purposes, the schema can be used to lookup type definitions and answer - questions about the types through introspection. - - Args: - query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read* - data in your Schema. - mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for - fields to *create, update or delete* data in your API. - subscription (Optional[Type[ObjectType]]): Root subscription *ObjectType*. Describes entry point - for fields to receive continuous updates. - types (Optional[List[Type[ObjectType]]]): List of any types to include in schema that - may not be introspected through root types. - directives (List[GraphQLDirective], optional): List of custom directives to include in the - GraphQL schema. 
Defaults to only include directives defined by GraphQL spec (@include - and @skip) [GraphQLIncludeDirective, GraphQLSkipDirective]. - auto_camelcase (bool): Fieldnames will be transformed in Schema's TypeMap from snake_case - to camelCase (preferred by GraphQL standard). Default True. - """ - - def __init__( - self, - query=None, - mutation=None, - subscription=None, - types=None, - directives=None, - auto_camelcase=True, - ): - self.query = query - self.mutation = mutation - self.subscription = subscription - type_map = TypeMap( - query, mutation, subscription, types, auto_camelcase=auto_camelcase - ) - self.graphql_schema = GraphQLSchema( - type_map.query, - type_map.mutation, - type_map.subscription, - type_map.types, - directives, - ) - - def __str__(self): - return print_schema(self.graphql_schema) - - def __getattr__(self, type_name): - """ - This function let the developer select a type in a given schema - by accessing its attrs. - - Example: using schema.Query for accessing the "Query" type in the Schema - """ - _type = self.graphql_schema.get_type(type_name) - if _type is None: - raise AttributeError(f'Type "{type_name}" not found in the Schema') - if isinstance(_type, GrapheneGraphQLType): - return _type.graphene_type - return _type - - def lazy(self, _type): - return lambda: self.get_type(_type) - - def execute(self, *args, **kwargs): - """Execute a GraphQL query on the schema. - - Use the `graphql_sync` function from `graphql-core` to provide the result - for a query string. Most of the time this method will be called by one of the Graphene - :ref:`Integrations` via a web request. - - Args: - request_string (str or Document): GraphQL request (query, mutation or subscription) - as string or parsed AST form from `graphql-core`. - root_value (Any, optional): Value to use as the parent value object when resolving - root types. - context_value (Any, optional): Value to be made available to all resolvers via - `info.context`. 
Can be used to share authorization, dataloaders or other - information needed to resolve an operation. - variable_values (dict, optional): If variables are used in the request string, they can - be provided in dictionary form mapping the variable name to the variable value. - operation_name (str, optional): If multiple operations are provided in the - request_string, an operation name must be provided for the result to be provided. - middleware (List[SupportsGraphQLMiddleware]): Supply request level middleware as - defined in `graphql-core`. - execution_context_class (ExecutionContext, optional): The execution context class - to use when resolving queries and mutations. - - Returns: - :obj:`ExecutionResult` containing any data and errors for the operation. - """ - kwargs = normalize_execute_kwargs(kwargs) - return graphql_sync(self.graphql_schema, *args, **kwargs) - - async def execute_async(self, *args, **kwargs): - """Execute a GraphQL query on the schema asynchronously. - - Same as `execute`, but uses `graphql` instead of `graphql_sync`. 
- """ - kwargs = normalize_execute_kwargs(kwargs) - return await graphql(self.graphql_schema, *args, **kwargs) - - async def subscribe(self, query, *args, **kwargs): - """Execute a GraphQL subscription on the schema asynchronously.""" - # Do parsing - try: - document = parse(query) - except GraphQLError as error: - return ExecutionResult(data=None, errors=[error]) - - # Do validation - validation_errors = validate(self.graphql_schema, document) - if validation_errors: - return ExecutionResult(data=None, errors=validation_errors) - - # Execute the query - kwargs = normalize_execute_kwargs(kwargs) - return await subscribe(self.graphql_schema, document, *args, **kwargs) - - def introspect(self): - introspection = self.execute(introspection_query) - if introspection.errors: - raise introspection.errors[0] - return introspection.data - - -def normalize_execute_kwargs(kwargs): - """Replace alias names in keyword arguments for graphql()""" - if "root" in kwargs and "root_value" not in kwargs: - kwargs["root_value"] = kwargs.pop("root") - if "context" in kwargs and "context_value" not in kwargs: - kwargs["context_value"] = kwargs.pop("context") - if "variables" in kwargs and "variable_values" not in kwargs: - kwargs["variable_values"] = kwargs.pop("variables") - if "operation" in kwargs and "operation_name" not in kwargs: - kwargs["operation_name"] = kwargs.pop("operation") - return kwargs From 18cd3451f9715f3db900a64fd288b00b6706c003 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Fri, 20 Aug 2021 15:59:38 +0530 Subject: [PATCH 103/141] Update test_schema.py --- graphene/types/tests/test_schema.py | 119 +--------------------------- 1 file changed, 1 insertion(+), 118 deletions(-) diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index f84d2e204..9cdbde3bc 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -7,7 +7,7 @@ from ..field import Field 
from ..objecttype import ObjectType from ..scalars import String -from ..schema import Schema, UnforgivingExecutionContext +from ..schema import Schema class MyOtherType(ObjectType): @@ -69,120 +69,3 @@ def test_schema_requires_query_type(): assert len(result.errors) == 1 error = result.errors[0] assert error.message == "Query root type must be provided." - - -class TestUnforgivingExecutionContext: - @fixture - def schema(self): - class ErrorFieldsMixin: - sanity_field = String() - expected_error_field = String() - unexpected_value_error_field = String() - unexpected_type_error_field = String() - unexpected_attribute_error_field = String() - unexpected_key_error_field = String() - - @staticmethod - def resolve_sanity_field(obj, info): - return "not an error" - - @staticmethod - def resolve_expected_error_field(obj, info): - raise GraphQLError("expected error") - - @staticmethod - def resolve_unexpected_value_error_field(obj, info): - raise ValueError("unexpected error") - - @staticmethod - def resolve_unexpected_type_error_field(obj, info): - raise TypeError("unexpected error") - - @staticmethod - def resolve_unexpected_attribute_error_field(obj, info): - raise AttributeError("unexpected error") - - @staticmethod - def resolve_unexpected_key_error_field(obj, info): - return {}["fails"] - - class NestedObject(ErrorFieldsMixin, ObjectType): - pass - - class MyQuery(ErrorFieldsMixin, ObjectType): - nested_object = Field(NestedObject) - nested_object_error = Field(NestedObject) - - @staticmethod - def resolve_nested_object(obj, info): - return object() - - @staticmethod - def resolve_nested_object_error(obj, info): - raise TypeError() - - schema = Schema(query=MyQuery) - return schema - - def test_sanity_check(self, schema): - # this should pass with no errors (sanity check) - result = schema.execute( - "query { sanityField }", - execution_context_class=UnforgivingExecutionContext, - ) - assert not result.errors - assert result.data == {"sanityField": "not an error"} - 
- def test_nested_sanity_check(self, schema): - # this should pass with no errors (sanity check) - result = schema.execute( - r"query { nestedObject { sanityField } }", - execution_context_class=UnforgivingExecutionContext, - ) - assert not result.errors - assert result.data == {"nestedObject": {"sanityField": "not an error"}} - - def test_graphql_error(self, schema): - result = schema.execute( - "query { expectedErrorField }", - execution_context_class=UnforgivingExecutionContext, - ) - assert len(result.errors) == 1 - assert result.errors[0].message == "expected error" - assert result.data == {"expectedErrorField": None} - - def test_nested_graphql_error(self, schema): - result = schema.execute( - r"query { nestedObject { expectedErrorField } }", - execution_context_class=UnforgivingExecutionContext, - ) - assert len(result.errors) == 1 - assert result.errors[0].message == "expected error" - assert result.data == {"nestedObject": {"expectedErrorField": None}} - - @mark.parametrize( - "field,exception", - [ - ("unexpectedValueErrorField", ValueError), - ("unexpectedTypeErrorField", TypeError), - ("unexpectedAttributeErrorField", AttributeError), - ("unexpectedKeyErrorField", KeyError), - ("nestedObject { unexpectedValueErrorField }", ValueError), - ("nestedObject { unexpectedTypeErrorField }", TypeError), - ("nestedObject { unexpectedAttributeErrorField }", AttributeError), - ("nestedObject { unexpectedKeyErrorField }", KeyError), - ("nestedObjectError { __typename }", TypeError), - ], - ) - def test_unexpected_error(self, field, exception, schema): - # FIXME: tests are failing currently because no exception - # is being raised below. Instead, the errors are being propagated - # to the `errors` array of the response. If this is intended - # behaviour, we need to check if the error exists in the `errors` - # array rather than checking if an exception is raised. 
- with raises(exception): - # no result, but the exception should be propagated - schema.execute( - f"query {{ {field} }}", - execution_context_class=UnforgivingExecutionContext, - ) From ea4e6d65e9db41c69da9a23ecaaceaecb084054a Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Fri, 20 Aug 2021 16:08:58 +0530 Subject: [PATCH 104/141] Update schema.py --- graphene/types/schema.py | 134 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 134 insertions(+) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 9d3c8be50..1ff0bff4f 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -391,3 +391,137 @@ def resolve_type(self, resolve_type_func, type_name, root, info, _type): return graphql_type return type_ + + +class Schema: + """Schema Definition. + A Graphene Schema can execute operations (query, mutation, subscription) against the defined + types. For advanced purposes, the schema can be used to lookup type definitions and answer + questions about the types through introspection. + Args: + query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read* + data in your Schema. + mutation (Optional[Type[ObjectType]]): Root mutation *ObjectType*. Describes entry point for + fields to *create, update or delete* data in your API. + subscription (Optional[Type[ObjectType]]): Root subscription *ObjectType*. Describes entry point + for fields to receive continuous updates. + types (Optional[List[Type[ObjectType]]]): List of any types to include in schema that + may not be introspected through root types. + directives (List[GraphQLDirective], optional): List of custom directives to include in the + GraphQL schema. Defaults to only include directives defined by GraphQL spec (@include + and @skip) [GraphQLIncludeDirective, GraphQLSkipDirective]. 
+ auto_camelcase (bool): Fieldnames will be transformed in Schema's TypeMap from snake_case + to camelCase (preferred by GraphQL standard). Default True. + """ + + def __init__( + self, + query=None, + mutation=None, + subscription=None, + types=None, + directives=None, + auto_camelcase=True, + ): + self.query = query + self.mutation = mutation + self.subscription = subscription + type_map = TypeMap( + query, mutation, subscription, types, auto_camelcase=auto_camelcase + ) + self.graphql_schema = GraphQLSchema( + type_map.query, + type_map.mutation, + type_map.subscription, + type_map.types, + directives, + ) + + def __str__(self): + return print_schema(self.graphql_schema) + + def __getattr__(self, type_name): + """ + This function let the developer select a type in a given schema + by accessing its attrs. + Example: using schema.Query for accessing the "Query" type in the Schema + """ + _type = self.graphql_schema.get_type(type_name) + if _type is None: + raise AttributeError(f'Type "{type_name}" not found in the Schema') + if isinstance(_type, GrapheneGraphQLType): + return _type.graphene_type + return _type + + def lazy(self, _type): + return lambda: self.get_type(_type) + + def execute(self, *args, **kwargs): + """Execute a GraphQL query on the schema. + Use the `graphql_sync` function from `graphql-core` to provide the result + for a query string. Most of the time this method will be called by one of the Graphene + :ref:`Integrations` via a web request. + Args: + request_string (str or Document): GraphQL request (query, mutation or subscription) + as string or parsed AST form from `graphql-core`. + root_value (Any, optional): Value to use as the parent value object when resolving + root types. + context_value (Any, optional): Value to be made available to all resolvers via + `info.context`. Can be used to share authorization, dataloaders or other + information needed to resolve an operation. 
+ variable_values (dict, optional): If variables are used in the request string, they can + be provided in dictionary form mapping the variable name to the variable value. + operation_name (str, optional): If multiple operations are provided in the + request_string, an operation name must be provided for the result to be provided. + middleware (List[SupportsGraphQLMiddleware]): Supply request level middleware as + defined in `graphql-core`. + execution_context_class (ExecutionContext, optional): The execution context class + to use when resolving queries and mutations. + Returns: + :obj:`ExecutionResult` containing any data and errors for the operation. + """ + kwargs = normalize_execute_kwargs(kwargs) + return graphql_sync(self.graphql_schema, *args, **kwargs) + + async def execute_async(self, *args, **kwargs): + """Execute a GraphQL query on the schema asynchronously. + Same as `execute`, but uses `graphql` instead of `graphql_sync`. + """ + kwargs = normalize_execute_kwargs(kwargs) + return await graphql(self.graphql_schema, *args, **kwargs) + + async def subscribe(self, query, *args, **kwargs): + """Execute a GraphQL subscription on the schema asynchronously.""" + # Do parsing + try: + document = parse(query) + except GraphQLError as error: + return ExecutionResult(data=None, errors=[error]) + + # Do validation + validation_errors = validate(self.graphql_schema, document) + if validation_errors: + return ExecutionResult(data=None, errors=validation_errors) + + # Execute the query + kwargs = normalize_execute_kwargs(kwargs) + return await subscribe(self.graphql_schema, document, *args, **kwargs) + + def introspect(self): + introspection = self.execute(introspection_query) + if introspection.errors: + raise introspection.errors[0] + return introspection.data + + +def normalize_execute_kwargs(kwargs): + """Replace alias names in keyword arguments for graphql()""" + if "root" in kwargs and "root_value" not in kwargs: + kwargs["root_value"] = kwargs.pop("root") + if 
"context" in kwargs and "context_value" not in kwargs: + kwargs["context_value"] = kwargs.pop("context") + if "variables" in kwargs and "variable_values" not in kwargs: + kwargs["variable_values"] = kwargs.pop("variables") + if "operation" in kwargs and "operation_name" not in kwargs: + kwargs["operation_name"] = kwargs.pop("operation") + return kwargs From 57a4394bf3b149a16e36259a8b26cbe5aadc6970 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Fri, 20 Aug 2021 20:56:19 +0530 Subject: [PATCH 105/141] Update depth_limit.py --- graphene/validation/depth_limit.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index 8363a6c9c..0a95aeaef 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -25,7 +25,12 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -import re +try: + from re import Pattern +except ImportError: + # backwards compatibility for v3.6 + from typing import Pattern + from typing import Callable, Dict, List, Optional, Union from graphql import GraphQLError @@ -43,7 +48,7 @@ from ..utils.is_introspection_key import is_introspection_key -IgnoreType = Union[Callable[[str], bool], re.Pattern, str] +IgnoreType = Union[Callable[[str], bool], Pattern, str] def depth_limit_validator( From 98980b53f6032c186d94982ebf4d87b4a3bf5f80 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Fri, 20 Aug 2021 21:04:22 +0530 Subject: [PATCH 106/141] Update depth_limit.py --- graphene/validation/depth_limit.py | 1 + 1 file changed, 1 insertion(+) diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index 0a95aeaef..47a044034 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -31,6 +31,7 @@ # backwards compatibility for v3.6 from 
typing import Pattern +import re from typing import Callable, Dict, List, Optional, Union from graphql import GraphQLError From 74a6565ea3f77f68758b099291fde9544d10d03f Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Fri, 20 Aug 2021 21:07:57 +0530 Subject: [PATCH 107/141] Update depth_limit.py --- graphene/validation/depth_limit.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index 47a044034..c72b78d02 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -31,7 +31,6 @@ # backwards compatibility for v3.6 from typing import Pattern -import re from typing import Callable, Dict, List, Optional, Union from graphql import GraphQLError @@ -190,7 +189,7 @@ def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bo if isinstance(rule, str): if field_name == rule: return True - elif isinstance(rule, re.Pattern): + elif isinstance(rule, Pattern): if rule.match(field_name): return True elif callable(rule): From 3c50fa817af3b0d8205f1fb26186ae4dbf6dccf5 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:14:52 +0530 Subject: [PATCH 108/141] Delete stale.yml --- .github/workflows/stale.yml | 24 ------------------------ 1 file changed, 24 deletions(-) delete mode 100644 .github/workflows/stale.yml diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml deleted file mode 100644 index 322a3edad..000000000 --- a/.github/workflows/stale.yml +++ /dev/null @@ -1,24 +0,0 @@ -# Number of days of inactivity before an issue becomes stale -daysUntilStale: false -# Number of days of inactivity before a stale issue is closed -daysUntilClose: false -# Issues with these labels will never be considered stale -exemptLabels: - - pinned - - security - - 🐛 bug - - 📖 documentation - - 🙋 help wanted - - ✨ enhancement 
- - good first issue - - work in progress -# Label to use when marking an issue as stale -staleLabel: wontfix -# Comment to post when marking an issue as stale. Set to `false` to disable -markComment: false -# markComment: > - # This issue has been automatically marked as stale because it has not had - # recent activity. It will be closed if no further activity occurs. Thank you - # for your contributions. -# Comment to post when closing a stale issue. Set to `false` to disable -closeComment: false From e1822c9ae97f2a76949b5cb1de98ae826ac8efcf Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:15:28 +0530 Subject: [PATCH 109/141] Create stale.yml --- .github/stale.yml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 .github/stale.yml diff --git a/.github/stale.yml b/.github/stale.yml new file mode 100644 index 000000000..322a3edad --- /dev/null +++ b/.github/stale.yml @@ -0,0 +1,24 @@ +# Number of days of inactivity before an issue becomes stale +daysUntilStale: false +# Number of days of inactivity before a stale issue is closed +daysUntilClose: false +# Issues with these labels will never be considered stale +exemptLabels: + - pinned + - security + - 🐛 bug + - 📖 documentation + - 🙋 help wanted + - ✨ enhancement + - good first issue + - work in progress +# Label to use when marking an issue as stale +staleLabel: wontfix +# Comment to post when marking an issue as stale. Set to `false` to disable +markComment: false +# markComment: > + # This issue has been automatically marked as stale because it has not had + # recent activity. It will be closed if no further activity occurs. Thank you + # for your contributions. +# Comment to post when closing a stale issue. 
Set to `false` to disable +closeComment: false From 16551369b2adc6650014ae4a167fb9a7baa585e7 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:16:43 +0530 Subject: [PATCH 110/141] Update tests.yml --- .github/workflows/tests.yml | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 6de43f373..cdc4d01e5 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -52,32 +52,3 @@ jobs: - run: pip install tox - run: tox -e ${{ matrix.tox }} - - coveralls_finish: - # check coverage increase/decrease - needs: tests - runs-on: ubuntu-latest - steps: - - name: Coveralls Finished - uses: AndreMiras/coveralls-python-action@develop - - deploy: - # builds and publishes to PyPi - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.7' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install build - - name: Build package - run: python -m build - - name: Publish package - uses: pypa/gh-action-pypi-publish@release/v1 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} From dc6b820635f38396e7b65226ca4c60ba67070e82 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:19:53 +0530 Subject: [PATCH 111/141] Create coveralls.yml --- .github/workflows/coveralls.yml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 .github/workflows/coveralls.yml diff --git a/.github/workflows/coveralls.yml b/.github/workflows/coveralls.yml new file mode 100644 index 000000000..c26a8975c --- /dev/null +++ b/.github/workflows/coveralls.yml @@ -0,0 +1,27 @@ + +name: 📊 Check Coverage +on: + push: + branches: + - master + - '*.x' + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' + 
pull_request: + branches: + - master + - '*.x' + paths-ignore: + - 'docs/**' + - '*.md' + - '*.rst' +jobs: + coveralls_finish: + # check coverage increase/decrease + needs: tests + runs-on: ubuntu-latest + steps: + - name: Coveralls Finished + uses: AndreMiras/coveralls-python-action@develop From 0aef168687f4621e670ca6a8a66b2410f7976aea Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:21:48 +0530 Subject: [PATCH 112/141] Create deploy.yml --- .github/workflows/deploy.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/deploy.yml diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 000000000..2a6cdc6b0 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,26 @@ +name: 🚀 Deploy to PyPI + +on: + push: + tags: + - 'v*' + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Build wheel and source tarball + run: | + pip install wheel + python setup.py sdist bdist_wheel + - name: Publish a Python distribution to PyPI + uses: pypa/gh-action-pypi-publish@v1.1.0 + with: + user: __token__ + password: ${{ secrets.pypi_password }} From 772986ac8362500805be8b0ab6c58abc5e63af47 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:23:27 +0530 Subject: [PATCH 113/141] Create lint.yml --- lint.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 lint.yml diff --git a/lint.yml b/lint.yml new file mode 100644 index 000000000..95251d9b3 --- /dev/null +++ b/lint.yml @@ -0,0 +1,26 @@ +name: 💅 Lint + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + 
python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tox + - name: Run lint + run: tox + env: + TOXENV: pre-commit + - name: Run mypy + run: tox + env: + TOXENV: mypy From 1654d2fa29e42e9dc438279cfcebe4e32255ec90 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:24:11 +0530 Subject: [PATCH 114/141] Update coveralls.yml --- .github/workflows/coveralls.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/coveralls.yml b/.github/workflows/coveralls.yml index c26a8975c..c2f9f3cc6 100644 --- a/.github/workflows/coveralls.yml +++ b/.github/workflows/coveralls.yml @@ -1,4 +1,3 @@ - name: 📊 Check Coverage on: push: From e66d6148ab471fca40ae3a34ec2a480091a5fe6b Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:25:38 +0530 Subject: [PATCH 115/141] Create lint.yml --- .github/workflows/lint.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/lint.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 000000000..95251d9b3 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,26 @@ +name: 💅 Lint + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tox + - name: Run lint + run: tox + env: + TOXENV: pre-commit + - name: Run mypy + run: tox + env: + TOXENV: mypy From 9807d6102ce8a7757b67a472548f26a521f9a8b4 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:40:08 +0530 Subject: [PATCH 116/141] Update coveralls.yml --- .github/workflows/coveralls.yml | 1 - 1 file 
changed, 1 deletion(-) diff --git a/.github/workflows/coveralls.yml b/.github/workflows/coveralls.yml index c2f9f3cc6..a8e2875c8 100644 --- a/.github/workflows/coveralls.yml +++ b/.github/workflows/coveralls.yml @@ -19,7 +19,6 @@ on: jobs: coveralls_finish: # check coverage increase/decrease - needs: tests runs-on: ubuntu-latest steps: - name: Coveralls Finished From 7960b02124a2638cfd7144f77a2dd2606c042518 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:41:12 +0530 Subject: [PATCH 117/141] Delete lint.yml --- lint.yml | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 lint.yml diff --git a/lint.yml b/lint.yml deleted file mode 100644 index 95251d9b3..000000000 --- a/lint.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: 💅 Lint - -on: [push, pull_request] - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 - with: - python-version: 3.8 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install tox - - name: Run lint - run: tox - env: - TOXENV: pre-commit - - name: Run mypy - run: tox - env: - TOXENV: mypy From 314554338655c68ade79a24a6f9a54fff5a6d562 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:46:14 +0530 Subject: [PATCH 118/141] Update tox.ini --- tox.ini | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tox.ini b/tox.ini index dd922c469..6a3166f4d 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ commands = py{36,37,38}: pytest --cov=graphene graphene examples {posargs} [testenv:pre-commit] -basepython=python3.7 +basepython=python3.8 deps = pre-commit>=2,<3 setenv = @@ -20,16 +20,16 @@ commands = pre-commit {posargs:run --all-files} [testenv:mypy] -basepython=python3.7 +basepython=python3.8 deps = mypy>=0.761,<1 commands = mypy graphene 
[testenv:flake8] -basepython=python3.7 +basepython=python3.8 deps = - flake8>=3.7,<4 + flake8>=3.8,<4 commands = pip install --pre -e . flake8 graphene From ce59f1ff15a32b0b4b90b96a4fb466970cf1c6aa Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 17:48:33 +0530 Subject: [PATCH 119/141] Rename .pre-commit-config.yml to .pre-commit-config.yaml --- .pre-commit-config.yml => .pre-commit-config.yaml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .pre-commit-config.yml => .pre-commit-config.yaml (100%) diff --git a/.pre-commit-config.yml b/.pre-commit-config.yaml similarity index 100% rename from .pre-commit-config.yml rename to .pre-commit-config.yaml From 7827219ba2c7c20858351ecbdc74ba041fdd587f Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 18:29:56 +0530 Subject: [PATCH 120/141] Update schema.py --- graphene/types/schema.py | 1 - 1 file changed, 1 deletion(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 1ff0bff4f..79341d83b 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -27,7 +27,6 @@ GraphQLSchema, GraphQLString, ) -from graphql.execution import ExecutionContext from graphql.execution.values import get_argument_values from ..utils.str_converters import to_camel_case From 0ebff3313d985126cdf23e9b26d200d1a5902e1e Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 18:30:42 +0530 Subject: [PATCH 121/141] Update test_schema.py --- graphene/types/tests/test_schema.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py index 9cdbde3bc..fe4739c98 100644 --- a/graphene/types/tests/test_schema.py +++ b/graphene/types/tests/test_schema.py @@ -1,6 +1,5 @@ from graphql.type import GraphQLObjectType, GraphQLSchema -from graphql 
import GraphQLError -from pytest import mark, raises, fixture +from pytest import raises from graphene.tests.utils import dedent From 1886ec9dcbea27a944fd8864f5b530bacdab957d Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 18:34:39 +0530 Subject: [PATCH 122/141] Update schema.py --- graphene/types/schema.py | 1 - 1 file changed, 1 deletion(-) diff --git a/graphene/types/schema.py b/graphene/types/schema.py index 79341d83b..0c6d4183c 100644 --- a/graphene/types/schema.py +++ b/graphene/types/schema.py @@ -27,7 +27,6 @@ GraphQLSchema, GraphQLString, ) -from graphql.execution.values import get_argument_values from ..utils.str_converters import to_camel_case from ..utils.get_unbound_function import get_unbound_function From d54b81955225156db6a2d39a58769ba528b9e784 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 18:37:44 +0530 Subject: [PATCH 123/141] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c9ffc21ed..f2e50e5e3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,6 +23,6 @@ repos: - id: black language_version: python3 - repo: https://github.com/PyCQA/flake8 - rev: 3.7.8 + rev: 3.8.4 hooks: - id: flake8 From 16d0b32a8fdcaf3b7960677b3ae11bb28cd8be4a Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 20:21:46 +0530 Subject: [PATCH 124/141] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f2e50e5e3..be6670200 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: git://github.com/pre-commit/pre-commit-hooks - rev: v2.1.0 + rev: v2.3.0 hooks: 
- id: check-merge-conflict - id: check-json @@ -18,7 +18,7 @@ repos: hooks: - id: pyupgrade - repo: https://github.com/ambv/black - rev: 19.10b0 + rev: 19.3b0 hooks: - id: black language_version: python3 From 3b77b5f92a049baee54d4a797163069693498bb4 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 20:36:41 +0530 Subject: [PATCH 125/141] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 6a3166f4d..ff9973f7d 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ deps = setenv = LC_CTYPE=en_US.UTF-8 commands = - pre-commit {posargs:run --all-files} + pre-commit run --all-files [testenv:mypy] basepython=python3.8 From 76701e0809d20ef93a7b266d4f701378ba163e57 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 20:53:58 +0530 Subject: [PATCH 126/141] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index be6670200..7f782135f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,8 +20,8 @@ repos: - repo: https://github.com/ambv/black rev: 19.3b0 hooks: - - id: black - language_version: python3 + - id: black + language_version: python3 - repo: https://github.com/PyCQA/flake8 rev: 3.8.4 hooks: From 5896ade2dd50058bc25ab3b73e93eff6b6a01dfa Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 20:58:18 +0530 Subject: [PATCH 127/141] Update test_connection_query.py --- graphene/relay/tests/test_connection_query.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/graphene/relay/tests/test_connection_query.py b/graphene/relay/tests/test_connection_query.py index cac4b65b0..8226febc8 100644 --- a/graphene/relay/tests/test_connection_query.py +++ 
b/graphene/relay/tests/test_connection_query.py @@ -51,10 +51,10 @@ def resolve_connection_letters(self, info, **args): def edges(selected_letters): return [ { - "node": {"id": base64("Letter:%s" % l.id), "letter": l.letter}, - "cursor": base64("arrayconnection:%s" % l.id), + "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter}, + "cursor": base64("arrayconnection:%s" % letter.id), } - for l in [letters[i] for i in selected_letters] + for letter in [letters[i] for i in selected_letters] ] From 1c3054b7c8099b800671403856e90cdc455ab577 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 21:01:27 +0530 Subject: [PATCH 128/141] Update test_connection_async.py --- graphene/relay/tests/test_connection_async.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/graphene/relay/tests/test_connection_async.py b/graphene/relay/tests/test_connection_async.py index b139f6a39..ae228cf9a 100644 --- a/graphene/relay/tests/test_connection_async.py +++ b/graphene/relay/tests/test_connection_async.py @@ -51,10 +51,10 @@ def resolve_connection_letters(self, info, **args): def edges(selected_letters): return [ { - "node": {"id": base64("Letter:%s" % l.id), "letter": l.letter}, - "cursor": base64("arrayconnection:%s" % l.id), + "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter}, + "cursor": base64("arrayconnection:%s" % letter.id), } - for l in [letters[i] for i in selected_letters] + for letter in [letters[i] for i in selected_letters] ] From 7087710d025f940bd542f264e9f3493208a6a3d4 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 21:17:00 +0530 Subject: [PATCH 129/141] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7f782135f..f0e353b45 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,7 +14,7 @@ repos: - id: trailing-whitespace exclude: README.md - repo: https://github.com/asottile/pyupgrade - rev: v1.12.0 + rev: v2.24.0 hooks: - id: pyupgrade - repo: https://github.com/ambv/black From a3a2f999aadf385ec54c6ec8793536daf6588a1c Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 21:33:00 +0530 Subject: [PATCH 130/141] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f0e353b45..58edab7bb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,16 +13,16 @@ repos: - --autofix - id: trailing-whitespace exclude: README.md -- repo: https://github.com/asottile/pyupgrade +- repo: git://github.com/asottile/pyupgrade rev: v2.24.0 hooks: - id: pyupgrade -- repo: https://github.com/ambv/black +- repo: git://github.com/ambv/black rev: 19.3b0 hooks: - id: black language_version: python3 -- repo: https://github.com/PyCQA/flake8 +- repo: git://github.com/PyCQA/flake8 rev: 3.8.4 hooks: - id: flake8 From 85f06fb2a6e9286860ea8f6a21cefce5108cb245 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 21:37:30 +0530 Subject: [PATCH 131/141] Update tox.ini --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index ff9973f7d..c4bf6ad05 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ deps = setenv = LC_CTYPE=en_US.UTF-8 commands = - pre-commit run --all-files + pre-commit run --all-files --show-diff-on-failure [testenv:mypy] basepython=python3.8 From d5d7a0e5e079e0a67d2236dd5c0b0b4e6419232c Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sat, 21 Aug 2021 21:41:47 +0530 Subject: [PATCH 132/141] Update tox.ini --- tox.ini | 1 + 1 file 
changed, 1 insertion(+) diff --git a/tox.ini b/tox.ini index c4bf6ad05..693f5e69e 100644 --- a/tox.ini +++ b/tox.ini @@ -17,6 +17,7 @@ deps = setenv = LC_CTYPE=en_US.UTF-8 commands = + pre-commit install pre-commit run --all-files --show-diff-on-failure [testenv:mypy] From 908d5aeaeb9a565307b0cea7bd2842737a748334 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sun, 22 Aug 2021 08:48:15 +0530 Subject: [PATCH 133/141] Update .pre-commit-config.yaml --- .pre-commit-config.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58edab7bb..bd6a7340d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,3 +1,6 @@ +default_language_version: + python: python3.8 + repos: - repo: git://github.com/pre-commit/pre-commit-hooks rev: v2.3.0 @@ -21,7 +24,6 @@ repos: rev: 19.3b0 hooks: - id: black - language_version: python3 - repo: git://github.com/PyCQA/flake8 rev: 3.8.4 hooks: From 2c66e496f7e0f3b85f244b04223aa7d87ba96ad9 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Sun, 22 Aug 2021 08:48:38 +0530 Subject: [PATCH 134/141] Update tox.ini --- tox.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/tox.ini b/tox.ini index 693f5e69e..c4bf6ad05 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,6 @@ deps = setenv = LC_CTYPE=en_US.UTF-8 commands = - pre-commit install pre-commit run --all-files --show-diff-on-failure [testenv:mypy] From 2e5944eb2023179714d213bcf38d44a945b180b1 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan Date: Sun, 22 Aug 2021 11:03:22 +0530 Subject: [PATCH 135/141] format code --- graphene/pyutils/dataclasses.py | 40 +++---------------- graphene/relay/tests/test_connection_query.py | 13 +++--- graphene/types/mutation.py | 9 +---- graphene/types/objecttype.py | 11 +++-- graphene/types/tests/test_base64.py | 3 +- graphene/validation/__init__.py | 5 +-- 
graphene/validation/depth_limit.py | 12 ++---- .../tests/test_depth_limit_validator.py | 29 +++----------- .../tests/test_disable_introspection.py | 8 +--- 9 files changed, 33 insertions(+), 97 deletions(-) diff --git a/graphene/pyutils/dataclasses.py b/graphene/pyutils/dataclasses.py index 19530eff1..f847b211a 100644 --- a/graphene/pyutils/dataclasses.py +++ b/graphene/pyutils/dataclasses.py @@ -442,13 +442,11 @@ def _field_init(f, frozen, globals, self_name): # This field does not need initialization. Signify that # to the caller by returning None. return None - # Only test this now, so that we can create variables for the # default. However, return None to signify that we're not going # to actually do the assignment statement for InitVars. if f._field_type == _FIELD_INITVAR: return None - # Now, actually generate the field assignment. return _field_assign(frozen, f.name, value, self_name) @@ -490,7 +488,6 @@ def _init_fn(fields, frozen, has_post_init, self_name): raise TypeError( f"non-default argument {f.name!r} " "follows default argument" ) - globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY} body_lines = [] @@ -500,16 +497,13 @@ def _init_fn(fields, frozen, has_post_init, self_name): # initialization (it's a pseudo-field). Just skip it. if line: body_lines.append(line) - # Does this class have a post-init function? if has_post_init: params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR) body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})") - # If no body lines, use 'pass'. if not body_lines: body_lines = ["pass"] - locals = {f"_type_{f.name}": f.type for f in fields} return _create_fn( "__init__", @@ -674,7 +668,6 @@ def _get_field(cls, a_name, a_type): # This is a field in __slots__, so it has no default value. default = MISSING f = field(default=default) - # Only at this point do we know the name and the type. Set them. 
f.name = a_name f.type = a_type @@ -705,7 +698,6 @@ def _get_field(cls, a_name, a_type): and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) ): f._field_type = _FIELD_CLASSVAR - # If the type is InitVar, or if it's a matching string annotation, # then it's an InitVar. if f._field_type is _FIELD: @@ -717,7 +709,6 @@ def _get_field(cls, a_name, a_type): and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) ): f._field_type = _FIELD_INITVAR - # Validations for individual fields. This is delayed until now, # instead of in the Field() constructor, since only here do we # know the field name, which allows for better error reporting. @@ -731,14 +722,12 @@ def _get_field(cls, a_name, a_type): # example, how about init=False (or really, # init=)? It makes no sense for # ClassVar and InitVar to specify init=. - # For real fields, disallow mutable defaults for known types. if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): raise ValueError( f"mutable default {type(f.default)} for field " f"{f.name} is not allowed: use default_factory" ) - return f @@ -827,7 +816,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): fields[f.name] = f if getattr(b, _PARAMS).frozen: any_frozen_base = True - # Annotations that are defined in this class (not in base # classes). If __annotations__ isn't present, then this class # adds no new annotations. We use this to compute fields that are @@ -866,22 +854,18 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): delattr(cls, f.name) else: setattr(cls, f.name, f.default) - # Do we have any Field members that don't also have annotations? for name, value in cls.__dict__.items(): if isinstance(value, Field) and not name in cls_annotations: raise TypeError(f"{name!r} is a field but has no type annotation") - # Check rules that apply if we are derived from any dataclasses. 
if has_dataclass_bases: # Raise an exception if any of our bases are frozen, but we're not. if any_frozen_base and not frozen: raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one") - # Raise an exception if we're frozen, but none of our bases are. if not any_frozen_base and frozen: raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one") - # Remember all of the fields on our class (including bases). This # also marks this class as being a dataclass. setattr(cls, _FIELDS, fields) @@ -900,7 +884,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # eq methods. if order and not eq: raise ValueError("eq must be true if order is true") - if init: # Does this class have a post-init function? has_post_init = hasattr(cls, _POST_INIT_NAME) @@ -920,7 +903,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): "__dataclass_self__" if "self" in fields else "self", ), ) - # Get the fields as a list, and include only real fields. This is # used in all of the following methods. field_list = [f for f in fields.values() if f._field_type is _FIELD] @@ -928,7 +910,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): if repr: flds = [f for f in field_list if f.repr] _set_new_attribute(cls, "__repr__", _repr_fn(flds)) - if eq: # Create _eq__ method. There's no need for a __ne__ method, # since python will call __eq__ and negate it. @@ -938,7 +919,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): _set_new_attribute( cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple) ) - if order: # Create and set the ordering methods. flds = [f for f in field_list if f.compare] @@ -958,7 +938,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): f"in class {cls.__name__}. 
Consider using " "functools.total_ordering" ) - if frozen: for fn in _frozen_get_del_attr(cls, field_list): if _set_new_attribute(cls, fn.__name__, fn): @@ -966,7 +945,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): f"Cannot overwrite attribute {fn.__name__} " f"in class {cls.__name__}" ) - # Decide if/how we're going to create a hash function. hash_action = _hash_action[ bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash @@ -975,11 +953,9 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen): # No need to call _set_new_attribute here, since by the time # we're here the overwriting is unconditional. cls.__hash__ = hash_action(cls, field_list) - if not getattr(cls, "__doc__"): # Create a class doc-string. cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "") - return cls @@ -1015,7 +991,6 @@ def wrap(cls): if _cls is None: # We're called with parens. return wrap - # We're called as @dataclass without parens. return wrap(_cls) @@ -1032,7 +1007,6 @@ def fields(class_or_instance): fields = getattr(class_or_instance, _FIELDS) except AttributeError: raise TypeError("must be called with a dataclass type or instance") - # Exclude pseudo-fields. Note that fields is sorted by insertion # order, so the order of the tuple is as the fields were defined. return tuple(f for f in fields.values() if f._field_type is _FIELD) @@ -1174,7 +1148,6 @@ class C(Base): else: # Copy namespace since we're going to mutate it. namespace = namespace.copy() - # While we're looking through the field names, validate that they # are identifiers, are not keywords, and not duplicates. 
seen = set() @@ -1184,23 +1157,23 @@ class C(Base): name = item tp = "typing.Any" elif len(item) == 2: - name, tp, = item + ( + name, + tp, + ) = item elif len(item) == 3: name, tp, spec = item namespace[name] = spec else: raise TypeError(f"Invalid field: {item!r}") - if not isinstance(name, str) or not name.isidentifier(): raise TypeError(f"Field names must be valid identifers: {name!r}") if keyword.iskeyword(name): raise TypeError(f"Field names must not be keywords: {name!r}") if name in seen: raise TypeError(f"Field name duplicated: {name!r}") - seen.add(name) anns[name] = tp - namespace["__annotations__"] = anns # We use `types.new_class()` instead of simply `type()` to allow dynamic creation # of generic dataclassses. @@ -1229,14 +1202,13 @@ class C: c = C(1, 2) c1 = replace(c, x=3) assert c1.x == 3 and c1.y == 2 - """ + """ # We're going to mutate 'changes', but that's okay because it's a # new dict, even if called with 'replace(obj, **my_changes)'. if not _is_dataclass_instance(obj): raise TypeError("replace() should be called on dataclass instances") - # It's an error to have init=False fields in 'changes'. # If a field is not in 'changes', read its value from the provided obj. @@ -1250,10 +1222,8 @@ class C: "replace()" ) continue - if f.name not in changes: changes[f.name] = getattr(obj, f.name) - # Create the new object, which calls __init__() and # __post_init__() (if defined), using all of the init fields we've # added and/or left in 'changes'. 
If there are values supplied in diff --git a/graphene/relay/tests/test_connection_query.py b/graphene/relay/tests/test_connection_query.py index 8226febc8..42345e540 100644 --- a/graphene/relay/tests/test_connection_query.py +++ b/graphene/relay/tests/test_connection_query.py @@ -66,7 +66,6 @@ def cursor_for(ltr): async def execute(args=""): if args: args = "(" + args + ")" - return await schema.execute_async( """ { @@ -164,14 +163,16 @@ async def test_respects_first_and_after_and_before_too_few(): @mark.asyncio async def test_respects_first_and_after_and_before_too_many(): await check( - f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', + "BCD", ) @mark.asyncio async def test_respects_first_and_after_and_before_exactly_right(): await check( - f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', + "BCD", ) @@ -187,14 +188,16 @@ async def test_respects_last_and_after_and_before_too_few(): @mark.asyncio async def test_respects_last_and_after_and_before_too_many(): await check( - f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', + "BCD", ) @mark.asyncio async def test_respects_last_and_after_and_before_exactly_right(): await check( - f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', + "BCD", ) diff --git a/graphene/types/mutation.py b/graphene/types/mutation.py index 6e041bbfa..7f98e3126 100644 --- a/graphene/types/mutation.py +++ b/graphene/types/mutation.py @@ -76,7 +76,6 @@ def __init_subclass_with_meta__( ): if not _meta: _meta = MutationOptions(cls) - output = output or getattr(cls, "Output", None) fields = {} @@ -85,14 +84,12 @@ def __init_subclass_with_meta__( interface, 
Interface ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' fields.update(interface._meta.fields) - if not output: # If output is defined, we don't need to get the fields fields = {} for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) output = cls - if not arguments: input_class = getattr(cls, "Arguments", None) if not input_class: @@ -106,22 +103,18 @@ def __init_subclass_with_meta__( " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" ) ) - if input_class: arguments = props(input_class) else: arguments = {} - if not resolver: mutate = getattr(cls, "mutate", None) assert mutate, "All mutations must define a mutate method in it" resolver = get_unbound_function(mutate) - if _meta.fields: _meta.fields.update(fields) else: _meta.fields = fields - _meta.interfaces = interfaces _meta.output = output _meta.resolver = resolver @@ -133,7 +126,7 @@ def __init_subclass_with_meta__( def Field( cls, name=None, description=None, deprecation_reason=None, required=False ): - """ Mount instance of mutation Field. 
""" + """Mount instance of mutation Field.""" return Field( cls._meta.output, args=cls._meta.arguments, diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index c69be937b..2b1902eaf 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -7,7 +7,6 @@ from dataclasses import make_dataclass, field except ImportError: from ..pyutils.dataclasses import make_dataclass, field # type: ignore - # For static type checking with Mypy MYPY = False if MYPY: @@ -28,7 +27,11 @@ class InterObjectType: pass base_cls = super().__new__( - cls, name_, (InterObjectType,) + bases, namespace, **options, + cls, + name_, + (InterObjectType,) + bases, + namespace, + **options, ) if base_cls._meta: fields = [ @@ -133,7 +136,6 @@ def __init_subclass_with_meta__( ): if not _meta: _meta = ObjectTypeOptions(cls) - fields = {} for interface in interfaces: @@ -141,10 +143,8 @@ def __init_subclass_with_meta__( interface, Interface ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' fields.update(interface._meta.fields) - for base in reversed(cls.__mro__): fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) - assert not (possible_types and cls.is_type_of), ( f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. " "Please use one or other." 
@@ -154,7 +154,6 @@ def __init_subclass_with_meta__( _meta.fields.update(fields) else: _meta.fields = fields - if not _meta.interfaces: _meta.interfaces = interfaces _meta.possible_types = possible_types diff --git a/graphene/types/tests/test_base64.py b/graphene/types/tests/test_base64.py index b096dcbc8..d1b76cb48 100644 --- a/graphene/types/tests/test_base64.py +++ b/graphene/types/tests/test_base64.py @@ -72,7 +72,8 @@ def test_base64_query_invalid(): for input_ in bad_inputs: result = schema.execute( - """{ base64(input: $input) }""", variables={"input": input_}, + """{ base64(input: $input) }""", + variables={"input": input_}, ) assert isinstance(result.errors, list) assert len(result.errors) == 1 diff --git a/graphene/validation/__init__.py b/graphene/validation/__init__.py index f338e2d0d..5b592a2cd 100644 --- a/graphene/validation/__init__.py +++ b/graphene/validation/__init__.py @@ -2,7 +2,4 @@ from .disable_introspection import DisableIntrospection -__all__ = [ - "DisableIntrospection", - "depth_limit_validator" -] +__all__ = ["DisableIntrospection", "depth_limit_validator"] diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index c72b78d02..5be852c7b 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -30,7 +30,6 @@ except ImportError: # backwards compatibility for v3.6 from typing import Pattern - from typing import Callable, Dict, List, Optional, Union from graphql import GraphQLError @@ -75,7 +74,6 @@ def __init__(self, validation_context: ValidationContext): operation_name=name, ignore=ignore, ) - if callable(callback): callback(query_depths) super().__init__(validation_context) @@ -90,7 +88,6 @@ def get_fragments( for definition in definitions: if isinstance(definition, FragmentDefinitionNode): fragments[definition.name.value] = definition - return fragments @@ -105,7 +102,6 @@ def get_queries_and_mutations( if isinstance(definition, OperationDefinitionNode): operation = 
definition.name.value if definition.name else "anonymous" operations[operation] = definition - return operations @@ -126,7 +122,6 @@ def determine_depth( ) ) return depth_so_far - if isinstance(node, FieldNode): should_ignore = is_introspection_key(node.name.value) or is_ignored( node, ignore @@ -134,7 +129,6 @@ def determine_depth( if should_ignore or not node.selection_set: return 0 - return 1 + max( map( lambda selection: determine_depth( @@ -177,13 +171,14 @@ def determine_depth( ) ) else: - raise Exception(f"Depth crawler cannot handle: {node.kind}.") # pragma: no cover + raise Exception( + f"Depth crawler cannot handle: {node.kind}." + ) # pragma: no cover def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: if ignore is None: return False - for rule in ignore: field_name = node.name.value if isinstance(rule, str): @@ -197,5 +192,4 @@ def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bo return True else: raise ValueError(f"Invalid ignore option: {rule}.") - return False diff --git a/graphene/validation/tests/test_depth_limit_validator.py b/graphene/validation/tests/test_depth_limit_validator.py index 499adbcce..0c22089ab 100644 --- a/graphene/validation/tests/test_depth_limit_validator.py +++ b/graphene/validation/tests/test_depth_limit_validator.py @@ -48,26 +48,11 @@ class Meta: class Query(ObjectType): - user = Field( - HumanType, - required=True, - name=String() - ) - version = String( - required=True - ) - user1 = Field( - HumanType, - required=True - ) - user2 = Field( - HumanType, - required=True - ) - user3 = Field( - HumanType, - required=True - ) + user = Field(HumanType, required=True, name=String()) + version = String(required=True) + user1 = Field(HumanType, required=True) + user2 = Field(HumanType, required=True) + user3 = Field(HumanType, required=True) @staticmethod def resolve_user(root, info, name=None): @@ -91,9 +76,7 @@ def callback(query_depths): document_ast=document, rules=( 
depth_limit_validator( - max_depth=max_depth, - ignore=ignore, - callback=callback + max_depth=max_depth, ignore=ignore, callback=callback ), ), ) diff --git a/graphene/validation/tests/test_disable_introspection.py b/graphene/validation/tests/test_disable_introspection.py index 060199001..958a1afa6 100644 --- a/graphene/validation/tests/test_disable_introspection.py +++ b/graphene/validation/tests/test_disable_introspection.py @@ -5,9 +5,7 @@ class Query(ObjectType): - name = String( - required=True - ) + name = String(required=True) @staticmethod def resolve_name(root, info): @@ -23,9 +21,7 @@ def run_query(query: str): errors = validate( schema=schema.graphql_schema, document_ast=document, - rules=( - DisableIntrospection, - ), + rules=(DisableIntrospection,), ) return errors From 47696559c700c1314db3efcb52ed1f4986bc2b27 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+aryan340@users.noreply.github.com> Date: Tue, 24 Aug 2021 08:30:54 +0530 Subject: [PATCH 136/141] run linters locally --- graphene/pyutils/dataclasses.py | 14 ++------------ graphene/relay/node.py | 8 +++----- graphene/relay/tests/test_connection_query.py | 12 ++++-------- graphene/types/mutation.py | 10 ++++------ graphene/types/objecttype.py | 6 +----- graphene/types/tests/test_base64.py | 9 ++------- graphene/types/tests/test_enum.py | 8 ++++---- graphene/utils/orderedtype.py | 2 +- graphene/validation/disable_introspection.py | 3 +-- .../validation/tests/test_depth_limit_validator.py | 12 ++---------- 10 files changed, 24 insertions(+), 60 deletions(-) diff --git a/graphene/pyutils/dataclasses.py b/graphene/pyutils/dataclasses.py index f847b211a..1a474526d 100644 --- a/graphene/pyutils/dataclasses.py +++ b/graphene/pyutils/dataclasses.py @@ -291,14 +291,7 @@ def __set_name__(self, owner, name): class _DataclassParams: - __slots__ = ( - "init", - "repr", - "eq", - "order", - "unsafe_hash", - "frozen", - ) + __slots__ = ("init", "repr", "eq", "order", "unsafe_hash", "frozen") def 
__init__(self, init, repr, eq, order, unsafe_hash, frozen): self.init = init @@ -1157,10 +1150,7 @@ class C(Base): name = item tp = "typing.Any" elif len(item) == 2: - ( - name, - tp, - ) = item + (name, tp) = item elif len(item) == 3: name, tp, spec = item namespace[name] = spec diff --git a/graphene/relay/node.py b/graphene/relay/node.py index b189bc97b..8defefff1 100644 --- a/graphene/relay/node.py +++ b/graphene/relay/node.py @@ -92,11 +92,9 @@ def get_node_from_global_id(cls, info, global_id, only_type=None): _type, _id = cls.from_global_id(global_id) except Exception as e: raise Exception( - ( - f'Unable to parse global ID "{global_id}". ' - 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' - f"Exception message: {str(e)}" - ) + f'Unable to parse global ID "{global_id}". ' + 'Make sure it is a base64 encoded string in the format: "TypeName:id". ' + f"Exception message: {str(e)}" ) graphene_type = info.schema.get_type(_type) diff --git a/graphene/relay/tests/test_connection_query.py b/graphene/relay/tests/test_connection_query.py index 42345e540..b697c462a 100644 --- a/graphene/relay/tests/test_connection_query.py +++ b/graphene/relay/tests/test_connection_query.py @@ -163,16 +163,14 @@ async def test_respects_first_and_after_and_before_too_few(): @mark.asyncio async def test_respects_first_and_after_and_before_too_many(): await check( - f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', - "BCD", + f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_first_and_after_and_before_exactly_right(): await check( - f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', - "BCD", + f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @@ -188,16 +186,14 @@ async def test_respects_last_and_after_and_before_too_few(): @mark.asyncio async def test_respects_last_and_after_and_before_too_many(): await check( - f'last: 4, after: 
"{cursor_for("A")}", before: "{cursor_for("E")}"', - "BCD", + f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_last_and_after_and_before_exactly_right(): await check( - f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', - "BCD", + f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) diff --git a/graphene/types/mutation.py b/graphene/types/mutation.py index 7f98e3126..ca87775ab 100644 --- a/graphene/types/mutation.py +++ b/graphene/types/mutation.py @@ -96,12 +96,10 @@ def __init_subclass_with_meta__( input_class = getattr(cls, "Input", None) if input_class: warn_deprecation( - ( - f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input." - " Input is now only used in ClientMutationID.\n" - "Read more:" - " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" - ) + f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input." 
+ " Input is now only used in ClientMutationID.\n" + "Read more:" + " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" ) if input_class: arguments = props(input_class) diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py index 2b1902eaf..1ff29a2e4 100644 --- a/graphene/types/objecttype.py +++ b/graphene/types/objecttype.py @@ -27,11 +27,7 @@ class InterObjectType: pass base_cls = super().__new__( - cls, - name_, - (InterObjectType,) + bases, - namespace, - **options, + cls, name_, (InterObjectType,) + bases, namespace, **options ) if base_cls._meta: fields = [ diff --git a/graphene/types/tests/test_base64.py b/graphene/types/tests/test_base64.py index d1b76cb48..433f63c34 100644 --- a/graphene/types/tests/test_base64.py +++ b/graphene/types/tests/test_base64.py @@ -64,16 +64,11 @@ def test_base64_query_none(): def test_base64_query_invalid(): - bad_inputs = [ - dict(), - 123, - "This is not valid base64", - ] + bad_inputs = [dict(), 123, "This is not valid base64"] for input_ in bad_inputs: result = schema.execute( - """{ base64(input: $input) }""", - variables={"input": input_}, + """{ base64(input: $input) }""", variables={"input": input_} ) assert isinstance(result.errors, list) assert len(result.errors) == 1 diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py index 8d5e87af4..6e204aa9c 100644 --- a/graphene/types/tests/test_enum.py +++ b/graphene/types/tests/test_enum.py @@ -26,8 +26,8 @@ def description(self): assert RGB._meta.description == "Description" values = RGB._meta.enum.__members__.values() - assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"] - assert sorted([v.description for v in values]) == [ + assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"] + assert sorted(v.description for v in values) == [ "Description BLUE", "Description GREEN", "Description RED", @@ -52,7 +52,7 @@ def test_enum_instance_construction(): RGB = Enum("RGB", 
"RED,GREEN,BLUE") values = RGB._meta.enum.__members__.values() - assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"] + assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"] def test_enum_from_builtin_enum(): @@ -465,7 +465,7 @@ class Query(ObjectType): color } } - """, + """ ) assert not result.errors assert result.data == {"createPaint": {"color": "RED"}} diff --git a/graphene/utils/orderedtype.py b/graphene/utils/orderedtype.py index fb8783d27..294ad54e7 100644 --- a/graphene/utils/orderedtype.py +++ b/graphene/utils/orderedtype.py @@ -36,4 +36,4 @@ def __gt__(self, other): return NotImplemented def __hash__(self): - return hash((self.creation_counter)) + return hash(self.creation_counter) diff --git a/graphene/validation/disable_introspection.py b/graphene/validation/disable_introspection.py index be25a2871..49a7d6073 100644 --- a/graphene/validation/disable_introspection.py +++ b/graphene/validation/disable_introspection.py @@ -11,7 +11,6 @@ def enter_field(self, node: FieldNode, *_args): if is_introspection_key(field_name): self.report_error( GraphQLError( - f"Cannot query '{field_name}': introspection is disabled.", - node, + f"Cannot query '{field_name}': introspection is disabled.", node ) ) diff --git a/graphene/validation/tests/test_depth_limit_validator.py b/graphene/validation/tests/test_depth_limit_validator.py index 0c22089ab..29c1508c4 100644 --- a/graphene/validation/tests/test_depth_limit_validator.py +++ b/graphene/validation/tests/test_depth_limit_validator.py @@ -236,11 +236,7 @@ def test_should_ignore_field(): errors, result = run_query( query, 10, - ignore=[ - "user1", - re.compile("user2"), - lambda field_name: field_name == "user3", - ], + ignore=["user1", re.compile("user2"), lambda field_name: field_name == "user3"], ) expected = {"read1": 2, "read2": 0} @@ -255,8 +251,4 @@ def test_should_raise_invalid_ignore(): } """ with raises(ValueError, match="Invalid ignore option:"): - run_query( - query, - 10, - 
ignore=[True], - ) + run_query(query, 10, ignore=[True]) From c1bd25555ce8e06a3a02513b627c7aacc6b6bb55 Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Tue, 28 Sep 2021 06:41:54 +0530 Subject: [PATCH 137/141] Update queryvalidation.rst --- docs/execution/queryvalidation.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/execution/queryvalidation.rst b/docs/execution/queryvalidation.rst index 8402b9ea9..e4ff76410 100644 --- a/docs/execution/queryvalidation.rst +++ b/docs/execution/queryvalidation.rst @@ -22,6 +22,7 @@ Usage Here is how you would implement depth-limiting on your schema. .. code:: python + from graphql import validate, parse from graphene import ObjectType, Schema, String from graphene.validation import depth_limit_validator @@ -58,6 +59,7 @@ Usage Here is how you would disable introspection for your schema. .. code:: python + from graphql import validate, parse from graphene import ObjectType, Schema, String from graphene.validation import DisableIntrospection @@ -92,6 +94,7 @@ reason. Here is an example query validator that visits field definitions in Grap if any of those fields are blacklisted: .. code:: python + from graphql import GraphQLError from graphql.language import FieldNode from graphql.validation import ValidationRule From 1d6f9e984b7d96cc90e21667a0127d665241c92d Mon Sep 17 00:00:00 2001 From: Aryan Iyappan <69184573+codebyaryan@users.noreply.github.com> Date: Wed, 29 Sep 2021 18:13:08 +0530 Subject: [PATCH 138/141] Mame sure to pass correct graphql schema instance --- docs/execution/queryvalidation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/execution/queryvalidation.rst b/docs/execution/queryvalidation.rst index e4ff76410..9c24a2e38 100644 --- a/docs/execution/queryvalidation.rst +++ b/docs/execution/queryvalidation.rst @@ -38,7 +38,7 @@ Here is how you would implement depth-limiting on your schema. # will not be executed. 
validation_errors = validate( - schema=schema, + schema=schema.graphql_schema, document_ast=parse('THE QUERY'), rules=( depth_limit_validator( @@ -74,7 +74,7 @@ Here is how you would disable introspection for your schema. # introspection queries will not be executed. validation_errors = validate( - schema=schema, + schema=schema.graphql_schema, document_ast=parse('THE QUERY'), rules=( DisableIntrospection, From b6c8931b22d7016a0022fc8de3af12d73b7697ec Mon Sep 17 00:00:00 2001 From: Eran Kampf <205185+ekampf@users.noreply.github.com> Date: Wed, 29 Sep 2021 17:11:16 -0700 Subject: [PATCH 139/141] Fix GraphQL-core dependency GraphQL-core released `3.2.0rc1` with some breaking changes and 1. We should be getting RC releases in our dependencies 2. It has breaking changes, so we shouldn't get 3.2.0 unless someone fixes it explicitly --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1503c3c69..ae59a92ad 100644 --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ def run_tests(self): keywords="api graphql protocol rest relay graphene", packages=find_packages(exclude=["examples*"]), install_requires=[ - "graphql-core>=3.1.2,<4", + "graphql-core~=3.1.2", "graphql-relay>=3.0,<4", "aniso8601>=8,<10", ], From 0a54094f59e1b1bca83e4574dbb35587536bbce6 Mon Sep 17 00:00:00 2001 From: Eran Kampf <205185+ekampf@users.noreply.github.com> Date: Wed, 29 Sep 2021 23:42:36 -0700 Subject: [PATCH 140/141] v3.0.0b8 --- graphene/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphene/__init__.py b/graphene/__init__.py index 34729de0b..c8ffc0c4a 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -41,7 +41,7 @@ from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 0, 0, "beta", 7) +VERSION = (3, 0, 0, "beta", 8) __version__ = get_version(VERSION) From 27f19e5a905f95f3703a187aaabb67f1623d4a92 Mon Sep 17 00:00:00 2001 From: Mel van Londen 
Date: Sat, 13 Nov 2021 14:15:18 -0800 Subject: [PATCH 141/141] release v3 stable --- README.md | 8 +------- docs/quickstart.rst | 4 ++-- graphene/__init__.py | 2 +- 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 85849a3dc..a7714e336 100644 --- a/README.md +++ b/README.md @@ -4,12 +4,6 @@ **We are looking for contributors**! Please check the [ROADMAP](https://github.com/graphql-python/graphene/blob/master/ROADMAP.md) to see how you can help ❤️ ---- - -**The below readme is the documentation for the `dev` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the [v2 docs](https://docs.graphene-python.org/en/stable/)** - ---- - ## Introduction [Graphene](http://graphene-python.org) is an opinionated Python library for building GraphQL schemas/types fast and easily. @@ -37,7 +31,7 @@ Also, Graphene is fully compatible with the GraphQL spec, working seamlessly wit For instaling graphene, just run this command in your shell ```bash -pip install "graphene>=2.0" +pip install "graphene>=3.0" ``` ## Examples diff --git a/docs/quickstart.rst b/docs/quickstart.rst index 62d11949a..0b6c69938 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -60,14 +60,14 @@ Requirements ~~~~~~~~~~~~ - Python (2.7, 3.4, 3.5, 3.6, pypy) -- Graphene (2.0) +- Graphene (3.0) Project setup ~~~~~~~~~~~~~ .. code:: bash - pip install "graphene>=2.0" + pip install "graphene>=3.0" Creating a basic Schema ~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/graphene/__init__.py b/graphene/__init__.py index c8ffc0c4a..b0b4244da 100644 --- a/graphene/__init__.py +++ b/graphene/__init__.py @@ -41,7 +41,7 @@ from .utils.module_loading import lazy_import from .utils.resolve_only_args import resolve_only_args -VERSION = (3, 0, 0, "beta", 8) +VERSION = (3, 0, 0, "final", 0) __version__ = get_version(VERSION)