diff --git a/HISTORY.md b/HISTORY.md
index 4203afeb68e..f5ff0d39dde 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,3 +1,9 @@
+## v1.10.6 (2023-03-08)
+
+* Implement logic to support creating validators from non standard callables by using defaults to identify them and unwrapping `functools.partial` and `functools.partialmethod` when checking the signature, #5126 by @JensHeinrich
+* Fix mypy plugin for v1.1.1, and fix `dataclass_transform` decorator for pydantic dataclasses, #5111 by @cdce8p
+* Raise `ValidationError`, not `ConfigError`, when a discriminator value is unhashable, #4773 by @kurtmckee
+
## v1.10.5 (2023-02-15)
* Fix broken parametrized bases handling with `GenericModel`s with complex sets of models, #5052 by @MarkusSintonen
diff --git a/README.md b/README.md
index e74f0382b51..e4e7abe3271 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,11 @@ Data validation and settings management using Python type hints.
Fast and extensible, *pydantic* plays nicely with your linters/IDE/brain.
Define how data should be in pure, canonical Python 3.7+; validate it with *pydantic*.
+## Pydantic Company :rocket:
+
+We've started a company based on the principles that I believe have led to Pydantic's success.
+Learn more from the [Company Announcement](https://pydantic.dev/announcement/).
+
## Help
See [documentation](https://docs.pydantic.dev/) for more details.
diff --git a/docs/extra/tweaks.css b/docs/extra/tweaks.css
index abe9d5b4594..4b5f1eaab0d 100644
--- a/docs/extra/tweaks.css
+++ b/docs/extra/tweaks.css
@@ -1,3 +1,12 @@
+:root {
+ --md-admonition-icon--pied-piper: url('data:image/svg+xml;charset=utf-8, ')
+}
+
+.md-typeset .announcement>.admonition-title:before {
+ -webkit-mask-image: var(--md-admonition-icon--pied-piper) !important;
+ mask-image: var(--md-admonition-icon--pied-piper) !important;
+}
+
.sponsors {
display: flex;
justify-content: center;
diff --git a/docs/theme/main.html b/docs/theme/main.html
index 8973df77f3c..9e7b087faa6 100644
--- a/docs/theme/main.html
+++ b/docs/theme/main.html
@@ -1,6 +1,13 @@
{% extends "base.html" %}
{% block content %}
+
+
Pydantic Company
+
We've started a company based on the principles that I believe have led to Pydantic's success.
+
 Learn more from the Company Announcement .
+
+
{{ super() }}
+
{% endblock %}
diff --git a/docs/usage/models.md b/docs/usage/models.md
index 24a72441420..99fa4ea1390 100644
--- a/docs/usage/models.md
+++ b/docs/usage/models.md
@@ -155,7 +155,7 @@ Here a vanilla class is used to demonstrate the principle, but any ORM class cou
### Data binding
Arbitrary classes are processed by *pydantic* using the `GetterDict` class (see
-[utils.py](https://github.com/pydantic/pydantic/blob/main/pydantic/utils.py)), which attempts to
+[utils.py](https://github.com/pydantic/pydantic/blob/1.10.X-fixes/pydantic/utils.py)), which attempts to
provide a dictionary-like interface to any class. You can customise how this works by setting your own
sub-class of `GetterDict` as the value of `Config.getter_dict` (see [config](model_config.md)).
diff --git a/mkdocs.yml b/mkdocs.yml
index cdaa6bf2bb0..d9b6252ee50 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,5 +1,5 @@
-site_name: pydantic
-site_description: Data validation and settings management using Python type hints
+site_name: Pydantic
+site_description: Data validation using Python type hints
strict: true
site_url: https://docs.pydantic.dev/
diff --git a/pydantic/class_validators.py b/pydantic/class_validators.py
index 87190610c1f..71e66509398 100644
--- a/pydantic/class_validators.py
+++ b/pydantic/class_validators.py
@@ -1,6 +1,6 @@
import warnings
from collections import ChainMap
-from functools import wraps
+from functools import partial, partialmethod, wraps
from itertools import chain
from types import FunctionType
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload
@@ -55,7 +55,7 @@ def validator(
each_item: bool = False,
always: bool = False,
check_fields: bool = True,
- whole: bool = None,
+ whole: Optional[bool] = None,
allow_reuse: bool = False,
) -> Callable[[AnyCallable], 'AnyClassMethod']:
"""
@@ -147,7 +147,11 @@ def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMet
"""
f_cls = function if isinstance(function, classmethod) else classmethod(function)
if not in_ipython() and not allow_reuse:
- ref = f_cls.__func__.__module__ + '.' + f_cls.__func__.__qualname__
+ ref = (
+ getattr(f_cls.__func__, '__module__', '')
+ + '.'
+ + getattr(f_cls.__func__, '__qualname__', f'')
+ )
if ref in _FUNCS:
raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`')
_FUNCS.add(ref)
@@ -165,14 +169,18 @@ def get_validators(self, name: str) -> Optional[Dict[str, Validator]]:
if name != ROOT_KEY:
validators += self.validators.get('*', [])
if validators:
- return {v.func.__name__: v for v in validators}
+ return {getattr(v.func, '__name__', f''): v for v in validators}
else:
return None
def check_for_unused(self) -> None:
unused_validators = set(
chain.from_iterable(
- (v.func.__name__ for v in self.validators[f] if v.check_fields)
+ (
+ getattr(v.func, '__name__', f'')
+ for v in self.validators[f]
+ if v.check_fields
+ )
for f in (self.validators.keys() - self.used_validators)
)
)
@@ -243,8 +251,19 @@ def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
"""
from inspect import signature
- sig = signature(validator)
- args = list(sig.parameters.keys())
+ if not isinstance(validator, (partial, partialmethod)):
+ # This should be the default case, so overhead is reduced
+ sig = signature(validator)
+ args = list(sig.parameters.keys())
+ else:
+ # Fix the generated argument lists of partial methods
+ sig = signature(validator.func)
+ args = [
+ k
+ for k in signature(validator.func).parameters.keys()
+ if k not in validator.args | validator.keywords.keys()
+ ]
+
first_arg = args.pop(0)
if first_arg == 'self':
raise ConfigError(
diff --git a/pydantic/dataclasses.py b/pydantic/dataclasses.py
index f03def0d9c0..86bad1e6381 100644
--- a/pydantic/dataclasses.py
+++ b/pydantic/dataclasses.py
@@ -32,6 +32,7 @@ class M:
validation without altering default `M` behaviour.
"""
import copy
+import dataclasses
import sys
from contextlib import contextmanager
from functools import wraps
@@ -93,7 +94,7 @@ def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
if sys.version_info >= (3, 10):
- @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
+ @dataclass_transform(field_specifiers=(dataclasses.field, Field))
@overload
def dataclass(
*,
@@ -110,7 +111,7 @@ def dataclass(
) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
...
- @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
+ @dataclass_transform(field_specifiers=(dataclasses.field, Field))
@overload
def dataclass(
_cls: Type[_T],
@@ -130,7 +131,7 @@ def dataclass(
else:
- @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
+ @dataclass_transform(field_specifiers=(dataclasses.field, Field))
@overload
def dataclass(
*,
@@ -146,7 +147,7 @@ def dataclass(
) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
...
- @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
+ @dataclass_transform(field_specifiers=(dataclasses.field, Field))
@overload
def dataclass(
_cls: Type[_T],
@@ -164,7 +165,7 @@ def dataclass(
...
-@dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
+@dataclass_transform(field_specifiers=(dataclasses.field, Field))
def dataclass(
_cls: Optional[Type[_T]] = None,
*,
@@ -188,8 +189,6 @@ def dataclass(
the_config = get_config(config)
def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper':
- import dataclasses
-
should_use_proxy = (
use_proxy
if use_proxy is not None
@@ -328,7 +327,6 @@ def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
if hasattr(self, '__post_init_post_parse__'):
# We need to find again the initvars. To do that we use `__dataclass_fields__` instead of
# public method `dataclasses.fields`
- import dataclasses
# get all initvars and their default values
initvars_and_values: Dict[str, Any] = {}
@@ -377,8 +375,6 @@ def create_pydantic_model_from_dataclass(
config: Type[Any] = BaseConfig,
dc_cls_doc: Optional[str] = None,
) -> Type['BaseModel']:
- import dataclasses
-
field_definitions: Dict[str, Any] = {}
for field in dataclasses.fields(dc_cls):
default: Any = Undefined
@@ -466,8 +462,6 @@ class B(A):
In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')
"""
- import dataclasses
-
return (
dataclasses.is_dataclass(_cls)
and not hasattr(_cls, '__pydantic_model__')
diff --git a/pydantic/fields.py b/pydantic/fields.py
index ae9a7630ce6..90dcde6b702 100644
--- a/pydantic/fields.py
+++ b/pydantic/fields.py
@@ -223,28 +223,28 @@ def Field(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
- alias: str = None,
- title: str = None,
- description: str = None,
- exclude: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None,
- include: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None,
- const: bool = None,
- gt: float = None,
- ge: float = None,
- lt: float = None,
- le: float = None,
- multiple_of: float = None,
- allow_inf_nan: bool = None,
- max_digits: int = None,
- decimal_places: int = None,
- min_items: int = None,
- max_items: int = None,
- unique_items: bool = None,
- min_length: int = None,
- max_length: int = None,
+ alias: Optional[str] = None,
+ title: Optional[str] = None,
+ description: Optional[str] = None,
+ exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+ include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+ const: Optional[bool] = None,
+ gt: Optional[float] = None,
+ ge: Optional[float] = None,
+ lt: Optional[float] = None,
+ le: Optional[float] = None,
+ multiple_of: Optional[float] = None,
+ allow_inf_nan: Optional[bool] = None,
+ max_digits: Optional[int] = None,
+ decimal_places: Optional[int] = None,
+ min_items: Optional[int] = None,
+ max_items: Optional[int] = None,
+ unique_items: Optional[bool] = None,
+ min_length: Optional[int] = None,
+ max_length: Optional[int] = None,
allow_mutation: bool = True,
- regex: str = None,
- discriminator: str = None,
+ regex: Optional[str] = None,
+ discriminator: Optional[str] = None,
repr: bool = True,
**extra: Any,
) -> Any:
@@ -402,7 +402,7 @@ def __init__(
default_factory: Optional[NoArgAnyCallable] = None,
required: 'BoolUndefined' = Undefined,
final: bool = False,
- alias: str = None,
+ alias: Optional[str] = None,
field_info: Optional[FieldInfo] = None,
) -> None:
@@ -1117,15 +1117,18 @@ def _validate_discriminated_union(
except (AttributeError, TypeError):
return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
- try:
- sub_field = self.sub_fields_mapping[discriminator_value] # type: ignore[index]
- except TypeError:
+ if self.sub_fields_mapping is None:
assert cls is not None
raise ConfigError(
f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
f'you might need to call {cls.__name__}.update_forward_refs().'
)
- except KeyError:
+
+ try:
+ sub_field = self.sub_fields_mapping[discriminator_value]
+ except (KeyError, TypeError):
+ # KeyError: `discriminator_value` is not in the dictionary.
+ # TypeError: `discriminator_value` is unhashable.
assert self.sub_fields_mapping is not None
return v, ErrorWrapper(
InvalidDiscriminator(
diff --git a/pydantic/main.py b/pydantic/main.py
index 361c9669d79..683f3f8801a 100644
--- a/pydantic/main.py
+++ b/pydantic/main.py
@@ -33,7 +33,6 @@
from .fields import (
MAPPING_LIKE_SHAPES,
Field,
- FieldInfo,
ModelField,
ModelPrivateAttr,
PrivateAttr,
@@ -118,7 +117,7 @@ def hash_function(self_: Any) -> int:
_is_base_model_class_defined = False
-@dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
+@dataclass_transform(kw_only_default=True, field_specifiers=(Field,))
class ModelMetaclass(ABCMeta):
@no_type_check # noqa C901
def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901
diff --git a/pydantic/mypy.py b/pydantic/mypy.py
index 02a0510a0f6..0a036ae3a26 100644
--- a/pydantic/mypy.py
+++ b/pydantic/mypy.py
@@ -76,6 +76,7 @@
METADATA_KEY = 'pydantic-mypy-metadata'
BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
BASESETTINGS_FULLNAME = 'pydantic.env_settings.BaseSettings'
+MODEL_METACLASS_FULLNAME = 'pydantic.main.ModelMetaclass'
FIELD_FULLNAME = 'pydantic.fields.Field'
DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass'
@@ -87,6 +88,9 @@ def parse_mypy_version(version: str) -> Tuple[int, ...]:
MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version)
BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__'
+# Increment version if plugin changes and mypy caches should be invalidated
+PLUGIN_VERSION = 1
+
def plugin(version: str) -> 'TypingType[Plugin]':
"""
@@ -102,6 +106,7 @@ class PydanticPlugin(Plugin):
def __init__(self, options: Options) -> None:
self.plugin_config = PydanticPluginConfig(options)
self._plugin_data = self.plugin_config.to_data()
+ self._plugin_data['version'] = PLUGIN_VERSION
super().__init__(options)
def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]':
@@ -112,6 +117,11 @@ def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefCont
return self._pydantic_model_class_maker_callback
return None
+ def get_metaclass_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
+ if fullname == MODEL_METACLASS_FULLNAME:
+ return self._pydantic_model_metaclass_marker_callback
+ return None
+
def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]':
sym = self.lookup_fully_qualified(fullname)
if sym and sym.fullname == FIELD_FULLNAME:
@@ -139,6 +149,19 @@ def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
transformer = PydanticModelTransformer(ctx, self.plugin_config)
transformer.transform()
+ def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None:
+ """Reset dataclass_transform_spec attribute of ModelMetaclass.
+
+ Let the plugin handle it. This behavior can be disabled
+        if 'debug_dataclass_transform' is set to `True`, for testing purposes.
+ """
+ if self.plugin_config.debug_dataclass_transform:
+ return
+ info_metaclass = ctx.cls.info.declared_metaclass
+ assert info_metaclass, "callback not passed from 'get_metaclass_hook'"
+ if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
+ info_metaclass.type.dataclass_transform_spec = None # type: ignore[attr-defined]
+
def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type':
"""
Extract the type of the `default` argument from the Field function, and use it as the return type.
@@ -194,11 +217,18 @@ def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type':
class PydanticPluginConfig:
- __slots__ = ('init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields')
+ __slots__ = (
+ 'init_forbid_extra',
+ 'init_typed',
+ 'warn_required_dynamic_aliases',
+ 'warn_untyped_fields',
+ 'debug_dataclass_transform',
+ )
init_forbid_extra: bool
init_typed: bool
warn_required_dynamic_aliases: bool
warn_untyped_fields: bool
+ debug_dataclass_transform: bool # undocumented
def __init__(self, options: Options) -> None:
if options.config_file is None: # pragma: no cover
diff --git a/pydantic/version.py b/pydantic/version.py
index 0a3b691c5f0..57c7d0f3361 100644
--- a/pydantic/version.py
+++ b/pydantic/version.py
@@ -1,6 +1,6 @@
__all__ = 'compiled', 'VERSION', 'version_info'
-VERSION = '1.10.5'
+VERSION = '1.10.6'
try:
import cython # type: ignore
diff --git a/tests/test_discrimated_union.py b/tests/test_discrimated_union.py
index a099fcafd24..3ce40df4d01 100644
--- a/tests/test_discrimated_union.py
+++ b/tests/test_discrimated_union.py
@@ -423,3 +423,21 @@ class Container(GenericModel, Generic[T]):
# coercion is done properly
assert Container[str].parse_obj({'result': {'type': 'Success', 'data': 1}}).result.data == '1'
+
+
+def test_discriminator_with_unhashable_type():
+ """Verify an unhashable discriminator value raises a ValidationError."""
+
+ class Model1(BaseModel):
+ target: Literal['t1']
+ a: int
+
+ class Model2(BaseModel):
+ target: Literal['t2']
+ b: int
+
+ class Foo(BaseModel):
+ foo: Union[Model1, Model2] = Field(discriminator='target')
+
+ with pytest.raises(ValidationError, match=re.escape("No match for discriminator 'target' and value {}")):
+ Foo(**{'foo': {'target': {}}})
diff --git a/tests/test_validators.py b/tests/test_validators.py
index de67ffe4720..5c4d57eac62 100644
--- a/tests/test_validators.py
+++ b/tests/test_validators.py
@@ -1,8 +1,9 @@
from collections import deque
from datetime import datetime
from enum import Enum
+from functools import partial, partialmethod
from itertools import product
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import pytest
from typing_extensions import Literal
@@ -1345,3 +1346,32 @@ class Model(BaseModel):
{'loc': ('foo',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items'},
{'loc': ('bar',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items'},
]
+
+
+@pytest.mark.parametrize(
+ 'func,allow_reuse',
+ [
+ pytest.param(partial, False, id='`partial` and check for reuse'),
+ pytest.param(partial, True, id='`partial` and ignore reuse'),
+ pytest.param(partialmethod, False, id='`partialmethod` and check for reuse'),
+ pytest.param(partialmethod, True, id='`partialmethod` and ignore reuse'),
+ ],
+)
+def test_functool_as_validator(
+ reset_tracked_validators,
+ func: Callable,
+ allow_reuse: bool,
+):
+ def custom_validator(
+ cls,
+ v: Any,
+ allowed: str,
+ ) -> Any:
+ assert v == allowed, f'Only {allowed} allowed as value; given: {v}'
+ return v
+
+ validate = func(custom_validator, allowed='TEXT')
+
+ class TestClass(BaseModel):
+ name: str
+ _custom_validate = validator('name', allow_reuse=allow_reuse)(validate)