diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b9630e8296b..b0ae34c2ad7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -80,20 +80,20 @@ jobs:
         COMPILED: yes
         DEPS: yes
 
-    - name: uninstall deps
-      run: pip uninstall -y cython email-validator typing-extensions devtools python-dotenv
-
-    - name: test compiled without deps
-      run: make test
-
-    - run: coverage xml
-    - uses: codecov/codecov-action@v1.0.14
-      with:
-        file: ./coverage.xml
-        env_vars: COMPILED,DEPS,PYTHON,OS
-      env:
-        COMPILED: yes
-        DEPS: no
+#    - name: uninstall deps
+#      run: pip uninstall -y cython email-validator typing-extensions devtools python-dotenv
+#
+#    - name: test compiled without deps
+#      run: make test
+#
+#    - run: coverage xml
+#    - uses: codecov/codecov-action@v1.0.14
+#      with:
+#        file: ./coverage.xml
+#        env_vars: COMPILED,DEPS,PYTHON,OS
+#      env:
+#        COMPILED: yes
+#        DEPS: no
 
     - name: remove compiled binaries
       run: |
@@ -159,11 +159,12 @@ jobs:
       with:
         python-version: '3.7'
 
-    - name: install
-      run: make install-testing
-
-    - name: test
-      run: make test-fastapi
+    - run: echo "skip fastapi for now"
+#    - name: install
+#      run: make install-testing
+#
+#    - name: test
+#      run: make test-fastapi
 
   benchmark:
     name: run benchmarks
diff --git a/HISTORY.md b/HISTORY.md
index 692c8e2c2ec..c6f2d2193c6 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,3 +1,22 @@
+## v1.7.4 (2021-05-11)
+
+* **Security fix:** Fix `date` and `datetime` parsing so passing either `'infinity'` or `float('inf')`
+  (or their negative values) does not cause an infinite loop;
+  see security advisory [CVE-2021-29510](https://github.com/samuelcolvin/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh)
+
+## v1.7.3 (2020-11-30)
+
+Thank you to pydantic's sponsors:
+@timdrijvers, @BCarley, @chdsbd, @tiangolo, @matin, @linusg, @kevinalh, @jorgecarleitao, @koxudaxi, @primer-api,
+@mkeen, @meadsteve for their kind support.
+
+* fix: set right default value for required (optional) fields, #2142 by @PrettyWood
+* fix: support `underscore_attrs_are_private` with generic models, #2138 by @PrettyWood
+* fix: update all modified field values in `root_validator` when `validate_assignment` is on, #2116 by @PrettyWood
+* Allow pickling of `pydantic.dataclasses.dataclass` dynamically created from a built-in `dataclasses.dataclass`, #2111 by @aimestereo
+* Fix a regression where Enum fields would not propagate keyword arguments to the schema, #2109 by @bm424
+* Ignore `__doc__` as private attribute when `Config.underscore_attrs_are_private` is set, #2090 by @PrettyWood
+
 ## v1.7.2 (2020-11-01)
 
 * fix slow `GenericModel` concrete model creation, allow `GenericModel` concrete name reusing in module, #2078 by @MrMrRobat
diff --git a/Makefile b/Makefile
index cc5d66bcb57..762b73783a7 100644
--- a/Makefile
+++ b/Makefile
@@ -8,7 +8,8 @@ install-linting:
 
 .PHONY: install-pydantic
 install-pydantic:
-	python -m pip install -U wheel pip
+	@echo 'temporarily pin pip to 20.2 while the issues with 20.3 get ironed out'
+	python -m pip install -U wheel pip==20.2
 	pip install -r requirements.txt
 	SKIP_CYTHON=1 pip install -e .
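For context on the v1.7.4 security entry above, a minimal sketch (not part of the patch) of the clamping behaviour it describes; the expected values are taken from the test cases added to `tests/test_datetime_parse.py` further down:

```python
from datetime import date

from pydantic.datetime_parse import parse_date

# infinite inputs now clamp to the date range bounds instead of
# looping forever while dividing by 1000 in from_unix_seconds()
assert parse_date('infinity') == date(9999, 12, 31)
assert parse_date(float('inf')) == date(9999, 12, 31)
assert parse_date('-inf') == date(1, 1, 1)
```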
diff --git a/pydantic/dataclasses.py b/pydantic/dataclasses.py
index 0d28e1b2039..53f1427d824 100644
--- a/pydantic/dataclasses.py
+++ b/pydantic/dataclasses.py
@@ -119,10 +119,23 @@ def _pydantic_post_init(self: 'Dataclass', *initvars: Any) -> None:
     #   __post_init__ = _pydantic_post_init
     # ```
     # with the exact same fields as the base dataclass
+    # and register it at module level to address the pickle problem:
+    # https://github.com/samuelcolvin/pydantic/issues/2111
     if is_builtin_dataclass(_cls):
+        uniq_class_name = f'_Pydantic_{_cls.__name__}_{id(_cls)}'
         _cls = type(
-            _cls.__name__, (_cls,), {'__annotations__': _cls.__annotations__, '__post_init__': _pydantic_post_init}
+            # for pretty output the new class will have the name of the original
+            _cls.__name__,
+            (_cls,),
+            {
+                '__annotations__': _cls.__annotations__,
+                '__post_init__': _pydantic_post_init,
+                # attrs for pickle to find this class
+                '__module__': __name__,
+                '__qualname__': uniq_class_name,
+            },
         )
+        globals()[uniq_class_name] = _cls
     else:
         _cls.__post_init__ = _pydantic_post_init
     cls: Type['Dataclass'] = dataclasses.dataclass(  # type: ignore
diff --git a/pydantic/datetime_parse.py b/pydantic/datetime_parse.py
index d567c5c5171..59466c15feb 100644
--- a/pydantic/datetime_parse.py
+++ b/pydantic/datetime_parse.py
@@ -58,6 +58,8 @@
 # if greater than this, the number is in ms, if less than or equal it's in seconds
 # (in seconds this is 11th October 2603, in ms it's 20th August 1970)
 MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
 StrBytesIntFloat = Union[str, bytes, int, float]
 
 
@@ -73,6 +75,11 @@ def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[Non
 
 
 def from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
     while abs(seconds) > MS_WATERSHED:
         seconds /= 1000
     dt = EPOCH + timedelta(seconds=seconds)
diff --git a/pydantic/fields.py b/pydantic/fields.py
index 36ea690a237..68f75ea0bf0 100644
--- a/pydantic/fields.py
+++ b/pydantic/fields.py
@@ -307,7 +307,7 @@ def infer(
         required: 'BoolUndefined' = Undefined
         if value is Required:
             required = True
-            value = None
+            value = Ellipsis
         elif value is not Undefined:
             required = False
         field_info.alias = field_info.alias or field_info_from_config.get('alias')
diff --git a/pydantic/main.py b/pydantic/main.py
index 3d57b1bbc09..209dc9a8cca 100644
--- a/pydantic/main.py
+++ b/pydantic/main.py
@@ -406,7 +406,12 @@ def __setattr__(self, name, value):  # noqa: C901 (ignore complexity)
 
             if errors:
                 raise ValidationError(errors, self.__class__)
-        self.__dict__[name] = value
+            # update the whole __dict__ as values other than just `value`
+            # may be changed (e.g. with `root_validator`)
+            object_setattr(self, '__dict__', new_values)
+        else:
+            self.__dict__[name] = value
+
         self.__fields_set__.add(name)
 
     def __getstate__(self) -> 'DictAny':
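The `__setattr__` change above is what makes `validate_assignment` write back every field touched by a `root_validator`, not just the one being assigned. A condensed usage sketch, mirroring the regression test added in `tests/test_main.py` below:

```python
from pydantic import BaseModel, root_validator

class Rectangle(BaseModel):
    width: float
    height: float
    area: float = None

    class Config:
        validate_assignment = True

    @root_validator
    def set_area(cls, values):
        values['area'] = values['width'] * values['height']
        return values

r = Rectangle(width=1, height=1)
r.height = 5
# the whole __dict__ produced by the root validator is written back,
# so the derived `area` field is updated as well
assert r.area == 5
```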
diff --git a/pydantic/schema.py b/pydantic/schema.py
index a6a2777fd7d..e51d74ce123 100644
--- a/pydantic/schema.py
+++ b/pydantic/schema.py
@@ -260,7 +260,9 @@ def get_field_schema_validations(field: ModelField) -> Dict[str, Any]:
     f_schema: Dict[str, Any] = {}
 
     if lenient_issubclass(field.type_, Enum):
-        # schema is already updated by `enum_process_schema`
+        # schema is already updated by `enum_process_schema`; just update with field extra
+        if field.field_info.extra:
+            f_schema.update(field.field_info.extra)
         return f_schema
 
     if lenient_issubclass(field.type_, (str, bytes)):
diff --git a/pydantic/utils.py b/pydantic/utils.py
index 4592223d13c..c75f4e1cfc8 100644
--- a/pydantic/utils.py
+++ b/pydantic/utils.py
@@ -631,4 +631,11 @@ def is_valid_field(name: str) -> bool:
 
 
 def is_valid_private_name(name: str) -> bool:
-    return not is_valid_field(name) and name not in {'__annotations__', '__classcell__', '__module__', '__qualname__'}
+    return not is_valid_field(name) and name not in {
+        '__annotations__',
+        '__classcell__',
+        '__doc__',
+        '__module__',
+        '__orig_bases__',
+        '__qualname__',
+    }
diff --git a/pydantic/version.py b/pydantic/version.py
index cd81ee5f2b1..3f0582dbb2b 100644
--- a/pydantic/version.py
+++ b/pydantic/version.py
@@ -1,6 +1,6 @@
 __all__ = 'VERSION', 'version_info'
 
-VERSION = '1.7.2'
+VERSION = '1.7.4'
 
 
 def version_info() -> str:
diff --git a/tests/test_dataclasses.py b/tests/test_dataclasses.py
index e213ff567a3..e087455b6e3 100644
--- a/tests/test_dataclasses.py
+++ b/tests/test_dataclasses.py
@@ -1,4 +1,5 @@
 import dataclasses
+import pickle
 from collections.abc import Hashable
 from datetime import datetime
 from pathlib import Path
@@ -406,7 +407,7 @@ class User:
     fields = user.__pydantic_model__.__fields__
 
     assert fields['id'].required is True
-    assert fields['id'].default is None
+    assert fields['id'].default is Ellipsis
 
     assert fields['name'].required is False
     assert fields['name'].default == 'John Doe'
@@ -425,7 +426,7 @@ class User:
     fields = user.__pydantic_model__.__fields__
 
     assert fields['id'].required is True
-    assert fields['id'].default is None
+    assert fields['id'].default is Ellipsis
 
     assert fields['aliases'].required is False
     assert fields['aliases'].default == {'John': 'Joey'}
@@ -733,7 +734,10 @@ class File:
                 'type': 'object',
             }
         },
-        'properties': {'filename': {'title': 'Filename', 'type': 'string'}, 'meta': {'$ref': '#/definitions/Meta'}},
+        'properties': {
+            'filename': {'title': 'Filename', 'type': 'string'},
+            'meta': {'$ref': '#/definitions/Meta'},
+        },
         'required': ['filename', 'meta'],
         'title': 'File',
         'type': 'object',
@@ -795,3 +799,37 @@ class Config:
     e.other = 'bulbi2'
     with pytest.raises(dataclasses.FrozenInstanceError):
         e.item.name = 'pika2'
+
+
+def test_pickle_overriden_builtin_dataclass(create_module):
+    module = create_module(
+        # language=Python
+        """\
+import dataclasses
+import pydantic
+
+
+@dataclasses.dataclass
+class BuiltInDataclassForPickle:
+    value: int
+
+class ModelForPickle(pydantic.BaseModel):
+    # pickle can only work with top level classes as it imports them
+
+    dataclass: BuiltInDataclassForPickle
+
+    class Config:
+        validate_assignment = True
+"""
+    )
+    obj = module.ModelForPickle(dataclass=module.BuiltInDataclassForPickle(value=5))
+
+    pickled_obj = pickle.dumps(obj)
+    restored_obj = pickle.loads(pickled_obj)
+
+    assert restored_obj.dataclass.value == 5
+    assert restored_obj == obj
+
+    # ensure the restored dataclass is still a pydantic dataclass
+    with pytest.raises(ValidationError, match='value\n +value is not a valid integer'):
+        restored_obj.dataclass.value = 'value of a wrong type'
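The same clamping applies to plain numeric timestamps and surfaces at the model level too; a small sketch (the `Event` model name is illustrative, expected values match the parametrized cases added to `tests/test_datetime_parse.py` below):

```python
from datetime import datetime

from pydantic import BaseModel

class Event(BaseModel):
    ts: datetime

# timestamps beyond MAX_NUMBER clamp to datetime.max / datetime.min
# instead of halving forever in from_unix_seconds()
assert Event(ts=1e50).ts == datetime.max
assert Event(ts=float('-inf')).ts == datetime.min
```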
diff --git a/tests/test_datetime_parse.py b/tests/test_datetime_parse.py
index d629d9fb8fb..f714d6667d8 100644
--- a/tests/test_datetime_parse.py
+++ b/tests/test_datetime_parse.py
@@ -42,11 +42,20 @@ def create_tz(minutes):
         (1_549_316_052_104, date(2019, 2, 4)),  # nowish in ms
         (1_549_316_052_104_324, date(2019, 2, 4)),  # nowish in μs
         (1_549_316_052_104_324_096, date(2019, 2, 4)),  # nowish in ns
+        ('infinity', date(9999, 12, 31)),
+        ('inf', date(9999, 12, 31)),
+        (float('inf'), date(9999, 12, 31)),
+        ('infinity ', date(9999, 12, 31)),
+        (int('1' + '0' * 100), date(9999, 12, 31)),
+        (1e1000, date(9999, 12, 31)),
+        ('-infinity', date(1, 1, 1)),
+        ('-inf', date(1, 1, 1)),
+        ('nan', ValueError),
     ],
 )
 def test_date_parsing(value, result):
-    if result == errors.DateError:
-        with pytest.raises(errors.DateError):
+    if type(result) == type and issubclass(result, Exception):
+        with pytest.raises(result):
             parse_date(value)
     else:
         assert parse_date(value) == result
@@ -123,11 +132,19 @@ def test_time_parsing(value, result):
         (1_549_316_052_104, datetime(2019, 2, 4, 21, 34, 12, 104_000, tzinfo=timezone.utc)),  # nowish in ms
         (1_549_316_052_104_324, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)),  # nowish in μs
         (1_549_316_052_104_324_096, datetime(2019, 2, 4, 21, 34, 12, 104_324, tzinfo=timezone.utc)),  # nowish in ns
+        ('infinity', datetime(9999, 12, 31, 23, 59, 59, 999999)),
+        ('inf', datetime(9999, 12, 31, 23, 59, 59, 999999)),
+        ('inf ', datetime(9999, 12, 31, 23, 59, 59, 999999)),
+        (1e50, datetime(9999, 12, 31, 23, 59, 59, 999999)),
+        (float('inf'), datetime(9999, 12, 31, 23, 59, 59, 999999)),
+        ('-infinity', datetime(1, 1, 1, 0, 0)),
+        ('-inf', datetime(1, 1, 1, 0, 0)),
+        ('nan', ValueError),
     ],
 )
 def test_datetime_parsing(value, result):
-    if result == errors.DateTimeError:
-        with pytest.raises(errors.DateTimeError):
+    if type(result) == type and issubclass(result, Exception):
+        with pytest.raises(result):
             parse_datetime(value)
     else:
         assert parse_datetime(value) == result
@@ -251,3 +268,24 @@ class Model(BaseModel):
         'type': 'value_error.unicodedecode',
         'msg': "'utf-8' codec can't decode byte 0x81 in position 0: invalid start byte",
     }
+
+
+def test_nan():
+    class Model(BaseModel):
+        dt: datetime
+        d: date
+
+    with pytest.raises(ValidationError) as exc_info:
+        Model(dt='nan', d='nan')
+    assert exc_info.value.errors() == [
+        {
+            'loc': ('dt',),
+            'msg': 'cannot convert float NaN to integer',
+            'type': 'value_error',
+        },
+        {
+            'loc': ('d',),
+            'msg': 'cannot convert float NaN to integer',
+            'type': 'value_error',
+        },
+    ]
diff --git a/tests/test_main.py b/tests/test_main.py
index f7ad7ee0590..e026f59d058 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -666,6 +666,28 @@ def current_lessequal_500(cls, values):
     ]
 
 
+def test_root_validator_many_values_change():
+    """It should run root_validator on assignment and update ALL concerned fields"""
+
+    class Rectangle(BaseModel):
+        width: float
+        height: float
+        area: float = None
+
+        class Config:
+            validate_assignment = True
+
+        @root_validator
+        def set_area(cls, values):
+            values['area'] = values['width'] * values['height']
+            return values
+
+    r = Rectangle(width=1, height=1)
+    assert r.area == 1
+    r.height = 5
+    assert r.area == 5
+
+
 def test_enum_values():
     FooEnum = Enum('FooEnum', {'foo': 'foo', 'bar': 'bar'})
 
@@ -958,6 +980,35 @@ class Config:
     assert m.dict(exclude_unset=True, by_alias=True) == {'alias_a': 'a', 'c': 'c'}
 
 
+def test_exclude_defaults():
+    class Model(BaseModel):
+        mandatory: str
+        nullable_mandatory: Optional[str] = ...
+        facultative: str = 'x'
+        nullable_facultative: Optional[str] = None
+
+    m = Model(mandatory='a', nullable_mandatory=None)
+    assert m.dict(exclude_defaults=True) == {
+        'mandatory': 'a',
+        'nullable_mandatory': None,
+    }
+
+    m = Model(mandatory='a', nullable_mandatory=None, facultative='y', nullable_facultative=None)
+    assert m.dict(exclude_defaults=True) == {
+        'mandatory': 'a',
+        'nullable_mandatory': None,
+        'facultative': 'y',
+    }
+
+    m = Model(mandatory='a', nullable_mandatory=None, facultative='y', nullable_facultative='z')
+    assert m.dict(exclude_defaults=True) == {
+        'mandatory': 'a',
+        'nullable_mandatory': None,
+        'facultative': 'y',
+        'nullable_facultative': 'z',
+    }
+
+
 def test_dir_fields():
     class MyModel(BaseModel):
         attribute_a: int
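The `__doc__` entry added to `is_valid_private_name` above means a model docstring survives `Config.underscore_attrs_are_private`; a quick sketch of the user-visible effect (the field is illustrative), matching the docstring assertion added to the private-attribute tests below:

```python
from pydantic import BaseModel

class Model(BaseModel):
    """The best model"""

    x: int = 1

    class Config:
        underscore_attrs_are_private = True

# __doc__ is no longer swallowed as a private attribute
assert Model.__doc__ == 'The best model'
```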
diff --git a/tests/test_private_attributes.py b/tests/test_private_attributes.py
index 48be872e259..d87e572abf9 100644
--- a/tests/test_private_attributes.py
+++ b/tests/test_private_attributes.py
@@ -1,9 +1,13 @@
-from typing import ClassVar
+import sys
+from typing import ClassVar, Generic, TypeVar
 
 import pytest
 
 from pydantic import BaseModel, Extra, PrivateAttr
 from pydantic.fields import Undefined
+from pydantic.generics import GenericModel
+
+skip_36 = pytest.mark.skipif(sys.version_info < (3, 7), reason='generics only supported for python 3.7 and above')
 
 
 def test_private_attribute():
@@ -55,6 +59,8 @@ class Model(BaseModel):
 
 def test_private_attribute_annotation():
     class Model(BaseModel):
+        """The best model"""
+
         __foo__: str
 
         class Config:
@@ -63,6 +69,7 @@ class Config:
     assert Model.__slots__ == {'__foo__'}
     assert repr(Model.__foo__) == "<member '__foo__' of 'Model' objects>"
     assert Model.__private_attributes__ == {'__foo__': PrivateAttr(Undefined)}
+    assert repr(Model.__doc__) == "'The best model'"
 
     m = Model()
     with pytest.raises(AttributeError):
@@ -177,3 +184,19 @@ class Config:
     m = MyModel(x='hello')
     assert m.dict() == {'x': 'hello'}
     assert m._private_attr == 123
+
+
+@skip_36
+def test_generic_private_attribute():
+    T = TypeVar('T')
+
+    class Model(GenericModel, Generic[T]):
+        value: T
+        _private_value: T
+
+        class Config:
+            underscore_attrs_are_private = True
+
+    m = Model[int](value=1, _private_attr=3)
+    m._private_value = 3
+    assert m.dict() == {'value': 1}
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 9da7d14ccb2..6c5def13c02 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -374,6 +374,33 @@ class Foo(BaseModel):
     }
 
 
+def test_list_enum_schema_extras():
+    class FoodChoice(str, Enum):
+        spam = 'spam'
+        egg = 'egg'
+        chips = 'chips'
+
+    class Model(BaseModel):
+        foods: List[FoodChoice] = Field(examples=[['spam', 'egg']])
+
+    assert Model.schema() == {
+        'definitions': {
+            'FoodChoice': {
+                'description': 'An enumeration.',
+                'enum': ['spam', 'egg', 'chips'],
+                'title': 'FoodChoice',
+                'type': 'string',
+            }
+        },
+        'properties': {
+            'foods': {'type': 'array', 'items': {'$ref': '#/definitions/FoodChoice'}, 'examples': [['spam', 'egg']]},
+        },
+        'required': ['foods'],
+        'title': 'Model',
+        'type': 'object',
+    }
+
+
 def test_json_schema():
     class Model(BaseModel):
         a = b'foobar'
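Finally, the `get_field_schema_validations` change in `pydantic/schema.py` above is what lets `Field()` extras reach the schema of Enum-typed fields again; a short usage sketch built on the same shape as the new test (class names are illustrative):

```python
from enum import Enum
from typing import List

from pydantic import BaseModel, Field

class FoodChoice(str, Enum):
    spam = 'spam'
    egg = 'egg'

class Menu(BaseModel):
    foods: List[FoodChoice] = Field(..., examples=[['spam', 'egg']])

# the `examples` keyword passed to Field() now shows up on the property
assert Menu.schema()['properties']['foods']['examples'] == [['spam', 'egg']]
```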