diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 5370167fe7a..00cc0b5c6cd 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -4,6 +4,7 @@ on:
push:
branches:
- main
+ - 1.10.X-fixes
tags:
- '**'
pull_request: {}
@@ -27,7 +28,7 @@ jobs:
${{ env.pythonLocation }}
.mypy_cache
key: >
- lint
+ lint-v2
${{ runner.os }}
${{ env.pythonLocation }}
${{ hashFiles('tests/requirements-linting.txt') }}
@@ -77,7 +78,7 @@ jobs:
with:
path: ${{ env.pythonLocation }}
key: >
- docs-build
+ docs-build-v2
${{ runner.os }}
${{ env.pythonLocation }}
${{ hashFiles('setup.py') }}
@@ -103,7 +104,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.7', '3.8', '3.9', '3.10', '3.11.0-rc.1']
+ python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
env:
PYTHON: ${{ matrix.python-version }}
OS: ubuntu
@@ -121,7 +122,7 @@ jobs:
with:
path: ${{ env.pythonLocation }}
key: >
- test-linux-compiled
+ test-linux-compiled-v2
${{ runner.os }}
${{ env.pythonLocation }}
${{ hashFiles('setup.py') }}
@@ -182,7 +183,7 @@ jobs:
with:
path: ${{ env.pythonLocation }}
key: >
- test-not-compiled
+ test-not-compiled-v2
${{ runner.os }}
${{ env.pythonLocation }}
${{ hashFiles('setup.py') }}
@@ -307,8 +308,6 @@ jobs:
build:
name: build py3.${{ matrix.python-version }} on ${{ matrix.platform || matrix.os }}
- needs: [lint, test-linux-compiled, test-not-compiled, test-old-mypy, test-fastapi]
- if: "success() && (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main')"
strategy:
fail-fast: false
matrix:
@@ -366,9 +365,62 @@ jobs:
name: pypi_files
path: dist
+ # https://github.com/marketplace/actions/alls-green#why
+ check: # This job does nothing and is only used for the branch protection
+
+ if: always()
+
+ needs:
+ - lint
+ - docs-build
+ - test-linux-compiled
+ - test-not-compiled
+ - test-old-mypy
+ - test-fastapi
+ - build
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Decide whether the needed jobs succeeded or failed
+ uses: re-actors/alls-green@release/v1
+ with:
+ jobs: ${{ toJSON(needs) }}
+
+ inspect-pypi-assets:
+ needs: [build]
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: get dist artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: pypi_files
+ path: dist
+
+ - name: list dist files
+ run: |
+ ls -lh dist/
+ echo "`ls dist | wc -l` files"
+
+ - name: extract and list sdist file
+ run: |
+ mkdir sdist-files
+ tar -xvf dist/*.tar.gz -C sdist-files
+ tree -a sdist-files
+
+ - name: extract and list wheel file
+ run: |
+ ls dist/*cp310-manylinux*x86_64.whl | head -n 1
+ python -m zipfile --list `ls dist/*cp310-manylinux*x86_64.whl | head -n 1`
+
deploy:
name: Deploy
- needs: build
+ needs:
+ - check
+ - build
if: "success() && startsWith(github.ref, 'refs/tags/')"
runs-on: ubuntu-latest
@@ -412,6 +464,9 @@ jobs:
- name: publish docs
if: '!fromJSON(steps.check-tag.outputs.IS_PRERELEASE)'
- run: make publish-docs
+ uses: cloudflare/wrangler-action@2.0.0
+ with:
+ apiToken: ${{ secrets.cloudflare_api_token }}
+ command: pages publish --project-name=pydantic-docs --branch=main site
env:
- NETLIFY: ${{ secrets.netlify_token }}
+ CLOUDFLARE_ACCOUNT_ID: ${{ secrets.cloudflare_account_id }}
diff --git a/.hooky.toml b/.hooky.toml
new file mode 100644
index 00000000000..78c95dca4e7
--- /dev/null
+++ b/.hooky.toml
@@ -0,0 +1,4 @@
+# configuring https://github.com/pydantic/hooky
+[tool.hooky]
+reviewers = ['samuelcolvin', 'PrettyWood', 'hramezani']
+require_change_file = true
diff --git a/HISTORY.md b/HISTORY.md
index 48b91929dae..9fa10487b9c 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,3 +1,25 @@
+## v1.10.4 (2022-12-30)
+
+* Change dependency to `typing-extensions>=4.2.0`, #4885 by @samuelcolvin
+
+## v1.10.3 (2022-12-29)
+
+**NOTE: v1.10.3 was ["yanked"](https://pypi.org/help/#yanked) from PyPI due to #4885 which is fixed in v1.10.4**
+
+* fix parsing of custom root models, #4883 by @gou177
+* fix: use dataclass proxy for frozen or empty dataclasses, #4878 by @PrettyWood
+* Fix `schema` and `schema_json` on models where a model instance is one of the default values, #4781 by @Bobronium
+* Add Jina AI to sponsors on docs index page, #4767 by @samuelcolvin
+* fix: support assignment on `DataclassProxy`, #4695 by @PrettyWood
+* Add `postgresql+psycopg` as allowed scheme for `PostgreDsn` to make it usable with SQLAlchemy 2, #4689 by @morian
+* Allow dict schemas to have both `patternProperties` and `additionalProperties`, #4641 by @jparise
+* Fixes error passing None for optional lists with `unique_items`, #4568 by @mfulgo
+* Fix `GenericModel` with `Callable` param raising a `TypeError`, #4551 by @mfulgo
+* Fix field regex with `StrictStr` type annotation, #4538 by @sisp
+* Correct `dataclass_transform` keyword argument name from `field_descriptors` to `field_specifiers`, #4500 by @samuelcolvin
+* fix: avoid multiple calls of `__post_init__` when dataclasses are inherited, #4487 by @PrettyWood
+* Reduce the size of binary wheels, #2276 by @samuelcolvin
+
## v1.10.2 (2022-09-05)
* **Revert Change:** Revert percent encoding of URL parts which was originally added in #4224, #4470 by @samuelcolvin
@@ -159,10 +181,10 @@ for their kind support.
### Highlights
* add Python 3.10 support, #2885 by @PrettyWood
-* [Discriminated unions](https://pydantic-docs.helpmanual.io/usage/types/#discriminated-unions-aka-tagged-unions), #619 by @PrettyWood
-* [`Config.smart_union` for better union logic](https://pydantic-docs.helpmanual.io/usage/model_config/#smart-union), #2092 by @PrettyWood
+* [Discriminated unions](https://docs.pydantic.dev/usage/types/#discriminated-unions-aka-tagged-unions), #619 by @PrettyWood
+* [`Config.smart_union` for better union logic](https://docs.pydantic.dev/usage/model_config/#smart-union), #2092 by @PrettyWood
* Binaries for Macos M1 CPUs, #3498 by @samuelcolvin
-* Complex types can be set via [nested environment variables](https://pydantic-docs.helpmanual.io/usage/settings/#parsing-environment-variable-values), e.g. `foo___bar`, #3159 by @Air-Mark
+* Complex types can be set via [nested environment variables](https://docs.pydantic.dev/usage/settings/#parsing-environment-variable-values), e.g. `foo___bar`, #3159 by @Air-Mark
* add a dark mode to _pydantic_ documentation, #2913 by @gbdlin
* Add support for autocomplete in VS Code via `__dataclass_transform__`, #2721 by @tiangolo
* Add "exclude" as a field parameter so that it can be configured using model config, #660 by @daviskirk
@@ -193,7 +215,7 @@ for their kind support.
`pydantic.fields.ModelField`) to `__modify_schema__()` if present, #3434 by @jasujm
* Fix issue when pydantic fail to parse `typing.ClassVar` string type annotation, #3401 by @uriyyo
* Mention Python >= 3.9.2 as an alternative to `typing_extensions.TypedDict`, #3374 by @BvB93
-* Changed the validator method name in the [Custom Errors example](https://pydantic-docs.helpmanual.io/usage/models/#custom-errors)
+* Changed the validator method name in the [Custom Errors example](https://docs.pydantic.dev/usage/models/#custom-errors)
to more accurately describe what the validator is doing; changed from `name_must_contain_space` to ` value_must_equal_bar`, #3327 by @michaelrios28
* Add `AmqpDsn` class, #3254 by @kludex
* Always use `Enum` value as default in generated JSON schema, #3190 by @joaommartins
@@ -215,7 +237,7 @@ for their kind support.
just as when sourced from environment variables, #2917 by @davidmreed
* add a dark mode to _pydantic_ documentation, #2913 by @gbdlin
* Make `pydantic-mypy` plugin compatible with `pyproject.toml` configuration, consistent with `mypy` changes.
- See the [doc](https://pydantic-docs.helpmanual.io/mypy_plugin/#configuring-the-plugin) for more information, #2908 by @jrwalk
+ See the [doc](https://docs.pydantic.dev/mypy_plugin/#configuring-the-plugin) for more information, #2908 by @jrwalk
* add Python 3.10 support, #2885 by @PrettyWood
* Correctly parse generic models with `Json[T]`, #2860 by @geekingfrog
* Update contrib docs re: Python version to use for building docs, #2856 by @paxcodes
@@ -260,7 +282,7 @@ for their kind support.
* Support generating schema for `Generic` fields with subtypes, #2375 by @maximberg
* fix(encoder): serialize `NameEmail` to str, #2341 by @alecgerona
* add `Config.smart_union` to prevent coercion in `Union` if possible, see
- [the doc](https://pydantic-docs.helpmanual.io/usage/model_config/#smart-union) for more information, #2092 by @PrettyWood
+ [the doc](https://docs.pydantic.dev/usage/model_config/#smart-union) for more information, #2092 by @PrettyWood
* Add ability to use `typing.Counter` as a model field type, #2060 by @uriyyo
* Add parameterised subclasses to `__bases__` when constructing new parameterised classes, so that `A <: B => A[int] <: B[int]`, #2007 by @diabolo-dan
* Create `FileUrl` type that allows URLs that conform to [RFC 8089](https://tools.ietf.org/html/rfc8089#section-2).
@@ -310,10 +332,10 @@ for their kind support.
### Highlights
-* [Hypothesis plugin](https://pydantic-docs.helpmanual.io/hypothesis_plugin/) for testing, #2097 by @Zac-HD
-* support for [`NamedTuple` and `TypedDict`](https://pydantic-docs.helpmanual.io/usage/types/#annotated-types), #2216 by @PrettyWood
-* Support [`Annotated` hints on model fields](https://pydantic-docs.helpmanual.io/usage/schema/#typingannotated-fields), #2147 by @JacobHayes
-* [`frozen` parameter on `Config`](https://pydantic-docs.helpmanual.io/usage/model_config/) to allow models to be hashed, #1880 by @rhuille
+* [Hypothesis plugin](https://docs.pydantic.dev/hypothesis_plugin/) for testing, #2097 by @Zac-HD
+* support for [`NamedTuple` and `TypedDict`](https://docs.pydantic.dev/usage/types/#annotated-types), #2216 by @PrettyWood
+* Support [`Annotated` hints on model fields](https://docs.pydantic.dev/usage/schema/#typingannotated-fields), #2147 by @JacobHayes
+* [`frozen` parameter on `Config`](https://docs.pydantic.dev/usage/model_config/) to allow models to be hashed, #1880 by @rhuille
### Changes
@@ -370,7 +392,7 @@ for their kind support.
to validate parameters without actually calling the function, #2127 by @PrettyWood
* Add the ability to customize settings sources (add / disable / change priority order), #2107 by @kozlek
* Fix mypy complaints about most custom _pydantic_ types, #2098 by @PrettyWood
-* Add a [Hypothesis](https://hypothesis.readthedocs.io/) plugin for easier [property-based testing](https://increment.com/testing/in-praise-of-property-based-testing/) with Pydantic's custom types - [usage details here](https://pydantic-docs.helpmanual.io/hypothesis_plugin/), #2097 by @Zac-HD
+* Add a [Hypothesis](https://hypothesis.readthedocs.io/) plugin for easier [property-based testing](https://increment.com/testing/in-praise-of-property-based-testing/) with Pydantic's custom types - [usage details here](https://docs.pydantic.dev/hypothesis_plugin/), #2097 by @Zac-HD
* add validator for `None`, `NoneType` or `Literal[None]`, #2095 by @PrettyWood
* Handle properly fields of type `Callable` with a default value, #2094 by @PrettyWood
* Updated `create_model` return type annotation to return type which inherits from `__base__` argument, #2071 by @uriyyo
@@ -440,9 +462,9 @@ for their kind support.
### Highlights
* Python 3.9 support, thanks @PrettyWood
-* [Private model attributes](https://pydantic-docs.helpmanual.io/usage/models/#private-model-attributes), thanks @Bobronium
-* ["secrets files" support in `BaseSettings`](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support), thanks @mdgilene
-* [convert stdlib dataclasses to pydantic dataclasses and use stdlib dataclasses in models](https://pydantic-docs.helpmanual.io/usage/dataclasses/#stdlib-dataclasses-and-pydantic-dataclasses), thanks @PrettyWood
+* [Private model attributes](https://docs.pydantic.dev/usage/models/#private-model-attributes), thanks @Bobronium
+* ["secrets files" support in `BaseSettings`](https://docs.pydantic.dev/usage/settings/#secret-support), thanks @mdgilene
+* [convert stdlib dataclasses to pydantic dataclasses and use stdlib dataclasses in models](https://docs.pydantic.dev/usage/dataclasses/#stdlib-dataclasses-and-pydantic-dataclasses), thanks @PrettyWood
### Changes
@@ -585,7 +607,7 @@ Thank you to pydantic's sponsors: @matin, @tiangolo, @chdsbd, @jorgecarleitao, a
* **Breaking Change:** alias precedence logic changed so aliases on a field always take priority over
an alias from `alias_generator` to avoid buggy/unexpected behaviour,
- see [here](https://pydantic-docs.helpmanual.io/usage/model_config/#alias-precedence) for details, #1178 by @samuelcolvin
+ see [here](https://docs.pydantic.dev/usage/model_config/#alias-precedence) for details, #1178 by @samuelcolvin
* Add support for unicode and punycode in TLDs, #1182 by @jamescurtin
* Fix `cls` argument in validators during assignment, #1172 by @samuelcolvin
* completing Luhn algorithm for `PaymentCardNumber`, #1166 by @cuencandres
@@ -627,7 +649,7 @@ Thank you to pydantic's sponsors: @matin, @tiangolo, @chdsbd, @jorgecarleitao, a
* **Possible Breaking Change:** Add support for required `Optional` with `name: Optional[AnyType] = Field(...)`
and refactor `ModelField` creation to preserve `required` parameter value, #1031 by @tiangolo;
- see [here](https://pydantic-docs.helpmanual.io/usage/models/#required-optional-fields) for details
+ see [here](https://docs.pydantic.dev/usage/models/#required-optional-fields) for details
* Add benchmarks for `cattrs`, #513 by @sebastianmika
* Add `exclude_none` option to `dict()` and friends, #587 by @niknetniko
* Add benchmarks for `valideer`, #670 by @gsakkis
diff --git a/Makefile b/Makefile
index 5b3dc1e4d17..7ec83515876 100644
--- a/Makefile
+++ b/Makefile
@@ -124,9 +124,3 @@ docs:
docs-serve:
python docs/build/main.py
mkdocs serve
-
-.PHONY: publish-docs
-publish-docs:
- zip -r site.zip site
- @curl -H "Content-Type: application/zip" -H "Authorization: Bearer ${NETLIFY}" \
- --data-binary "@site.zip" https://api.netlify.com/api/v1/sites/pydantic-docs.netlify.com/deploys
diff --git a/README.md b/README.md
index ab27e4d51da..b645489ee8a 100644
--- a/README.md
+++ b/README.md
@@ -15,13 +15,13 @@ Define how data should be in pure, canonical Python 3.7+; validate it with *pyda
## Help
-See [documentation](https://pydantic-docs.helpmanual.io/) for more details.
+See [documentation](https://docs.pydantic.dev/) for more details.
## Installation
Install using `pip install -U pydantic` or `conda install pydantic -c conda-forge`.
For more installation options to make *pydantic* even faster,
-see the [Install](https://pydantic-docs.helpmanual.io/install/) section in the documentation.
+see the [Install](https://docs.pydantic.dev/install/) section in the documentation.
## A Simple Example
@@ -48,7 +48,7 @@ print(user.id)
For guidance on setting up a development environment and how to make a
contribution to *pydantic*, see
-[Contributing to Pydantic](https://pydantic-docs.helpmanual.io/contributing/).
+[Contributing to Pydantic](https://docs.pydantic.dev/contributing/).
## Reporting a Security Vulnerability
diff --git a/docs/blog/pydantic-v2.md b/docs/blog/pydantic-v2.md
index f9ed30d0150..a42b51fa231 100644
--- a/docs/blog/pydantic-v2.md
+++ b/docs/blog/pydantic-v2.md
@@ -15,6 +15,10 @@
Updated late 10 Jul 2022, see [pydantic#4226](https://github.com/pydantic/pydantic/pull/4226).
+Update 30 Dec 2022: **The new release deadline for Pydantic V2 is the end of Q1 2023**,
+see [pydantic#4887](https://github.com/pydantic/pydantic/issues/4887) for more details; further updates
+will be posted on that issue.
+
---
I've spoken to quite a few people about pydantic V2, and mention it in passing even more.
@@ -123,7 +127,7 @@ The motivation for building pydantic-core in Rust is as follows:
pydantic-core is usable now, albeit with an unintuitive API, if you're interested, please give it a try.
pydantic-core provides validators for common data types,
-[see a list here](https://github.com/pydantic/pydantic-core/blob/main/pydantic_core/_types.py#L291).
+[see a list here](https://github.com/pydantic/pydantic-core/blob/main/pydantic_core/schema_types.py#L314).
Other, less commonly used data types will be supported via validator functions implemented in pydantic, in Python.
See [pydantic-core#153](https://github.com/pydantic/pydantic-core/issues/153)
@@ -639,7 +643,7 @@ The word "parse" will no longer be used except when talking about JSON parsing,
Since the core structure of validators has changed from "a list of validators to call one after another" to
"a tree of validators which call each other", the
-[`__get_validators__`](https://pydantic-docs.helpmanual.io/usage/types/#classes-with-__get_validators__)
+[`__get_validators__`](https://docs.pydantic.dev/usage/types/#classes-with-__get_validators__)
way of defining custom field types no longer makes sense.
Instead, we'll look for the attribute `__pydantic_validation_schema__` which must be a
@@ -745,7 +749,7 @@ The emoji here is just for variation, I'm not frowning about any of this, these
specific need to keep the type, you can use wrap validators or custom type validation as described above
6. integers are represented in rust code as `i64`, meaning if you want to use ints where `abs(v) > 2^63 − 1`
(9,223,372,036,854,775,807), you'll need to use a [wrap validator](#validator-function-improvements) and your own logic
-7. [Settings Management](https://pydantic-docs.helpmanual.io/usage/settings/) ??? - I definitely don't want to
+7. [Settings Management](https://docs.pydantic.dev/usage/settings/) ??? - I definitely don't want to
remove the functionality, but it's something of a historical curiosity that it lives within pydantic,
perhaps it should move to a separate package, perhaps installable alongside pydantic with
`pip install pydantic[settings]`?
diff --git a/docs/build/exec_examples.py b/docs/build/exec_examples.py
index 34a4a4f7f1b..648a6268045 100755
--- a/docs/build/exec_examples.py
+++ b/docs/build/exec_examples.py
@@ -40,11 +40,12 @@
```
""".strip()
JSON_OUTPUT_MD_TMPL = """
+
Outputs:
```json
{output}
```
-""".strip()
+"""
def to_string(value: Any) -> str:
diff --git a/docs/build/main.py b/docs/build/main.py
index f55237867d0..296d1e24b07 100755
--- a/docs/build/main.py
+++ b/docs/build/main.py
@@ -10,7 +10,7 @@
def main() -> int:
history = (PROJECT_ROOT / 'HISTORY.md').read_text()
- history = re.sub(r'#(\d+)', r'[#\1](https://github.com/pydantic/pydantic/issues/\1)', history)
+ history = re.sub(r'(\s)#(\d+)', r'\1[#\2](https://github.com/pydantic/pydantic/issues/\2)', history)
history = re.sub(r'(\s)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I)
history = re.sub('@@', '@', history)
diff --git a/docs/examples/models_orm_mode_reserved_name.py b/docs/examples/models_orm_mode_reserved_name.py
index 4ebe1e83c3d..76a67dff240 100644
--- a/docs/examples/models_orm_mode_reserved_name.py
+++ b/docs/examples/models_orm_mode_reserved_name.py
@@ -12,10 +12,10 @@ class Config:
orm_mode = True
-BaseModel = declarative_base()
+Base = declarative_base()
-class SQLModel(BaseModel):
+class SQLModel(Base):
__tablename__ = 'my_table'
id = sa.Column('id', sa.Integer, primary_key=True)
# 'metadata' is reserved by SQLAlchemy, hence the '_'
diff --git a/docs/examples/postponed_annotations_broken.py b/docs/examples/postponed_annotations_broken.py
index 9dec4f5553b..e62470cc6da 100644
--- a/docs/examples/postponed_annotations_broken.py
+++ b/docs/examples/postponed_annotations_broken.py
@@ -4,7 +4,7 @@
def this_is_broken():
- from pydantic import HttpUrl # HttpUrl is defined in functuon local scope
+ from pydantic import HttpUrl # HttpUrl is defined in function local scope
class Model(BaseModel):
a: HttpUrl
diff --git a/docs/examples/schema_with_field.py b/docs/examples/schema_with_field.py
index 9288efd8cdf..860903e55be 100644
--- a/docs/examples/schema_with_field.py
+++ b/docs/examples/schema_with_field.py
@@ -1,5 +1,5 @@
# output-json
-from typing import Optional
+from typing import Any, Callable, Dict, Generator, Optional
from pydantic import BaseModel, Field
from pydantic.fields import ModelField
@@ -7,18 +7,20 @@
class RestrictedAlphabetStr(str):
@classmethod
- def __get_validators__(cls):
+ def __get_validators__(cls) -> Generator[Callable, None, None]:
yield cls.validate
@classmethod
- def validate(cls, value, field: ModelField):
+ def validate(cls, value: str, field: ModelField):
alphabet = field.field_info.extra['alphabet']
if any(c not in alphabet for c in value):
raise ValueError(f'{value!r} is not restricted to {alphabet!r}')
return cls(value)
@classmethod
- def __modify_schema__(cls, field_schema, field: Optional[ModelField]):
+ def __modify_schema__(
+ cls, field_schema: Dict[str, Any], field: Optional[ModelField]
+ ):
if field:
alphabet = field.field_info.extra['alphabet']
field_schema['examples'] = [c * 3 for c in alphabet]
diff --git a/docs/extra/tweaks.css b/docs/extra/tweaks.css
index 374129d04fa..7a615a3bc28 100644
--- a/docs/extra/tweaks.css
+++ b/docs/extra/tweaks.css
@@ -24,7 +24,7 @@
.sponsors > div {
text-align: center;
- width: 25%;
+ width: 33%;
padding-bottom: 20px;
}
diff --git a/docs/index.md b/docs/index.md
index d526cea1954..949eb434d1b 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -66,6 +66,12 @@ Development of *pydantic* is made possible by the following sponsors:
SendCloud
+
And many more who kindly sponsor Samuel Colvin on [GitHub Sponsors](https://github.com/sponsors/samuelcolvin#sponsors).
diff --git a/docs/sponsor_logos/jina-ai.png b/docs/sponsor_logos/jina-ai.png
new file mode 100644
index 00000000000..39242cbb2e0
Binary files /dev/null and b/docs/sponsor_logos/jina-ai.png differ
diff --git a/docs/usage/model_config.md b/docs/usage/model_config.md
index 656cf5144e9..d149385acde 100644
--- a/docs/usage/model_config.md
+++ b/docs/usage/model_config.md
@@ -55,7 +55,7 @@ Similarly, if using the `@dataclass` decorator:
**`fields`**
: a `dict` containing schema information for each field; this is equivalent to
using [the `Field` class](schema.md), except when a field is already
- defined trough annotation or the Field class, in which case only
+ defined through annotation or the Field class, in which case only
`alias`, `include`, `exclude`, `min_length`, `max_length`, `regex`, `gt`, `lt`, `gt`, `le`,
`multiple_of`, `max_digits`, `decimal_places`, `min_items`, `max_items`, `unique_items`
and allow_mutation can be set (for example you cannot set default of default_factory)
@@ -127,7 +127,7 @@ with the following means (see [#4093](https://github.com/pydantic/pydantic/pull/
or after (value `'after_validation'`) parsing and validation when they are [converted](dataclasses.md#stdlib-dataclasses-and-_pydantic_-dataclasses).
**`allow_inf_nan`**
-: whether to allows infinity (`+inf` an `-inf`) and NaN values to float fields, defaults to `True`,
+: whether to allow infinity (`+inf` and `-inf`) and NaN values to float fields, defaults to `True`,
set to `False` for compatibility with `JSON`,
see [#3994](https://github.com/pydantic/pydantic/pull/3994) for more details, added in **V1.10**
diff --git a/docs/usage/models.md b/docs/usage/models.md
index bf5d765bb34..24a72441420 100644
--- a/docs/usage/models.md
+++ b/docs/usage/models.md
@@ -435,7 +435,7 @@ as the value:
{!.tmp_examples/models_required_fields.md!}
-Where `Field` refers to the [field function](schema.md#field-customisation).
+Where `Field` refers to the [field function](schema.md#field-customization).
Here `a`, `b` and `c` are all required. However, use of the ellipses in `b` will not work well
with [mypy](mypy.md), and as of **v1.0** should be avoided in most cases.
@@ -471,7 +471,7 @@ Example of usage:
{!.tmp_examples/models_default_factory.md!}
-Where `Field` refers to the [field function](schema.md#field-customisation).
+Where `Field` refers to the [field function](schema.md#field-customization).
!!! warning
The `default_factory` expects the field type to be set.
diff --git a/docs/usage/schema.md b/docs/usage/schema.md
index 57b22fa1ef1..061d2dbddfe 100644
--- a/docs/usage/schema.md
+++ b/docs/usage/schema.md
@@ -190,7 +190,3 @@ The callable is expected to mutate the schema dictionary *in-place*; the return
For example, the `title` key can be removed from the model's `properties`:
{!.tmp_examples/schema_extra_callable.md!}
-
-
-
-```
diff --git a/docs/usage/types.md b/docs/usage/types.md
index 27109f281fb..0b6d06c957d 100644
--- a/docs/usage/types.md
+++ b/docs/usage/types.md
@@ -310,7 +310,7 @@ types:
* `int` or `float`, assumed as Unix time, i.e. seconds (if >= `-2e10` or <= `2e10`) or milliseconds (if < `-2e10`or > `2e10`) since 1 January 1970
* `str`, following formats work:
- * `YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]]]`
+ * `YYYY-MM-DD[T]HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]`
* `int` or `float` as a string (assumed as Unix time)
* `date` fields can be:
@@ -327,7 +327,7 @@ types:
* `time`, existing `time` object
* `str`, following formats work:
- * `HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]]]`
+ * `HH:MM[:SS[.ffffff]][Z or [±]HH[:]MM]`
* `timedelta` fields can be:
@@ -490,7 +490,7 @@ With proper ordering in an annotated `Union`, you can use this to parse types of
`CockroachDsn`
: a cockroachdb DSN style URL; see [URLs](#urls)
-`RabbitMqDsn`
+`AmqpDsn`
: an `AMQP` DSN style URL as used by RabbitMQ, StormMQ, ActiveMQ etc.; see [URLs](#urls)
`RedisDsn`
@@ -598,6 +598,7 @@ For URI/URL validation the following types are available:
- `postgresql`
- `postgresql+asyncpg`
- `postgresql+pg8000`
+ - `postgresql+psycopg`
- `postgresql+psycopg2`
- `postgresql+psycopg2cffi`
- `postgresql+py-postgresql`
@@ -606,9 +607,9 @@ For URI/URL validation the following types are available:
- `cockroachdb+asyncpg`
- `cockroachdb+psycopg2`
- `AmqpDsn`: schema `amqp` or `amqps`, user info not required, TLD not required, host not required
-- `RedisDsn`: scheme `redis` or `rediss`, user info not required, tld not required, host not required (CHANGED: user info
+- `RedisDsn`: scheme `redis` or `rediss`, user info not required, tld not required, host not required (CHANGED: user info not required from **v1.6** onwards), user info may be passed without user part (e.g., `rediss://:pass@localhost`)
- `MongoDsn` : scheme `mongodb`, user info not required, database name not required, port
- not required from **v1.6** onwards), user info may be passed without user part (e.g., `rediss://:pass@localhost`)
+  not required from **v1.6** onwards (e.g., `mongodb://mongodb0.example.com:27017`)
- `stricturl`: method with the following keyword arguments:
- `strip_whitespace: bool = True`
- `min_length: int = 1`
@@ -780,7 +781,7 @@ The value of numerous common types can be restricted using `con*` type functions
{!.tmp_examples/types_constrained.md!}
-Where `Field` refers to the [field function](schema.md#field-customisation).
+Where `Field` refers to the [field function](schema.md#field-customization).
### Arguments to `conlist`
The following arguments are available when using the `conlist` type function
diff --git a/docs/usage/validation_decorator.md b/docs/usage/validation_decorator.md
index fad7a5f5f3b..d0dc1474b73 100644
--- a/docs/usage/validation_decorator.md
+++ b/docs/usage/validation_decorator.md
@@ -48,14 +48,14 @@ To demonstrate all the above parameter types:
## Using Field to describe function arguments
-[Field](schema.md#field-customisation) can also be used with `validate_arguments` to provide extra information about
+[Field](schema.md#field-customization) can also be used with `validate_arguments` to provide extra information about
the field and validations. In general it should be used in a type hint with
[Annotated](schema.md#typingannotated-fields), unless `default_factory` is specified, in which case it should be used
as the default value of the field:
{!.tmp_examples/validation_decorator_field.md!}
-The [alias](model_config#alias-precedence) can be used with the decorator as normal.
+The [alias](model_config.md#alias-precedence) can be used with the decorator as normal.
{!.tmp_examples/validation_decorator_field_alias.md!}
diff --git a/mkdocs.yml b/mkdocs.yml
index 6804017a0e2..0fd68a5986c 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,7 +1,7 @@
site_name: pydantic
site_description: Data validation and settings management using Python type hints
strict: true
-site_url: https://pydantic-docs.helpmanual.io/
+site_url: https://docs.pydantic.dev/
theme:
name: 'material'
@@ -28,6 +28,7 @@ theme:
repo_name: pydantic/pydantic
repo_url: https://github.com/pydantic/pydantic
+edit_uri: edit/main/docs/
extra:
analytics:
provider: google
diff --git a/pydantic/_hypothesis_plugin.py b/pydantic/_hypothesis_plugin.py
index a56d2b98df8..d175d207f2f 100644
--- a/pydantic/_hypothesis_plugin.py
+++ b/pydantic/_hypothesis_plugin.py
@@ -10,7 +10,7 @@
https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points
https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy
https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov
-https://pydantic-docs.helpmanual.io/usage/types/#pydantic-types
+https://docs.pydantic.dev/usage/types/#pydantic-types
Note that because our motivation is to *improve user experience*, the strategies
are always sound (never generate invalid data) but sacrifice completeness for
diff --git a/pydantic/config.py b/pydantic/config.py
index 74687ca0363..3ce8d6df71a 100644
--- a/pydantic/config.py
+++ b/pydantic/config.py
@@ -68,9 +68,7 @@ class ConfigDict(TypedDict, total=False):
json_encoders: Dict[Type[object], AnyCallable]
underscore_attrs_are_private: bool
allow_inf_nan: bool
-
- # whether or not inherited models as fields should be reconstructed as base model
- copy_on_model_validation: bool
+ copy_on_model_validation: Literal['none', 'deep', 'shallow']
# whether dataclass `__post_init__` should be run after validation
post_init_call: Literal['before_validation', 'after_validation']
diff --git a/pydantic/dataclasses.py b/pydantic/dataclasses.py
index 6833112702d..913d8cc6934 100644
--- a/pydantic/dataclasses.py
+++ b/pydantic/dataclasses.py
@@ -34,20 +34,7 @@ class M:
import sys
from contextlib import contextmanager
from functools import wraps
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- ClassVar,
- Dict,
- Generator,
- Optional,
- Set,
- Type,
- TypeVar,
- Union,
- overload,
-)
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload
from typing_extensions import dataclass_transform
@@ -105,7 +92,7 @@ def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
if sys.version_info >= (3, 10):
- @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo))
+ @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
@overload
def dataclass(
*,
@@ -117,11 +104,12 @@ def dataclass(
frozen: bool = False,
config: Union[ConfigDict, Type[object], None] = None,
validate_on_init: Optional[bool] = None,
+ use_proxy: Optional[bool] = None,
kw_only: bool = ...,
) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
...
- @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo))
+ @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
@overload
def dataclass(
_cls: Type[_T],
@@ -134,13 +122,14 @@ def dataclass(
frozen: bool = False,
config: Union[ConfigDict, Type[object], None] = None,
validate_on_init: Optional[bool] = None,
+ use_proxy: Optional[bool] = None,
kw_only: bool = ...,
) -> 'DataclassClassOrWrapper':
...
else:
- @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo))
+ @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
@overload
def dataclass(
*,
@@ -152,10 +141,11 @@ def dataclass(
frozen: bool = False,
config: Union[ConfigDict, Type[object], None] = None,
validate_on_init: Optional[bool] = None,
+ use_proxy: Optional[bool] = None,
) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']:
...
- @dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo))
+ @dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
@overload
def dataclass(
_cls: Type[_T],
@@ -168,11 +158,12 @@ def dataclass(
frozen: bool = False,
config: Union[ConfigDict, Type[object], None] = None,
validate_on_init: Optional[bool] = None,
+ use_proxy: Optional[bool] = None,
) -> 'DataclassClassOrWrapper':
...
-@dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo))
+@dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
def dataclass(
_cls: Optional[Type[_T]] = None,
*,
@@ -184,6 +175,7 @@ def dataclass(
frozen: bool = False,
config: Union[ConfigDict, Type[object], None] = None,
validate_on_init: Optional[bool] = None,
+ use_proxy: Optional[bool] = None,
kw_only: bool = False,
) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']:
"""
@@ -197,7 +189,15 @@ def dataclass(
def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper':
import dataclasses
- if is_builtin_dataclass(cls) and _extra_dc_args(_cls) == _extra_dc_args(_cls.__bases__[0]): # type: ignore
+ should_use_proxy = (
+ use_proxy
+ if use_proxy is not None
+ else (
+ is_builtin_dataclass(cls)
+ and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0])))
+ )
+ )
+ if should_use_proxy:
dc_cls_doc = ''
dc_cls = DataclassProxy(cls)
default_validate_on_init = False
@@ -254,6 +254,9 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any:
def __getattr__(self, name: str) -> Any:
return getattr(self.__dataclass__, name)
+ def __setattr__(self, __name: str, __value: Any) -> None:
+ return setattr(self.__dataclass__, __name, __value)
+
def __instancecheck__(self, instance: Any) -> bool:
return isinstance(instance, self.__dataclass__)
@@ -285,7 +288,10 @@ def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
init(self, *args, **kwargs)
if hasattr(dc_cls, '__post_init__'):
- post_init = dc_cls.__post_init__
+ try:
+ post_init = dc_cls.__post_init__.__wrapped__ # type: ignore[attr-defined]
+ except AttributeError:
+ post_init = dc_cls.__post_init__
@wraps(post_init)
def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None:
@@ -431,14 +437,6 @@ def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value:
object.__setattr__(self, name, value)
-def _extra_dc_args(cls: Type[Any]) -> Set[str]:
- return {
- x
- for x in dir(cls)
- if x not in getattr(cls, '__dataclass_fields__', {}) and not (x.startswith('__') and x.endswith('__'))
- }
-
-
def is_builtin_dataclass(_cls: Type[Any]) -> bool:
"""
Whether a class is a stdlib dataclass
@@ -476,4 +474,4 @@ def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]
and yield the validators
It retrieves the parameters of the dataclass and forwards them to the newly created dataclass
"""
- yield from _get_validators(dataclass(dc_cls, config=config, validate_on_init=False))
+ yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True))
diff --git a/pydantic/generics.py b/pydantic/generics.py
index a3f52bfee96..ece421e8ba2 100644
--- a/pydantic/generics.py
+++ b/pydantic/generics.py
@@ -64,7 +64,11 @@ def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[T
"""
def _cache_key(_params: Any) -> Tuple[Type[GenericModelT], Any, Tuple[Any, ...]]:
- return cls, _params, get_args(_params)
+ args = get_args(_params)
+ # python returns a list for Callables, which is not hashable
+ if len(args) == 2 and isinstance(args[0], list):
+ args = (tuple(args[0]), args[1])
+ return cls, _params, args
cached = _generic_types_cache.get(_cache_key(params))
if cached is not None:
diff --git a/pydantic/main.py b/pydantic/main.py
index 69f3b75120d..361c9669d79 100644
--- a/pydantic/main.py
+++ b/pydantic/main.py
@@ -118,7 +118,7 @@ def hash_function(self_: Any) -> int:
_is_base_model_class_defined = False
-@dataclass_transform(kw_only_default=True, field_descriptors=(Field, FieldInfo))
+@dataclass_transform(kw_only_default=True, field_specifiers=(Field, FieldInfo))
class ModelMetaclass(ABCMeta):
@no_type_check # noqa C901
def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901
@@ -508,6 +508,7 @@ def json(
def _enforce_dict_if_root(cls, obj: Any) -> Any:
if cls.__custom_root_type__ and (
not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY})
+ and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY})
or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES
):
return {ROOT_KEY: obj}
diff --git a/pydantic/networks.py b/pydantic/networks.py
index c7d97186b93..e1eef7b576f 100644
--- a/pydantic/networks.py
+++ b/pydantic/networks.py
@@ -490,6 +490,7 @@ class PostgresDsn(MultiHostDsn):
'postgresql',
'postgresql+asyncpg',
'postgresql+pg8000',
+ 'postgresql+psycopg',
'postgresql+psycopg2',
'postgresql+psycopg2cffi',
'postgresql+py-postgresql',
diff --git a/pydantic/schema.py b/pydantic/schema.py
index e7af56f120d..a73916458df 100644
--- a/pydantic/schema.py
+++ b/pydantic/schema.py
@@ -1,6 +1,7 @@
import re
import warnings
from collections import defaultdict
+from dataclasses import is_dataclass
from datetime import date, datetime, time, timedelta
from decimal import Decimal
from enum import Enum
@@ -490,7 +491,7 @@ def field_type_schema(
# Dict keys have a regex pattern
# items_schema might be a schema or empty dict, add it either way
f_schema['patternProperties'] = {regex.pattern: items_schema}
- elif items_schema:
+ if items_schema:
# The dict values are not simply Any, so they need a schema
f_schema['additionalProperties'] = items_schema
elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)):
@@ -657,11 +658,13 @@ def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None)
This is similar to the `model_process_schema` function, but applies to ``Enum`` objects.
"""
+ import inspect
+
schema_: Dict[str, Any] = {
'title': enum.__name__,
# Python assigns all enums a default docstring value of 'An enumeration', so
# all enums will have a description field even if not explicitly provided.
- 'description': enum.__doc__ or 'An enumeration.',
+ 'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'),
# Add enum values and the enum field type to the schema.
'enum': [item.value for item in cast(Iterable[Enum], enum)],
}
@@ -969,7 +972,14 @@ def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelFie
def encode_default(dft: Any) -> Any:
- if isinstance(dft, Enum):
+ from .main import BaseModel
+
+ if isinstance(dft, BaseModel) or is_dataclass(dft):
+ dft = cast('dict[str, Any]', pydantic_encoder(dft))
+
+ if isinstance(dft, dict):
+ return {encode_default(k): encode_default(v) for k, v in dft.items()}
+ elif isinstance(dft, Enum):
return dft.value
elif isinstance(dft, (int, float, str)):
return dft
@@ -977,8 +987,6 @@ def encode_default(dft: Any) -> Any:
t = dft.__class__
seq_args = (encode_default(v) for v in dft)
return t(*seq_args) if is_namedtuple(t) else t(seq_args)
- elif isinstance(dft, dict):
- return {encode_default(k): encode_default(v) for k, v in dft.items()}
elif dft is None:
return None
else:
@@ -1011,7 +1019,7 @@ def get_annotation_from_field_info(
raise ValueError(
f'On field "{field_name}" the following field constraints are set but not enforced: '
f'{", ".join(unused_constraints)}. '
- f'\nFor more details see https://pydantic-docs.helpmanual.io/usage/schema/#unenforced-field-constraints'
+ f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints'
)
return annotation
diff --git a/pydantic/types.py b/pydantic/types.py
index f98dba3de49..9438a6a829f 100644
--- a/pydantic/types.py
+++ b/pydantic/types.py
@@ -403,7 +403,7 @@ class ConstrainedStr(str):
min_length: OptionalInt = None
max_length: OptionalInt = None
curtail_length: OptionalInt = None
- regex: Optional[Pattern[str]] = None
+ regex: Optional[Union[str, Pattern[str]]] = None
strict = False
@classmethod
@@ -412,7 +412,7 @@ def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema,
minLength=cls.min_length,
maxLength=cls.max_length,
- pattern=cls.regex and cls.regex.pattern,
+ pattern=cls.regex and cls._get_pattern(cls.regex),
)
@classmethod
@@ -430,11 +430,15 @@ def validate(cls, value: Union[str]) -> Union[str]:
value = value[: cls.curtail_length]
if cls.regex:
- if not cls.regex.match(value):
- raise errors.StrRegexError(pattern=cls.regex.pattern)
+ if not re.match(cls.regex, value):
+ raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex))
return value
+ @staticmethod
+ def _get_pattern(regex: Union[str, Pattern[str]]) -> str:
+ return regex if isinstance(regex, str) else regex.pattern
+
def constr(
*,
@@ -595,7 +599,10 @@ def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
return v
@classmethod
- def unique_items_validator(cls, v: 'List[T]') -> 'List[T]':
+ def unique_items_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
+ if v is None:
+ return None
+
for i, value in enumerate(v, start=1):
if value in v[i:]:
raise errors.ListUniqueItemsError()
diff --git a/pydantic/version.py b/pydantic/version.py
index 32c61633f70..a5797d56b08 100644
--- a/pydantic/version.py
+++ b/pydantic/version.py
@@ -1,6 +1,6 @@
__all__ = 'compiled', 'VERSION', 'version_info'
-VERSION = '1.10.2'
+VERSION = '1.10.4'
try:
import cython # type: ignore
diff --git a/setup.py b/setup.py
index d677cd7765a..91d82e40491 100644
--- a/setup.py
+++ b/setup.py
@@ -60,7 +60,7 @@ def extra(self):
THIS_DIR = Path(__file__).resolve().parent
try:
history = (THIS_DIR / 'HISTORY.md').read_text(encoding='utf-8')
- history = re.sub(r'#(\d+)', r'[#\1](https://github.com/pydantic/pydantic/issues/\1)', history)
+ history = re.sub(r'(\s)#(\d+)', r'\1[#\2](https://github.com/pydantic/pydantic/issues/\2)', history)
history = re.sub(r'( +)@([\w\-]+)', r'\1[@\2](https://github.com/\2)', history, flags=re.I)
history = re.sub('@@', '@', history)
@@ -82,9 +82,9 @@ def extra(self):
compiler_directives = {}
if 'CYTHON_TRACE' in sys.argv:
compiler_directives['linetrace'] = True
- # Set CFLAG to all optimizations (-O3)
+ # Set CFLAG to all optimizations (-O3), add `-g0` to reduce size of binaries, see #2276
# Any additional CFLAGS will be appended. Only the last optimization flag will have effect
- os.environ['CFLAGS'] = '-O3 ' + os.environ.get('CFLAGS', '')
+ os.environ['CFLAGS'] = '-O3 -g0 ' + os.environ.get('CFLAGS', '')
ext_modules = cythonize(
'pydantic/*.py',
exclude=['pydantic/generics.py'],
@@ -130,7 +130,7 @@ def extra(self):
python_requires='>=3.7',
zip_safe=False, # https://mypy.readthedocs.io/en/latest/installed_packages.html
install_requires=[
- 'typing-extensions>=4.1.0'
+ 'typing-extensions>=4.2.0'
],
extras_require={
'email': ['email-validator>=1.0.3'],
diff --git a/tests/requirements-testing.txt b/tests/requirements-testing.txt
index f0276206af1..bb59933804c 100644
--- a/tests/requirements-testing.txt
+++ b/tests/requirements-testing.txt
@@ -7,3 +7,5 @@ pytest==7.1.2
pytest-cov==3.0.0
pytest-mock==3.8.2
pytest-sugar==0.9.5
+# pin typing-extensions to minimum requirement - see #4885
+typing-extensions==4.2.0
diff --git a/tests/test_dataclasses.py b/tests/test_dataclasses.py
index e2027476bc0..1686ff2672f 100644
--- a/tests/test_dataclasses.py
+++ b/tests/test_dataclasses.py
@@ -1472,3 +1472,139 @@ def __post_init__(self):
self.a *= 3
assert C().a == 6 # 1 * 3 + 3
+
+
+def test_inheritance_post_init_2():
+ post_init_calls = 0
+ post_init_post_parse_calls = 0
+
+ @pydantic.dataclasses.dataclass
+ class BaseClass:
+ def __post_init__(self):
+ nonlocal post_init_calls
+ post_init_calls += 1
+
+ @pydantic.dataclasses.dataclass
+ class AbstractClass(BaseClass):
+ pass
+
+ @pydantic.dataclasses.dataclass
+ class ConcreteClass(AbstractClass):
+ def __post_init_post_parse__(self):
+ nonlocal post_init_post_parse_calls
+ post_init_post_parse_calls += 1
+
+ ConcreteClass()
+ assert post_init_calls == 1
+ assert post_init_post_parse_calls == 1
+
+
+def test_dataclass_setattr():
+ class Foo:
+ bar: str = 'cat'
+
+ default_config = dataclasses.make_dataclass(
+ cls_name=Foo.__name__,
+ bases=(dataclasses.dataclass(Foo),),
+ fields=[('bar', ClassVar[str], dataclasses.field(default=Foo.bar))],
+ )
+
+ config = pydantic.dataclasses.dataclass(default_config)
+ assert config.bar == 'cat'
+ setattr(config, 'bar', 'dog')
+ assert config.bar == 'dog'
+
+
+def test_frozen_dataclasses():
+ @dataclasses.dataclass(frozen=True)
+ class First:
+ a: int
+
+ @dataclasses.dataclass(frozen=True)
+ class Second(First):
+ @property
+ def b(self):
+ return self.a
+
+ class My(BaseModel):
+ my: Second
+
+ assert My(my=Second(a='1')).my.b == 1
+
+
+def test_empty_dataclass():
+ """should be able to inherit without adding a field"""
+
+ @dataclasses.dataclass
+ class UnvalidatedDataclass:
+ a: int = 0
+
+ @pydantic.dataclasses.dataclass
+ class ValidatedDerivedA(UnvalidatedDataclass):
+ ...
+
+ @pydantic.dataclasses.dataclass()
+ class ValidatedDerivedB(UnvalidatedDataclass):
+ b: int = 0
+
+ @pydantic.dataclasses.dataclass()
+ class ValidatedDerivedC(UnvalidatedDataclass):
+ ...
+
+
+def test_proxy_dataclass():
+ @dataclasses.dataclass
+ class Foo:
+ a: Optional[int] = dataclasses.field(default=42)
+ b: List = dataclasses.field(default_factory=list)
+
+ @dataclasses.dataclass
+ class Bar:
+ pass
+
+ @dataclasses.dataclass
+ class Model1:
+ foo: Foo
+
+ class Model2(BaseModel):
+ foo: Foo
+
+ m1 = Model1(foo=Foo())
+ m2 = Model2(foo=Foo())
+
+ assert m1.foo.a == m2.foo.a == 42
+ assert m1.foo.b == m2.foo.b == []
+ assert m1.foo.Bar() is not None
+ assert m2.foo.Bar() is not None
+
+
+def test_proxy_dataclass_2():
+ @dataclasses.dataclass
+ class M1:
+ a: int
+ b: str = 'b'
+ c: float = dataclasses.field(init=False)
+
+ def __post_init__(self):
+ self.c = float(self.a)
+
+ @dataclasses.dataclass
+ class M2:
+ a: int
+ b: str = 'b'
+ c: float = dataclasses.field(init=False)
+
+ def __post_init__(self):
+ self.c = float(self.a)
+
+ @pydantic.validator('b')
+ def check_b(cls, v):
+ if not v:
+ raise ValueError('b should not be empty')
+ return v
+
+ m1 = pydantic.parse_obj_as(M1, {'a': 3})
+ m2 = pydantic.parse_obj_as(M2, {'a': 3})
+ assert m1.a == m2.a == 3
+ assert m1.b == m2.b == 'b'
+ assert m1.c == m2.c == 3.0
diff --git a/tests/test_decorator.py b/tests/test_decorator.py
index 686f253c78c..b6b7bb492d1 100644
--- a/tests/test_decorator.py
+++ b/tests/test_decorator.py
@@ -266,10 +266,9 @@ async def run():
v = await foo(1, 2)
assert v == 'a=1 b=2'
- loop = asyncio.get_event_loop_policy().get_event_loop()
- loop.run_until_complete(run())
+ asyncio.run(run())
with pytest.raises(ValidationError) as exc_info:
- loop.run_until_complete(foo('x'))
+ asyncio.run(foo('x'))
assert exc_info.value.errors() == [{'loc': ('b',), 'msg': 'field required', 'type': 'value_error.missing'}]
diff --git a/tests/test_generics.py b/tests/test_generics.py
index 39adc45f20e..371272b1caf 100644
--- a/tests/test_generics.py
+++ b/tests/test_generics.py
@@ -7,6 +7,7 @@
ClassVar,
Dict,
Generic,
+ Iterable,
List,
Mapping,
Optional,
@@ -234,6 +235,32 @@ class Model(GenericModel, Generic[T]):
assert len(_generic_types_cache) == cache_size + 2
+def test_cache_keys_are_hashable():
+ cache_size = len(_generic_types_cache)
+ T = TypeVar('T')
+ C = Callable[[str, Dict[str, Any]], Iterable[str]]
+
+ class MyGenericModel(GenericModel, Generic[T]):
+ t: T
+
+ # Callable's first params get converted to a list, which is not hashable.
+ # Make sure we can handle that special case
+ Simple = MyGenericModel[Callable[[int], str]]
+ assert len(_generic_types_cache) == cache_size + 2
+ # Nested Callables
+ MyGenericModel[Callable[[C], Iterable[str]]]
+ assert len(_generic_types_cache) == cache_size + 4
+ MyGenericModel[Callable[[Simple], Iterable[int]]]
+ assert len(_generic_types_cache) == cache_size + 6
+ MyGenericModel[Callable[[MyGenericModel[C]], Iterable[int]]]
+ assert len(_generic_types_cache) == cache_size + 10
+
+ class Model(BaseModel):
+ x: MyGenericModel[Callable[[C], Iterable[str]]] = Field(...)
+
+ assert len(_generic_types_cache) == cache_size + 10
+
+
def test_generic_config():
data_type = TypeVar('data_type')
diff --git a/tests/test_networks.py b/tests/test_networks.py
index 2f4ed2dfe6a..eb717f96d21 100644
--- a/tests/test_networks.py
+++ b/tests/test_networks.py
@@ -36,6 +36,7 @@
'postgres://just-user@localhost:5432/app',
'postgresql+asyncpg://user:pass@localhost:5432/app',
'postgresql+pg8000://user:pass@localhost:5432/app',
+ 'postgresql+psycopg://postgres:postgres@localhost:5432/hatch',
'postgresql+psycopg2://postgres:postgres@localhost:5432/hatch',
'postgresql+psycopg2cffi://user:pass@localhost:5432/app',
'postgresql+py-postgresql://user:pass@localhost:5432/app',
@@ -262,9 +263,9 @@ def test_at_in_path():
def test_fragment_without_query():
- url = validate_url('https://pydantic-docs.helpmanual.io/usage/types/#constrained-types')
+ url = validate_url('https://docs.pydantic.dev/usage/types/#constrained-types')
assert url.scheme == 'https'
- assert url.host == 'pydantic-docs.helpmanual.io'
+ assert url.host == 'docs.pydantic.dev'
assert url.path == '/usage/types/'
assert url.query is None
assert url.fragment == 'constrained-types'
diff --git a/tests/test_parse.py b/tests/test_parse.py
index a0260fba8b8..a7d2287fff1 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -46,6 +46,7 @@ class MyModel(BaseModel):
m = MyModel.parse_obj('a')
assert m.dict() == {'__root__': 'a'}
assert m.__root__ == 'a'
+ assert MyModel.parse_obj(m) == m
def test_parse_root_list():
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 143768a8005..6b070f98ed6 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -459,6 +459,34 @@ class Model(BaseModel):
}
+def test_enum_schema_cleandoc():
+ class FooBar(str, Enum):
+ """
+ This is docstring which needs to be cleaned up
+ """
+
+ foo = 'foo'
+ bar = 'bar'
+
+ class Model(BaseModel):
+ enum: FooBar
+
+ assert Model.schema() == {
+ 'title': 'Model',
+ 'type': 'object',
+ 'properties': {'enum': {'$ref': '#/definitions/FooBar'}},
+ 'required': ['enum'],
+ 'definitions': {
+ 'FooBar': {
+ 'title': 'FooBar',
+ 'description': 'This is docstring which needs to be cleaned up',
+ 'enum': ['foo', 'bar'],
+ 'type': 'string',
+ }
+ },
+ }
+
+
def test_json_schema():
class Model(BaseModel):
a = b'foobar'
@@ -1495,6 +1523,38 @@ class UserModel(BaseModel):
}
+def test_model_default():
+ """Make sure inner model types are encoded properly"""
+
+ class Inner(BaseModel):
+ a: Dict[Path, str] = {Path(): ''}
+
+ class Outer(BaseModel):
+ inner: Inner = Inner()
+
+ assert Outer.schema() == {
+ 'definitions': {
+ 'Inner': {
+ 'properties': {
+ 'a': {
+ 'additionalProperties': {'type': 'string'},
+ 'default': {'.': ''},
+ 'title': 'A',
+ 'type': 'object',
+ }
+ },
+ 'title': 'Inner',
+ 'type': 'object',
+ }
+ },
+ 'properties': {
+ 'inner': {'allOf': [{'$ref': '#/definitions/Inner'}], 'default': {'a': {'.': ''}}, 'title': 'Inner'}
+ },
+ 'title': 'Outer',
+ 'type': 'object',
+ }
+
+
@pytest.mark.parametrize(
'kwargs,type_,expected_extra',
[
@@ -1654,7 +1714,13 @@ class Foo(BaseModel):
'title': 'Foo',
'type': 'object',
'properties': {
- 'a': {'type': 'object', 'title': 'A', 'default': {}, 'patternProperties': {regex_str: {'type': 'string'}}}
+ 'a': {
+ 'type': 'object',
+ 'title': 'A',
+ 'default': {},
+ 'additionalProperties': {'type': 'string'},
+ 'patternProperties': {regex_str: {'type': 'string'}},
+ }
},
}
diff --git a/tests/test_types.py b/tests/test_types.py
index af4a91ef1d7..785e335b916 100644
--- a/tests/test_types.py
+++ b/tests/test_types.py
@@ -1759,6 +1759,45 @@ class Model(BaseModel):
Model(u='1234567')
+def test_strict_str_regex():
+ class Model(BaseModel):
+ u: StrictStr = Field(..., regex=r'^[0-9]+$')
+
+ assert Model(u='123').u == '123'
+
+ with pytest.raises(ValidationError, match='str type expected'):
+ Model(u=123)
+
+ with pytest.raises(ValidationError) as exc_info:
+ Model(u='abc')
+ assert exc_info.value.errors() == [
+ {
+ 'loc': ('u',),
+ 'msg': 'string does not match regex "^[0-9]+$"',
+ 'type': 'value_error.str.regex',
+ 'ctx': {'pattern': '^[0-9]+$'},
+ }
+ ]
+
+
+def test_string_regex():
+ class Model(BaseModel):
+ u: str = Field(..., regex=r'^[0-9]+$')
+
+ assert Model(u='123').u == '123'
+
+ with pytest.raises(ValidationError) as exc_info:
+ Model(u='abc')
+ assert exc_info.value.errors() == [
+ {
+ 'loc': ('u',),
+ 'msg': 'string does not match regex "^[0-9]+$"',
+ 'type': 'value_error.str.regex',
+ 'ctx': {'pattern': '^[0-9]+$'},
+ }
+ ]
+
+
def test_strict_bool():
class Model(BaseModel):
v: StrictBool
diff --git a/tests/test_validators.py b/tests/test_validators.py
index 778085880dc..de67ffe4720 100644
--- a/tests/test_validators.py
+++ b/tests/test_validators.py
@@ -7,7 +7,7 @@
import pytest
from typing_extensions import Literal
-from pydantic import BaseModel, ConfigError, Extra, Field, ValidationError, errors, validator
+from pydantic import BaseModel, ConfigError, Extra, Field, ValidationError, conlist, errors, validator
from pydantic.class_validators import make_generic_validator, root_validator
@@ -1329,3 +1329,19 @@ def post_root(cls, values):
B(x='pika')
assert validate_stub.call_args_list == [mocker.call('B', 'pre'), mocker.call('B', 'post')]
+
+
+def test_list_unique_items_with_optional():
+ class Model(BaseModel):
+ foo: Optional[List[str]] = Field(None, unique_items=True)
+ bar: conlist(str, unique_items=True) = Field(None)
+
+ assert Model().dict() == {'foo': None, 'bar': None}
+ assert Model(foo=None, bar=None).dict() == {'foo': None, 'bar': None}
+ assert Model(foo=['k1'], bar=['k1']).dict() == {'foo': ['k1'], 'bar': ['k1']}
+ with pytest.raises(ValidationError) as exc_info:
+ Model(foo=['k1', 'k1'], bar=['k1', 'k1'])
+ assert exc_info.value.errors() == [
+ {'loc': ('foo',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items'},
+ {'loc': ('bar',), 'msg': 'the list has duplicated items', 'type': 'value_error.list.unique_items'},
+ ]