diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 03237510..00000000
--- a/.flake8
+++ /dev/null
@@ -1,18 +0,0 @@
-[flake8]
-
-max-line-length = 90
-ignore =
- # irrelevant plugins
- B3,
- DW12,
- # code is sometimes better without this
- E129,
- # Contradicts PEP8 nowadays
- W503,
- # consistency with mypy
- W504
-exclude =
- # tests have more relaxed formatting rules
- # and its own specific config in .flake8-tests
- src/test_typing_extensions.py,
-noqa_require_code = true
diff --git a/.flake8-tests b/.flake8-tests
deleted file mode 100644
index 634160ab..00000000
--- a/.flake8-tests
+++ /dev/null
@@ -1,31 +0,0 @@
-# This configuration is specific to test_*.py; you need to invoke it
-# by specifically naming this config, like this:
-#
-# $ flake8 --config=.flake8-tests [SOURCES]
-#
-# This will be possibly merged in the future.
-
-[flake8]
-max-line-length = 100
-ignore =
- # temporary ignores until we sort it out
- B017,
- E302,
- E303,
- E306,
- E501,
- E701,
- E704,
- F722,
- F811,
- F821,
- F841,
- W503,
- # irrelevant plugins
- B3,
- DW12,
- # Contradicts PEP8 nowadays
- W503,
- # consistency with mypy
- W504
-noqa_require_code = true
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..5c563144
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ interval: monthly
+ groups:
+ actions:
+ patterns:
+ - "*"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 78610e27..6da5134f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -13,6 +13,7 @@ permissions:
contents: read
env:
+ FORCE_COLOR: 1
PIP_DISABLE_PIP_VERSION_CHECK: 1
concurrency:
@@ -23,16 +24,11 @@ jobs:
tests:
name: Run tests
+ # if 'schedule' was the trigger,
+ # don't run it on contributors' forks
if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ github.repository == 'python/typing_extensions'
+ || github.event_name != 'schedule'
strategy:
fail-fast: false
@@ -42,29 +38,27 @@ jobs:
# For available versions, see:
# https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
python-version:
- - "3.7"
- - "3.7.1"
- - "3.8"
- - "3.8.0"
- "3.9"
- - "3.9.0"
+ - "3.9.12"
- "3.10"
- - "3.10.0"
+ - "3.10.4"
- "3.11"
- "3.11.0"
- "3.12"
- - "pypy3.7"
- - "pypy3.8"
+ - "3.12.0"
+ - "3.13"
+ - "3.13.0"
+ - "3.14"
- "pypy3.9"
- "pypy3.10"
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
@@ -74,8 +68,18 @@ jobs:
# Be wary of running `pip install` here, since it becomes easy for us to
# accidentally pick up typing_extensions as installed by a dependency
cd src
+ python --version # just to make sure we're running the right one
python -m unittest test_typing_extensions.py
+ - name: Test CPython typing test suite
+ # Test suite fails on PyPy even without typing_extensions
+ if: ${{ !startsWith(matrix.python-version, 'pypy') }}
+ run: |
+ cd src
+ # Run the typing test suite from CPython with typing_extensions installed,
+ # because we monkeypatch typing under some circumstances.
+ python -c 'import typing_extensions; import test.__main__' test_typing -v
+
linting:
name: Lint
@@ -85,26 +89,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3"
cache: "pip"
cache-dependency-path: "test-requirements.txt"
-
- name: Install dependencies
- run: |
- pip install -r test-requirements.txt
- # not included in test-requirements.txt as it depends on typing-extensions,
- # so it's a pain to have it installed locally
- pip install flake8-noqa
-
+ run: pip install -r test-requirements.txt
- name: Lint implementation
- run: flake8 --color always
-
- - name: Lint tests
- run: flake8 --config=.flake8-tests src/test_typing_extensions.py --color always
+ run: ruff check
create-issue-on-failure:
name: Create an issue if daily tests failed
@@ -124,7 +119,7 @@ jobs:
issues: write
steps:
- - uses: actions/github-script@v6
+ - uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml
deleted file mode 100644
index ad2deee1..00000000
--- a/.github/workflows/package.yml
+++ /dev/null
@@ -1,76 +0,0 @@
-name: Test packaging
-
-on:
- push:
- branches:
- - main
- pull_request:
- workflow_dispatch:
-
-permissions:
- contents: read
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
- cancel-in-progress: true
-
-jobs:
- wheel:
- name: Test wheel install
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Set up Python
- uses: actions/setup-python@v2
- with:
- python-version: 3
-
- - name: Install pypa/build
- run: |
- # Be wary of running `pip install` here, since it becomes easy for us to
- # accidentally pick up typing_extensions as installed by a dependency
- python -m pip install --upgrade build
- python -m pip list
-
- - name: Build and install wheel
- run: |
- python -m build .
- export path_to_file=$(find dist -type f -name "typing_extensions-*.whl")
- echo "::notice::Installing wheel: $path_to_file"
- pip install -vvv $path_to_file
- python -m pip list
-
- - name: Attempt to import typing_extensions
- run: python -c "import typing_extensions; print(typing_extensions.__all__)"
-
- sdist:
- name: Test sdist install
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Set up Python
- uses: actions/setup-python@v2
- with:
- python-version: 3
-
- - name: Install pypa/build
- run: |
- # Be wary of running `pip install` here, since it becomes easy for us to
- # accidentally pick up typing_extensions as installed by a dependency
- python -m pip install --upgrade build
- python -m pip list
-
- - name: Build and install sdist
- run: |
- python -m build .
- export path_to_file=$(find dist -type f -name "typing_extensions-*.tar.gz")
- echo "::notice::Installing sdist: $path_to_file"
- pip install -vvv $path_to_file
- python -m pip list
-
- - name: Attempt to import typing_extensions
- run: python -c "import typing_extensions; print(typing_extensions.__all__)"
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 00000000..47704723
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,149 @@
+# Based on
+# https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
+
+name: Test builds and publish Python distribution to PyPI
+
+on:
+ release:
+ types: [published]
+ push:
+ branches: [main]
+ pull_request:
+
+permissions:
+ contents: read
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+jobs:
+ build:
+ name: Build distribution
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+ - name: Check package metadata
+ run: python scripts/check_package.py ${{ github.ref }}
+ - name: Install pypa/build
+ run: |
+ # Be wary of running `pip install` here, since it becomes easy for us to
+ # accidentally pick up typing_extensions as installed by a dependency
+ python -m pip install --upgrade build
+ python -m pip list
+ - name: Build a binary wheel and a source tarball
+ run: python -m build
+ - name: Store the distribution packages
+ uses: actions/upload-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+
+ test-wheel:
+ name: Test wheel
+ needs:
+ - build
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+ - name: Download all the dists
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Install wheel
+ run: |
+ export path_to_file=$(find dist -type f -name "typing_extensions-*.whl")
+ echo "::notice::Installing wheel: $path_to_file"
+ python -m pip install --user $path_to_file
+ python -m pip list
+ - name: Run typing_extensions tests against installed package
+ run: rm src/typing_extensions.py && python src/test_typing_extensions.py
+
+ test-sdist:
+ name: Test source distribution
+ needs:
+ - build
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+ - name: Download all the dists
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Unpack and test source distribution
+ run: |
+ export path_to_file=$(find dist -type f -name "typing_extensions-*.tar.gz")
+ echo "::notice::Unpacking source distribution: $path_to_file"
+ tar xzf $path_to_file -C dist/
+ cd ${path_to_file%.tar.gz}/src
+ python test_typing_extensions.py
+
+ test-sdist-installed:
+ name: Test installed source distribution
+ needs:
+ - build
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+ - name: Download all the dists
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Install source distribution
+ run: |
+ export path_to_file=$(find dist -type f -name "typing_extensions-*.tar.gz")
+ echo "::notice::Installing source distribution: $path_to_file"
+ python -m pip install --user $path_to_file
+ python -m pip list
+ - name: Run typing_extensions tests against installed package
+ run: rm src/typing_extensions.py && python src/test_typing_extensions.py
+
+ publish-to-pypi:
+ name: >-
+ Publish Python distribution to PyPI
+ if: github.event_name == 'release' # only publish to PyPI on releases
+ needs:
+ - test-sdist
+ - test-sdist-installed
+ - test-wheel
+ - build
+ runs-on: ubuntu-latest
+ environment:
+ name: publish
+ url: https://pypi.org/p/typing-extensions
+ permissions:
+ id-token: write # IMPORTANT: mandatory for trusted publishing
+
+ steps:
+ - name: Download all the dists
+ uses: actions/download-artifact@v4
+ with:
+ name: python-package-distributions
+ path: dist/
+ - name: Ensure exactly one sdist and one wheel have been downloaded
+ run: test $(ls dist/*.tar.gz | wc -l) = 1 && test $(ls dist/*.whl | wc -l) = 1
+ - name: Publish distribution to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
diff --git a/.github/workflows/third_party.yml b/.github/workflows/third_party.yml
index bcb0234c..a15735b0 100644
--- a/.github/workflows/third_party.yml
+++ b/.github/workflows/third_party.yml
@@ -26,208 +26,185 @@ concurrency:
cancel-in-progress: true
jobs:
+ skip-schedule-on-fork:
+ name: Check for schedule trigger on fork
+ runs-on: ubuntu-latest
+ # if 'schedule' was the trigger,
+ # don't run it on contributors' forks
+ if: >-
+ github.repository == 'python/typing_extensions'
+ || github.event_name != 'schedule'
+ steps:
+ - run: true
+
pydantic:
name: pydantic tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "pypy3.9"]
+ # PyPy is deliberately omitted here,
+ # since pydantic's tests intermittently segfault on PyPy,
+ # and it's nothing to do with typing_extensions
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Checkout pydantic
- uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v5
with:
- repository: pydantic/pydantic
+ python-version: ${{ matrix.python-version }}
+ allow-prereleases: true
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Checkout pydantic
+ run: git clone --depth=1 https://github.com/pydantic/pydantic.git || git clone --depth=1 https://github.com/pydantic/pydantic.git
- name: Checkout typing_extensions
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: typing-extensions-latest
- - name: Setup pdm for pydantic tests
- uses: pdm-project/setup-pdm@v3
- with:
- python-version: ${{ matrix.python-version }}
- name: Add local version of typing_extensions as a dependency
- run: pdm add ./typing-extensions-latest
+ run: cd pydantic; uv add --editable ../typing-extensions-latest
- name: Install pydantic test dependencies
- run: pdm install -G testing -G email
+ run: cd pydantic; uv sync --group dev
- name: List installed dependencies
- run: pdm list -vv # pdm equivalent to `pip list`
+ run: cd pydantic; uv pip list
- name: Run pydantic tests
- run: pdm run pytest
+ run: cd pydantic; uv run pytest
typing_inspect:
name: typing_inspect tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Checkout typing_inspect
- uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v5
with:
- repository: ilevkivskyi/typing_inspect
- path: typing_inspect
+ python-version: ${{ matrix.python-version }}
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Checkout typing_inspect
+ run: git clone --depth=1 https://github.com/ilevkivskyi/typing_inspect.git || git clone --depth=1 https://github.com/ilevkivskyi/typing_inspect.git
- name: Checkout typing_extensions
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: typing-extensions-latest
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- name: Install typing_inspect test dependencies
- run: pip install -r typing_inspect/test-requirements.txt
+ run: |
+ set -x
+ cd typing_inspect
+ uv pip install --system -r test-requirements.txt --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
- name: Install typing_extensions latest
- run: pip install ./typing-extensions-latest
+ run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
- name: List all installed dependencies
- run: pip freeze --all
+ run: uv pip freeze
- name: Run typing_inspect tests
run: |
cd typing_inspect
pytest
- pyanalyze:
- name: pyanalyze tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ pycroscope:
+ name: pycroscope tests
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Check out pyanalyze
- uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v5
with:
- repository: quora/pyanalyze
- path: pyanalyze
+ python-version: ${{ matrix.python-version }}
+ allow-prereleases: true
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Check out pycroscope
+ run: git clone --depth=1 https://github.com/JelleZijlstra/pycroscope.git || git clone --depth=1 https://github.com/JelleZijlstra/pycroscope.git
- name: Checkout typing_extensions
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: typing-extensions-latest
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- - name: Install pyanalyze test requirements
- run: pip install ./pyanalyze[tests]
+ - name: Install pycroscope test requirements
+ run: |
+ set -x
+ cd pycroscope
+ uv pip install --system 'pycroscope[tests] @ .' --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
- name: Install typing_extensions latest
- run: pip install ./typing-extensions-latest
+ run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
- name: List all installed dependencies
- run: pip freeze --all
- - name: Run pyanalyze tests
+ run: uv pip freeze
+ - name: Run pycroscope tests
run: |
- cd pyanalyze
- pytest pyanalyze/
+ cd pycroscope
+ pytest pycroscope/
typeguard:
name: typeguard tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "pypy3.9"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Check out typeguard
- uses: actions/checkout@v3
- with:
- repository: agronholm/typeguard
- path: typeguard
- - name: Checkout typing_extensions
- uses: actions/checkout@v3
- with:
- path: typing-extensions-latest
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Check out typeguard
+ run: git clone --depth=1 https://github.com/agronholm/typeguard.git || git clone --depth=1 https://github.com/agronholm/typeguard.git
+ - name: Checkout typing_extensions
+ uses: actions/checkout@v4
+ with:
+ path: typing-extensions-latest
- name: Install typeguard test requirements
- run: pip install -e ./typeguard[test]
+ run: |
+ set -x
+ cd typeguard
+ uv pip install --system "typeguard @ ." --group test --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
- name: Install typing_extensions latest
- run: pip install ./typing-extensions-latest
+ run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
- name: List all installed dependencies
- run: pip freeze --all
+ run: uv pip freeze
- name: Run typeguard tests
run: |
cd typeguard
+ export PYTHON_COLORS=0 # A test fails if tracebacks are colorized
pytest
typed-argument-parser:
name: typed-argument-parser tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Check out typed-argument-parser
- uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v5
with:
- repository: swansonk14/typed-argument-parser
- path: typed-argument-parser
+ python-version: ${{ matrix.python-version }}
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Check out typed-argument-parser
+ run: git clone --depth=1 https://github.com/swansonk14/typed-argument-parser.git || git clone --depth=1 https://github.com/swansonk14/typed-argument-parser.git
- name: Checkout typing_extensions
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: typing-extensions-latest
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- name: Configure git for typed-argument-parser tests
# typed-argument parser does this in their CI,
# and the tests fail unless we do this
@@ -236,12 +213,14 @@ jobs:
git config --global user.name "Your Name"
- name: Install typed-argument-parser test requirements
run: |
- pip install -e ./typed-argument-parser
- pip install pytest
+ set -x
+ cd typed-argument-parser
+ uv pip install --system "typed-argument-parser @ ." --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+ uv pip install --system pytest --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
- name: Install typing_extensions latest
- run: pip install ./typing-extensions-latest
+ run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
- name: List all installed dependencies
- run: pip freeze --all
+ run: uv pip freeze
- name: Run typed-argument-parser tests
run: |
cd typed-argument-parser
@@ -249,45 +228,37 @@ jobs:
mypy:
name: stubtest & mypyc tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Checkout mypy for stubtest and mypyc tests
- uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v5
with:
- repository: python/mypy
- path: mypy
+ python-version: ${{ matrix.python-version }}
+ allow-prereleases: true
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Checkout mypy for stubtest and mypyc tests
+ run: git clone --depth=1 https://github.com/python/mypy.git || git clone --depth=1 https://github.com/python/mypy.git
- name: Checkout typing_extensions
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: typing-extensions-latest
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- name: Install mypy test requirements
run: |
+ set -x
cd mypy
- pip install -r test-requirements.txt
- pip install -e .
+ uv pip install --system -r test-requirements.txt --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+ uv pip install --system -e .
- name: Install typing_extensions latest
- run: pip install ./typing-extensions-latest
+ run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
- name: List all installed dependencies
- run: pip freeze --all
+ run: uv pip freeze
- name: Run stubtest & mypyc tests
run: |
cd mypy
@@ -295,47 +266,109 @@ jobs:
cattrs:
name: cattrs tests
- if: >-
- # if 'schedule' was the trigger,
- # don't run it on contributors' forks
- ${{
- github.event_name != 'schedule'
- || (
- github.repository == 'python/typing_extensions'
- && github.event_name == 'schedule'
- )
- }}
+ needs: skip-schedule-on-fork
strategy:
fail-fast: false
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "pypy3.9"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- - name: Checkout cattrs
- uses: actions/checkout@v3
+ - name: Setup Python
+ uses: actions/setup-python@v5
with:
- repository: python-attrs/cattrs
+ python-version: ${{ matrix.python-version }}
+ - name: Checkout cattrs
+ run: git clone --depth=1 https://github.com/python-attrs/cattrs.git || git clone --depth=1 https://github.com/python-attrs/cattrs.git
- name: Checkout typing_extensions
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
path: typing-extensions-latest
- - name: Setup Python
- uses: actions/setup-python@v4
- with:
- python-version: ${{ matrix.python-version }}
- name: Install pdm for cattrs
run: pip install pdm
- name: Add latest typing-extensions as a dependency
run: |
+ cd cattrs
pdm remove typing-extensions
- pdm add --dev ./typing-extensions-latest
+ pdm add --dev ../typing-extensions-latest
+ pdm update --group=docs pendulum # pinned version in lockfile is incompatible with py313 as of 2025/05/05
+ pdm sync --clean
- name: Install cattrs test dependencies
- run: pdm install --dev -G :all
+ run: cd cattrs; pdm install --dev -G :all
- name: List all installed dependencies
- run: pdm list -vv
+ run: cd cattrs; pdm list -vv
- name: Run cattrs tests
- run: pdm run pytest tests
+ run: cd cattrs; pdm run pytest tests
+
+ sqlalchemy:
+ name: sqlalchemy tests
+ needs: skip-schedule-on-fork
+ strategy:
+ fail-fast: false
+ matrix:
+ # PyPy is deliberately omitted here, since SQLAlchemy's tests
+ # fail on PyPy for reasons unrelated to typing_extensions.
+ python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
+ checkout-ref: [ "main", "rel_2_0" ]
+ # sqlalchemy tests fail when using the Ubuntu 24.04 runner
+ # https://github.com/sqlalchemy/sqlalchemy/commit/8d73205f352e68c6603e90494494ef21027ec68f
+ runs-on: ubuntu-22.04
+ timeout-minutes: 60
+ steps:
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ allow-prereleases: true
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Checkout sqlalchemy
+ run: git clone -b ${{ matrix.checkout-ref }} --depth=1 https://github.com/sqlalchemy/sqlalchemy.git || git clone -b ${{ matrix.checkout-ref }} --depth=1 https://github.com/sqlalchemy/sqlalchemy.git
+ - name: Checkout typing_extensions
+ uses: actions/checkout@v4
+ with:
+ path: typing-extensions-latest
+ - name: Install sqlalchemy test dependencies
+ run: uv pip install --system tox setuptools
+ - name: List installed dependencies
+ # Note: tox installs SQLAlchemy and its dependencies in a different isolated
+ # environment before running the tests. To see the dependencies installed
+ # in the test environment, look for the line 'freeze> python -m pip freeze --all'
+ # in the output of the test step below.
+ run: uv pip list
+ - name: Run sqlalchemy tests
+ run: |
+ cd sqlalchemy
+ tox -e github-nocext \
+ --force-dep "typing-extensions @ file://$(pwd)/../typing-extensions-latest" \
+ -- -q --nomemory --notimingintensive
+
+
+ litestar:
+ name: litestar tests
+ needs: skip-schedule-on-fork
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
+ steps:
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Checkout litestar
+ run: git clone --depth=1 https://github.com/litestar-org/litestar.git || git clone --depth=1 https://github.com/litestar-org/litestar.git
+ - name: Checkout typing_extensions
+ uses: actions/checkout@v4
+ with:
+ path: typing-extensions-latest
+ - name: Install uv
+ run: curl -LsSf https://astral.sh/uv/install.sh | sh
+ - name: Run litestar tests
+ run: uv run --with=../typing-extensions-latest -- python -m pytest tests/unit/test_typing.py tests/unit/test_dto
+ working-directory: litestar
create-issue-on-failure:
name: Create an issue if daily tests failed
@@ -344,11 +377,12 @@ jobs:
needs:
- pydantic
- typing_inspect
- - pyanalyze
+ - pycroscope
- typeguard
- typed-argument-parser
- mypy
- cattrs
+ - sqlalchemy
if: >-
${{
@@ -358,11 +392,12 @@ jobs:
&& (
needs.pydantic.result == 'failure'
|| needs.typing_inspect.result == 'failure'
- || needs.pyanalyze.result == 'failure'
+ || needs.pycroscope.result == 'failure'
|| needs.typeguard.result == 'failure'
|| needs.typed-argument-parser.result == 'failure'
|| needs.mypy.result == 'failure'
|| needs.cattrs.result == 'failure'
+ || needs.sqlalchemy.result == 'failure'
)
}}
@@ -370,7 +405,7 @@ jobs:
issues: write
steps:
- - uses: actions/github-script@v6
+ - uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
@@ -378,5 +413,5 @@ jobs:
owner: "python",
repo: "typing_extensions",
title: `Third-party tests failed on ${new Date().toDateString()}`,
- body: "Runs listed here: https://github.com/python/typing_extensions/actions/workflows/third_party.yml",
+ body: "Run listed here: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}",
})
diff --git a/.gitignore b/.gitignore
index 0ad58f48..ee36fe77 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,7 @@ venv*/
.tox/
.venv*/
.vscode/
+.python-version
*.swp
*.pyc
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 00000000..60419be8
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,13 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+version: 2
+
+build:
+ os: ubuntu-22.04
+ tools:
+ python: "3.12"
+
+sphinx:
+ configuration: doc/conf.py
+
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1e490c5c..5d949cc8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,294 @@
+# Unreleased
+
+- Fix usage of `typing_extensions.TypedDict` nested inside other types
+ (e.g., `typing.Type[typing_extensions.TypedDict]`). This is not allowed by the
+ type system but worked on older versions, so we maintain support.
+
+# Release 4.14.0 (June 2, 2025)
+
+Changes since 4.14.0rc1:
+
+- Remove `__or__` and `__ror__` methods from `typing_extensions.Sentinel`
+ on Python versions <3.10. PEP 604 was introduced in Python 3.10, and
+ `typing_extensions` does not generally attempt to backport PEP-604 methods
+ to prior versions.
+- Further update `typing_extensions.evaluate_forward_ref` with changes in Python 3.14.
+
+# Release 4.14.0rc1 (May 24, 2025)
+
+- Drop support for Python 3.8 (including PyPy-3.8). Patch by [Victorien Plot](https://github.com/Viicos).
+- Do not attempt to re-export names that have been removed from `typing`,
+ anticipating the removal of `typing.no_type_check_decorator` in Python 3.15.
+ Patch by Jelle Zijlstra.
+- Update `typing_extensions.Format`, `typing_extensions.evaluate_forward_ref`, and
+ `typing_extensions.TypedDict` to align
+ with changes in Python 3.14. Patches by Jelle Zijlstra.
+- Fix tests for Python 3.14 and 3.15. Patches by Jelle Zijlstra.
+
+New features:
+
+- Add support for inline typed dictionaries ([PEP 764](https://peps.python.org/pep-0764/)).
+ Patch by [Victorien Plot](https://github.com/Viicos).
+- Add `typing_extensions.Reader` and `typing_extensions.Writer`. Patch by
+ Sebastian Rittau.
+- Add support for sentinels ([PEP 661](https://peps.python.org/pep-0661/)). Patch by
+ [Victorien Plot](https://github.com/Viicos).
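
For illustration, a minimal sketch of the new sentinel support; the names `MISSING` and `connect` are made up for the example:

```python
from typing_extensions import Sentinel

MISSING = Sentinel("MISSING")  # distinct singleton with a readable repr

def connect(timeout: "float | MISSING" = MISSING) -> float:
    # Distinguish "argument not passed" from an explicit None or 0.0
    if timeout is MISSING:
        return 30.0
    return timeout

print(connect())     # 30.0
print(connect(5.0))  # 5.0
```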
+
+# Release 4.13.2 (April 10, 2025)
+
+- Fix `TypeError` when taking the union of `typing_extensions.TypeAliasType` and a
+ `typing.TypeAliasType` on Python 3.12 and 3.13.
+ Patch by [Joren Hammudoglu](https://github.com/jorenham).
+- Backport from CPython PR [#132160](https://github.com/python/cpython/pull/132160)
+ to avoid having user arguments shadowed in generated `__new__` by
+ `@typing_extensions.deprecated`.
+ Patch by [Victorien Plot](https://github.com/Viicos).
+
+# Release 4.13.1 (April 3, 2025)
+
+Bugfixes:
+
+- Fix regression in 4.13.0 on Python 3.10.2 causing a `TypeError` when using `Concatenate`.
+ Patch by [Daraan](https://github.com/Daraan).
+- Fix `TypeError` when using `evaluate_forward_ref` on Python 3.10.1-2 and 3.9.8-10.
+ Patch by [Daraan](https://github.com/Daraan).
+
+# Release 4.13.0 (March 25, 2025)
+
+No user-facing changes since 4.13.0rc1.
+
+# Release 4.13.0rc1 (March 18, 2025)
+
+New features:
+
+- Add `typing_extensions.TypeForm` from PEP 747. Patch by
+ Jelle Zijlstra.
+- Add `typing_extensions.get_annotations`, a backport of
+ `inspect.get_annotations` that adds features specified
+ by PEP 649. Patches by Jelle Zijlstra and Alex Waygood.
+- Backport `evaluate_forward_ref` from CPython PR
+ [#119891](https://github.com/python/cpython/pull/119891) to evaluate `ForwardRef`s.
+ Patch by [Daraan](https://github.com/Daraan), backporting a CPython PR by Jelle Zijlstra.
+
+Bugfixes and changed features:
+
+- Update PEP 728 implementation to a newer version of the PEP. Patch by Jelle Zijlstra.
+- Copy the coroutine status of functions and methods wrapped
+ with `@typing_extensions.deprecated`. Patch by Sebastian Rittau.
+- Fix bug where `TypeAliasType` instances could be subscripted even
+ where they were not generic. Patch by [Daraan](https://github.com/Daraan).
+- Fix bug where a subscripted `TypeAliasType` instance did not have all
+ attributes of the original `TypeAliasType` instance on older Python versions.
+ Patch by [Daraan](https://github.com/Daraan) and Alex Waygood.
+- Fix bug where subscripted `TypeAliasType` instances (and some other
+ subscripted objects) had wrong parameters if they were directly
+ subscripted with an `Unpack` object.
+ Patch by [Daraan](https://github.com/Daraan).
+- Backport to Python 3.10 the ability to substitute `...` in generic `Callable`
+ aliases that have a `Concatenate` special form as their argument.
+ Patch by [Daraan](https://github.com/Daraan).
+- Extended the `Concatenate` backport for Python 3.8-3.10 to now accept
+ `Ellipsis` as an argument. Patch by [Daraan](https://github.com/Daraan).
+- Fix backport of `get_type_hints` to reflect Python 3.11+ behavior which does not add
+ `Union[..., NoneType]` to annotations that have a `None` default value anymore.
+ This fixes wrapping of `Annotated` in an unwanted `Optional` in such cases.
+ Patch by [Daraan](https://github.com/Daraan).
+- Fix error in subscription of `Unpack` aliases causing nested Unpacks
+ to not be resolved correctly. Patch by [Daraan](https://github.com/Daraan).
+- Backport CPython PR [#124795](https://github.com/python/cpython/pull/124795):
+ fix `TypeAliasType` not raising an error on non-tuple inputs for `type_params`.
+ Patch by [Daraan](https://github.com/Daraan).
+- Fix that lists and `...` could not be used for parameter expressions for `TypeAliasType`
+ instances before Python 3.11.
+ Patch by [Daraan](https://github.com/Daraan).
+- Fix error on Python 3.10 when using `typing.Concatenate` and
+ `typing_extensions.Concatenate` together. Patch by [Daraan](https://github.com/Daraan).
+- Backport of CPython PR [#109544](https://github.com/python/cpython/pull/109544)
+ to reflect Python 3.13+ behavior: A value assigned to `__total__` in the class body of a
+ `TypedDict` will be overwritten by the `total` argument of the `TypedDict` constructor.
+ Patch by [Daraan](https://github.com/Daraan), backporting a CPython PR by Jelle Zijlstra.
+- `isinstance(typing_extensions.Unpack[...], TypeVar)` now evaluates to `False` on Python 3.11
+ and newer, but remains `True` on versions before 3.11.
+ Patch by [Daraan](https://github.com/Daraan).
+
+# Release 4.12.2 (June 7, 2024)
+
+- Fix regression in v4.12.0 where specialization of certain
+ generics with an overridden `__eq__` method would raise errors.
+ Patch by Jelle Zijlstra.
+- Fix tests so they pass on 3.13.0b2
+
+# Release 4.12.1 (June 1, 2024)
+
+- Preliminary changes for compatibility with the draft implementation
+ of PEP 649 in Python 3.14. Patch by Jelle Zijlstra.
+- Fix regression in v4.12.0 where nested `Annotated` types would cause
+ `TypeError` to be raised if the nested `Annotated` type had unhashable
+ metadata. Patch by Alex Waygood.
+
+# Release 4.12.0 (May 23, 2024)
+
+This release is mostly the same as 4.12.0rc1 but fixes one more
+longstanding bug.
+
+- Fix incorrect behaviour of `typing_extensions.ParamSpec` on Python 3.8 and
+ 3.9 that meant that
+ `isinstance(typing_extensions.ParamSpec("P"), typing.TypeVar)` would have a
+ different result in some situations depending on whether or not a profiling
+ function had been set using `sys.setprofile`. Patch by Alex Waygood.
+
+# Release 4.12.0rc1 (May 16, 2024)
+
+This release focuses on compatibility with the upcoming release of
+Python 3.13. Most changes are related to the implementation of type
+parameter defaults (PEP 696).
+
+Thanks to all of the people who contributed patches, especially Alex
+Waygood, who did most of the work adapting typing-extensions to the
+CPython PEP 696 implementation.
+
+Full changelog:
+
+- Improve the implementation of type parameter defaults (PEP 696)
+ - Backport the `typing.NoDefault` sentinel object from Python 3.13.
+ TypeVars, ParamSpecs and TypeVarTuples without default values now have
+ their `__default__` attribute set to this sentinel value.
+ - TypeVars, ParamSpecs and TypeVarTuples now have a `has_default()`
+ method, matching `typing.TypeVar`, `typing.ParamSpec` and
+ `typing.TypeVarTuple` on Python 3.13+.
+ - TypeVars, ParamSpecs and TypeVarTuples with `default=None` passed to
+ their constructors now have their `__default__` attribute set to `None`
+ at runtime rather than `types.NoneType`.
+ - Fix most tests for `TypeVar`, `ParamSpec` and `TypeVarTuple` on Python
+ 3.13.0b1 and newer.
+ - Backport CPython PR [#118774](https://github.com/python/cpython/pull/118774),
+ allowing type parameters without default values to follow those with
+ default values in some type parameter lists. Patch by Alex Waygood,
+ backporting a CPython PR by Jelle Zijlstra.
+ - It is now disallowed to use a `TypeVar` with a default value after a
+ `TypeVarTuple` in a type parameter list. This matches the CPython
+ implementation of PEP 696 on Python 3.13+.
+ - Fix bug in PEP-696 implementation where a default value for a `ParamSpec`
+ would be cast to a tuple if a list was provided.
+ Patch by Alex Waygood.
+- Fix `Protocol` tests on Python 3.13.0a6 and newer. 3.13.0a6 adds a new
+ `__static_attributes__` attribute to all classes in Python,
+ which broke some assumptions made by the implementation of
+ `typing_extensions.Protocol`. Similarly, 3.13.0b1 adds the new
+ `__firstlineno__` attribute to all classes.
+- Fix `AttributeError` when using `typing_extensions.runtime_checkable`
+ in combination with `typing.Protocol` on Python 3.12.2 or newer.
+ Patch by Alex Waygood.
+- At runtime, `assert_never` now includes the repr of the argument
+  in the `AssertionError`. Patch by Hashem, backporting the original
+ fix https://github.com/python/cpython/pull/91720 by Jelle Zijlstra.
+- The second and third parameters of `typing_extensions.Generator`,
+ and the second parameter of `typing_extensions.AsyncGenerator`,
+ now default to `None`. This matches the behaviour of `typing.Generator`
+ and `typing.AsyncGenerator` on Python 3.13+.
+- `typing_extensions.ContextManager` and
+ `typing_extensions.AsyncContextManager` now have an optional second
+ parameter, which defaults to `Optional[bool]`. The new parameter
+ signifies the return type of the `__(a)exit__` method, matching
+ `typing.ContextManager` and `typing.AsyncContextManager` on Python
+ 3.13+.
+- Backport `types.CapsuleType` from Python 3.13.
+- Releases are now made using [Trusted Publishers](https://docs.pypi.org/trusted-publishers/)
+ improving the security of the release process. Patch by Jelle Zijlstra.
+
+# Release 4.12.0a1 and 4.12.0a2 (May 16, 2024)
+
+These releases primarily test a revised release workflow. If all goes
+well, release 4.12.0rc1 will follow soon.
+
+# Release 4.11.0 (April 5, 2024)
+
+This feature release provides improvements to various recently
+added features, most importantly type parameter defaults (PEP 696).
+
+There are no changes since 4.11.0rc1.
+
+# Release 4.11.0rc1 (March 24, 2024)
+
+- Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
+- Fix the runtime behavior of type parameters with defaults (PEP 696).
+ Patch by Nadir Chowdhury.
+- Fix minor discrepancy between error messages produced by `typing`
+ and `typing_extensions` on Python 3.10. Patch by Jelle Zijlstra.
+- When `include_extra=False`, `get_type_hints()` now strips `ReadOnly` from the annotation.
+
+# Release 4.10.0 (February 24, 2024)
+
+This feature release adds support for PEP 728 (TypedDict with extra
+items) and PEP 742 (``TypeIs``).
+
+There are no changes since 4.10.0rc1.
+
+# Release 4.10.0rc1 (February 17, 2024)
+
+- Add support for PEP 728, supporting the `closed` keyword argument and the
+ special `__extra_items__` key for TypedDict. Patch by Zixuan James Li.
+- Add support for PEP 742, adding `typing_extensions.TypeIs`. Patch
+ by Jelle Zijlstra.
+- Drop runtime error when a read-only `TypedDict` item overrides a mutable
+ one. Type checkers should still flag this as an error. Patch by Jelle
+ Zijlstra.
+- Speedup `issubclass()` checks against simple runtime-checkable protocols by
+ around 6% (backporting https://github.com/python/cpython/pull/112717, by Alex
+ Waygood).
+- Fix a regression in the implementation of protocols where `typing.Protocol`
+ classes that were not marked as `@runtime_checkable` would be unnecessarily
+ introspected, potentially causing exceptions to be raised if the protocol had
+ problematic members. Patch by Alex Waygood, backporting
+ https://github.com/python/cpython/pull/113401.
+
+# Release 4.9.0 (December 9, 2023)
+
+This feature release adds `typing_extensions.ReadOnly`, as specified
+by PEP 705, and makes various other improvements, especially to
+`@typing_extensions.deprecated()`.
+
+There are no changes since 4.9.0rc1.
+
+# Release 4.9.0rc1 (November 29, 2023)
+
+- Add support for PEP 705, adding `typing_extensions.ReadOnly`. Patch
+ by Jelle Zijlstra.
+- All parameters on `NewType.__call__` are now positional-only. This means that
+ the signature of `typing_extensions.NewType.__call__` now exactly matches the
+ signature of `typing.NewType.__call__`. Patch by Alex Waygood.
+- Fix bug with using `@deprecated` on a mixin class. Inheriting from a
+ deprecated class now raises a `DeprecationWarning`. Patch by Jelle Zijlstra.
+- `@deprecated` now gives a better error message if you pass a non-`str`
+ argument to the `msg` parameter. Patch by Alex Waygood.
+- `@deprecated` is now implemented as a class for better introspectability.
+ Patch by Jelle Zijlstra.
+- Exclude `__match_args__` from `Protocol` members.
+ Backport of https://github.com/python/cpython/pull/110683 by Nikita Sobolev.
+- When creating a `typing_extensions.NamedTuple` class, ensure `__set_name__`
+ is called on all objects that define `__set_name__` and exist in the values
+ of the `NamedTuple` class's class dictionary. Patch by Alex Waygood,
+ backporting https://github.com/python/cpython/pull/111876.
+- Improve the error message when trying to call `issubclass()` against a
+ `Protocol` that has non-method members. Patch by Alex Waygood (backporting
+ https://github.com/python/cpython/pull/112344, by Randolph Scholz).
+
+# Release 4.8.0 (September 17, 2023)
+
+No changes since 4.8.0rc1.
+
+# Release 4.8.0rc1 (September 7, 2023)
+
+- Add `typing_extensions.Doc`, as proposed by PEP 727. Patch by
+  Sebastián Ramírez.
+- Drop support for Python 3.7 (including PyPy-3.7). Patch by Alex Waygood.
+- Fix bug where `get_original_bases()` would return incorrect results when
+ called on a concrete subclass of a generic class. Patch by Alex Waygood
+ (backporting https://github.com/python/cpython/pull/107584, by James
+ Hilton-Balfe).
+- Fix bug where `ParamSpec(default=...)` would raise a `TypeError` on Python
+ versions <3.11. Patch by James Hilton-Balfe
+
# Release 4.7.1 (July 2, 2023)
- Fix support for `TypedDict`, `NamedTuple` and `is_protocol` on PyPy-3.7 and
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 9d07313e..1b030d56 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -18,8 +18,6 @@ standard library, so that users can experiment with them before they are added t
standard library. Such features should already be specified in a PEP or merged into
CPython's `main` branch.
-`typing_extensions` supports Python versions 3.7 and up.
-
# Versioning scheme
Starting with version 4.0.0, `typing_extensions` uses
@@ -63,24 +61,10 @@ may have installed.
- Update the version number in `typing_extensions/pyproject.toml` and in
`typing_extensions/CHANGELOG.md`.
-- Make sure your environment is up to date
-
- - `git checkout main`
- - `git pull`
- - `python -m pip install --upgrade build twine`
-
-- Build the source and wheel distributions:
-
- - `rm -rf dist/`
- - `python -m build .`
-
-- Install the built distributions locally and test (if you were using `tox`, you already
- tested the source distribution).
-
-- Run `twine upload dist/*`. Remember to use `__token__` as the username
- and pass your API token as the password.
-
- Create a new GitHub release at https://github.com/python/typing_extensions/releases/new.
Details:
- The tag should be just the version number, e.g. `4.1.1`.
- Copy the release notes from `CHANGELOG.md`.
+
+- Release automation will finish the release. You'll have to manually
+ approve the last step before upload.
diff --git a/README.md b/README.md
index efd3a824..1eddb2a1 100644
--- a/README.md
+++ b/README.md
@@ -26,8 +26,6 @@ Therefore, it's safe to depend
on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
where `x.y` is the first version that includes all features you need.
-`typing_extensions` supports Python versions 3.7 and higher.
-
## Included items
See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..efd1d6a3
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,10 @@
+# Security Policy
+
+## Supported Versions
+
+Only the latest release is supported.
+
+## Reporting a Vulnerability
+
+To report an issue, go to https://github.com/python/typing_extensions/security.
+We commit to respond to any issue within 14 days and promptly release any fixes.
diff --git a/doc/conf.py b/doc/conf.py
index 7984bc22..db9b5185 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -6,6 +6,9 @@
import os.path
import sys
+from docutils.nodes import Element
+from sphinx.writers.html5 import HTML5Translator
+
sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
@@ -24,11 +27,26 @@
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
-intersphinx_mapping = {'py': ('https://docs.python.org/3.12', None)}
+# This should usually point to /3, unless there is a necessity to link to
+# features in future versions of Python.
+intersphinx_mapping = {'py': ('https://docs.python.org/3.14', None)}
+add_module_names = False
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = 'alabaster'
-html_static_path = ['_static']
+
+
+class MyTranslator(HTML5Translator):
+ """Adds a link target to name without `typing_extensions.` prefix."""
+ def visit_desc_signature(self, node: Element) -> None:
+ desc_name = node.get("fullname")
+ if desc_name:
+            self.body.append(f'<span id="{desc_name}"></span>')
+ super().visit_desc_signature(node)
+
+
+def setup(app):
+ app.set_translator('html', MyTranslator)
diff --git a/doc/index.rst b/doc/index.rst
index 5fd2b2e8..21d6fa60 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,3 +1,4 @@
+.. module:: typing_extensions
Welcome to typing_extensions's documentation!
=============================================
@@ -56,6 +57,9 @@ be added.
Bugfix releases, with version numbers of the form 4.N.1 or higher,
may be made if bugs are discovered after a feature release.
+We provide no backward compatibility guarantees for prereleases (e.g.,
+release candidates) and for unreleased code in our Git repository.
+
Before version 4.0.0, the versioning scheme loosely followed the Python
version from which features were backported; for example,
``typing_extensions`` 3.10.0.0 was meant to reflect ``typing`` as of
@@ -129,13 +133,13 @@ Example usage::
False
>>> is_literal(get_origin(typing.Literal[42]))
True
- >>> is_literal(get_origin(typing_extensions.Final[42]))
+ >>> is_literal(get_origin(typing_extensions.Final[int]))
False
Python version support
----------------------
-``typing_extensions`` currently supports Python versions 3.7 and higher. In the future,
+``typing_extensions`` currently supports Python versions 3.9 and higher. In the future,
support for older Python versions will be dropped some time after that version
reaches end of life.
@@ -174,7 +178,7 @@ Special typing primitives
See :py:data:`typing.Concatenate` and :pep:`612`. In ``typing`` since 3.10.
The backport does not support certain operations involving ``...`` as
- a parameter; see :issue:`48` and :issue:`110` for details.
+ a parameter; see :issue:`48` and :pr:`481` for details.
.. data:: Final
@@ -249,13 +253,19 @@ Special typing primitives
The improvements from Python 3.10 and 3.11 were backported.
+.. data:: NoDefault
+
+ See :py:data:`typing.NoDefault`. In ``typing`` since 3.13.
+
+ .. versionadded:: 4.12.0
+
.. data:: NotRequired
See :py:data:`typing.NotRequired` and :pep:`655`. In ``typing`` since 3.11.
.. versionadded:: 4.0.0
-.. class:: ParamSpec(name, *, default=...)
+.. class:: ParamSpec(name, *, default=NoDefault)
See :py:class:`typing.ParamSpec` and :pep:`612`. In ``typing`` since 3.10.
@@ -275,6 +285,25 @@ Special typing primitives
The implementation was changed for compatibility with Python 3.12.
+ .. versionchanged:: 4.8.0
+
+ Passing an ellipsis literal (``...``) to *default* now works on Python
+ 3.10 and lower.
+
+ .. versionchanged:: 4.12.0
+
+ The :attr:`!__default__` attribute is now set to ``None`` if
+ ``default=None`` is passed, and to :data:`NoDefault` if no value is passed.
+
+ Previously, passing ``None`` would result in :attr:`!__default__` being set
+ to :py:class:`types.NoneType`, and passing no value for the parameter would
+ result in :attr:`!__default__` being set to ``None``.
+
+ .. versionchanged:: 4.12.0
+
+ ParamSpecs now have a ``has_default()`` method, for compatibility
+ with :py:class:`typing.ParamSpec` on Python 3.13+.
+
.. class:: ParamSpecArgs
.. class:: ParamSpecKwargs
@@ -310,6 +339,14 @@ Special typing primitives
present in a protocol class's :py:term:`method resolution order`. See
:issue:`245` for some examples.
+.. data:: ReadOnly
+
+ See :py:data:`typing.ReadOnly` and :pep:`705`. In ``typing`` since 3.13.
+
+ Indicates that a :class:`TypedDict` item may not be modified.
+
+ .. versionadded:: 4.9.0
+
.. data:: Required
See :py:data:`typing.Required` and :pep:`655`. In ``typing`` since 3.11.
@@ -332,11 +369,25 @@ Special typing primitives
.. versionadded:: 4.6.0
+.. data:: TypeForm
+
+ See :pep:`747`. A special form representing the value of a type expression.
+
+ .. versionadded:: 4.13.0
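
   A minimal sketch of the intended use; ``describe`` is an illustrative name, and at runtime ``TypeForm[T]`` is simply a subscriptable special form:

```python
from typing import Optional
from typing_extensions import TypeForm, TypeVar

T = TypeVar("T")

def describe(form: TypeForm[T]) -> str:
    # Any type expression (not just a class) may be passed, e.g. Optional[int]
    return repr(form)

print(describe(Optional[int]))
print(describe(str))
```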
+
.. data:: TypeGuard
See :py:data:`typing.TypeGuard` and :pep:`647`. In ``typing`` since 3.10.
-.. class:: TypedDict
+.. data:: TypeIs
+
+ See :py:data:`typing.TypeIs` and :pep:`742`. In ``typing`` since 3.13.
+
+ Similar to :data:`TypeGuard`, but allows more type narrowing.
+
+ .. versionadded:: 4.10.0
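
   For example, narrowing in *both* branches is what distinguishes ``TypeIs`` from ``TypeGuard``:

```python
from typing import Union
from typing_extensions import TypeIs

def is_str(val: object) -> TypeIs[str]:
    return isinstance(val, str)

def handle(val: Union[int, str]) -> None:
    if is_str(val):
        print(val.upper())  # narrowed to str
    else:
        print(val + 1)      # narrowed to int in the negative branch too
```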
+
+.. class:: TypedDict(dict, total=True)
See :py:class:`typing.TypedDict` and :pep:`589`. In ``typing`` since 3.8.
@@ -358,6 +409,55 @@ Special typing primitives
raises a :py:exc:`DeprecationWarning` when this syntax is used in Python 3.12
or lower and fails with a :py:exc:`TypeError` in Python 3.13 and higher.
+ ``typing_extensions`` supports the experimental :data:`ReadOnly` qualifier
+ proposed by :pep:`705`. It is reflected in the following attributes:
+
+ .. attribute:: __readonly_keys__
+
+ A :py:class:`frozenset` containing the names of all read-only keys. Keys
+ are read-only if they carry the :data:`ReadOnly` qualifier.
+
+ .. versionadded:: 4.9.0
+
+ .. attribute:: __mutable_keys__
+
+ A :py:class:`frozenset` containing the names of all mutable keys. Keys
+ are mutable if they do not carry the :data:`ReadOnly` qualifier.
+
+ .. versionadded:: 4.9.0
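
   A small sketch of inspecting these attributes at runtime (``Movie`` is an illustrative name):

```python
from typing_extensions import ReadOnly, TypedDict

class Movie(TypedDict):
    name: ReadOnly[str]  # read-only per PEP 705
    year: int

print(Movie.__readonly_keys__)  # frozenset({'name'})
print(Movie.__mutable_keys__)   # frozenset({'year'})
```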
+
+ The experimental ``closed`` keyword argument and the special key
+ ``__extra_items__`` proposed in :pep:`728` are supported.
+
+ When ``closed`` is unspecified or ``closed=False`` is given,
+ ``__extra_items__`` behaves like a regular key. Otherwise, this becomes a
+ special key that does not show up in ``__readonly_keys__``,
+   ``__mutable_keys__``, ``__required_keys__``, ``__optional_keys__``, or
+ ``__annotations__``.
+
+ For runtime introspection, two attributes can be looked at:
+
+ .. attribute:: __closed__
+
+ A boolean flag indicating whether the current ``TypedDict`` is
+ considered closed. This is not inherited by the ``TypedDict``'s
+ subclasses.
+
+ .. versionadded:: 4.10.0
+
+ .. attribute:: __extra_items__
+
+ The type annotation of the extra items allowed on the ``TypedDict``.
+      This attribute defaults to ``None`` on a ``TypedDict`` where neither the
+      class itself nor any of its bases is closed. This default is distinct from
+      ``type(None)``, which represents ``__extra_items__: None`` defined on a
+      closed ``TypedDict``.
+
+ If ``__extra_items__`` is not defined or inherited on a closed
+ ``TypedDict``, this defaults to ``Never``.
+
+ .. versionadded:: 4.10.0
+
.. versionchanged:: 4.3.0
Added support for generic ``TypedDict``\ s.
@@ -386,8 +486,17 @@ Special typing primitives
disallowed in Python 3.15. To create a TypedDict class with 0 fields,
use ``class TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``.
+ .. versionchanged:: 4.9.0
+
+ Support for the :data:`ReadOnly` qualifier was added.
+
+ .. versionchanged:: 4.10.0
+
+      Support for the keyword argument ``closed`` and for the special key
+      ``__extra_items__`` (when ``closed=True`` is given) was added.
+
.. class:: TypeVar(name, *constraints, bound=None, covariant=False,
- contravariant=False, infer_variance=False, default=...)
+ contravariant=False, infer_variance=False, default=NoDefault)
See :py:class:`typing.TypeVar`.
@@ -405,7 +514,21 @@ Special typing primitives
The implementation was changed for compatibility with Python 3.12.
-.. class:: TypeVarTuple(name, *, default=...)
+ .. versionchanged:: 4.12.0
+
+ The :attr:`!__default__` attribute is now set to ``None`` if
+ ``default=None`` is passed, and to :data:`NoDefault` if no value is passed.
+
+ Previously, passing ``None`` would result in :attr:`!__default__` being set
+ to :py:class:`types.NoneType`, and passing no value for the parameter would
+ result in :attr:`!__default__` being set to ``None``.
+
+ .. versionchanged:: 4.12.0
+
+ TypeVars now have a ``has_default()`` method, for compatibility
+ with :py:class:`typing.TypeVar` on Python 3.13+.
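
   A brief sketch of the default-related API described above:

```python
from typing_extensions import NoDefault, TypeVar

T = TypeVar("T", default=int)  # PEP 696 default
U = TypeVar("U")               # no default

print(T.has_default(), T.__default__)               # True <class 'int'>
print(U.has_default(), U.__default__ is NoDefault)  # False True
```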
+
+.. class:: TypeVarTuple(name, *, default=NoDefault)
See :py:class:`typing.TypeVarTuple` and :pep:`646`. In ``typing`` since 3.11.
@@ -422,6 +545,26 @@ Special typing primitives
The implementation was changed for compatibility with Python 3.12.
+ .. versionchanged:: 4.12.0
+
+ The :attr:`!__default__` attribute is now set to ``None`` if
+ ``default=None`` is passed, and to :data:`NoDefault` if no value is passed.
+
+ Previously, passing ``None`` would result in :attr:`!__default__` being set
+ to :py:class:`types.NoneType`, and passing no value for the parameter would
+ result in :attr:`!__default__` being set to ``None``.
+
+ .. versionchanged:: 4.12.0
+
+ TypeVarTuples now have a ``has_default()`` method, for compatibility
+ with :py:class:`typing.TypeVarTuple` on Python 3.13+.
+
+ .. versionchanged:: 4.12.0
+
+      It is now disallowed to use a ``TypeVar`` with a default value after a
+      ``TypeVarTuple`` in a type parameter list. This matches the CPython
+ implementation of PEP 696 on Python 3.13+.
+
.. data:: Unpack
See :py:data:`typing.Unpack` and :pep:`646`. In ``typing`` since 3.11.
@@ -438,13 +581,6 @@ Special typing primitives
Backport ``repr()`` changes from Python 3.12.
-Generic concrete collections
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-.. class:: OrderedDict
-
- See :py:class:`typing.OrderedDict`. In ``typing`` since 3.7.2.
-
Abstract Base Classes
~~~~~~~~~~~~~~~~~~~~~
@@ -523,6 +659,18 @@ Protocols
.. versionadded:: 4.6.0
+.. class:: Reader
+
+ See :py:class:`io.Reader`. Added to the standard library in Python 3.14.
+
+ .. versionadded:: 4.14.0
+
+.. class:: Writer
+
+ See :py:class:`io.Writer`. Added to the standard library in Python 3.14.
+
+ .. versionadded:: 4.14.0
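
An illustrative sketch using the protocols as parameter annotations; ``io.BytesIO`` happens to satisfy both:

```python
import io
from typing_extensions import Reader, Writer

def copy_all(src: Reader[bytes], dst: Writer[bytes]) -> int:
    # Reader provides read(); Writer provides write()
    return dst.write(src.read())

print(copy_all(io.BytesIO(b"data"), io.BytesIO()))  # 4
```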
+
Decorators
~~~~~~~~~~
@@ -548,10 +696,15 @@ Decorators
.. decorator:: deprecated(msg, *, category=DeprecationWarning, stacklevel=1)
- See :pep:`702`. Experimental; not yet part of the standard library.
+ See :pep:`702`. In the :mod:`warnings` module since Python 3.13.
.. versionadded:: 4.5.0
+ .. versionchanged:: 4.9.0
+
+ Inheriting from a deprecated class now also raises a runtime
+ :py:exc:`DeprecationWarning`.
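
   For instance (a minimal sketch):

```python
from typing_extensions import deprecated

@deprecated("Use new_api() instead")
def old_api() -> None:
    ...

old_api()  # emits DeprecationWarning: Use new_api() instead
```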
+
.. decorator:: final
See :py:func:`typing.final` and :pep:`591`. In ``typing`` since 3.8.
@@ -616,6 +769,56 @@ Functions
.. versionadded:: 4.2.0
+.. function:: evaluate_forward_ref(forward_ref, *, owner=None, globals=None, locals=None, type_params=None, format=None)
+
+ Evaluate an :py:class:`typing.ForwardRef` as a :py:term:`type hint`.
+
+ This is similar to calling :py:meth:`annotationlib.ForwardRef.evaluate`,
+ but unlike that method, :func:`!evaluate_forward_ref` also:
+
+ * Recursively evaluates forward references nested within the type hint.
+ However, the amount of recursion is limited in Python 3.8 and 3.10.
+ * Raises :exc:`TypeError` when it encounters certain objects that are
+ not valid type hints.
+ * Replaces type hints that evaluate to :const:`!None` with
+ :class:`types.NoneType`.
+ * Supports the :attr:`Format.FORWARDREF` and
+ :attr:`Format.STRING` formats.
+
+ *forward_ref* must be an instance of :py:class:`typing.ForwardRef`.
+ *owner*, if given, should be the object that holds the annotations that
+ the forward reference derived from, such as a module, class object, or function.
+ It is used to infer the namespaces to use for looking up names.
+ *globals* and *locals* can also be explicitly given to provide
+ the global and local namespaces.
+ *type_params* is a tuple of :py:ref:`type parameters ` that
+ are in scope when evaluating the forward reference.
+ This parameter must be provided (though it may be an empty tuple) if *owner*
+ is not given and the forward reference does not already have an owner set.
+ *format* specifies the format of the annotation and is a member of
+ the :class:`Format` enum, defaulting to :attr:`Format.VALUE`.
+
+ .. versionadded:: 4.13.0
+
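+    A minimal illustration of the expected usage (only the behavior
+    described above is assumed)::
+
+        import typing
+        from typing_extensions import evaluate_forward_ref
+
+        ref = typing.ForwardRef("int")
+        assert evaluate_forward_ref(ref, globals={}, type_params=()) is int
+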
+.. function:: get_annotations(obj, *, globals=None, locals=None, eval_str=False, format=Format.VALUE)
+
+ See :py:func:`inspect.get_annotations`. In the standard library since Python 3.10.
+
+ ``typing_extensions`` adds the keyword argument ``format``, as specified
+ by :pep:`649`. The supported formats are listed in the :class:`Format` enum.
+ The default format, :attr:`Format.VALUE`, behaves the same across all versions.
+ For the other two formats, ``typing_extensions`` provides a rough approximation
+ of the :pep:`649` behavior on versions of Python that do not support it.
+
+ The purpose of this backport is to allow users who would like to use
+ :attr:`Format.FORWARDREF` or :attr:`Format.STRING` semantics once
+ :pep:`649` is implemented, but who also
+ want to support earlier Python versions, to simply write::
+
+ typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
+
+ .. versionadded:: 4.13.0
+
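+    A short illustration (the function name is made up; on Pythons without
+    :pep:`649` support, the ``STRING`` result is approximated by stringifying
+    the evaluated annotations)::
+
+        from typing_extensions import Format, get_annotations
+
+        def divide(a: int, b: int) -> float: ...
+
+        print(get_annotations(divide))                        # {'a': <class 'int'>, ...}
+        print(get_annotations(divide, format=Format.STRING))  # {'a': 'int', ...}
+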
.. function:: get_args(tp)
See :py:func:`typing.get_args`. In ``typing`` since 3.8.
@@ -655,6 +858,8 @@ Functions
.. function:: get_protocol_members(tp)
+ See :py:func:`typing.get_protocol_members`. In ``typing`` since 3.13.
+
Return the set of members defined in a :class:`Protocol`. This works with protocols
defined using either :class:`typing.Protocol` or :class:`typing_extensions.Protocol`.
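+
+    For example (the protocol name is made up)::
+
+        from typing_extensions import Protocol, get_protocol_members
+
+        class HasClose(Protocol):
+            def close(self) -> None: ...
+
+        assert get_protocol_members(HasClose) == {"close"}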
@@ -683,8 +888,15 @@ Functions
Interaction with :data:`Required` and :data:`NotRequired`.
+ .. versionchanged:: 4.11.0
+
+      When ``include_extras=False``, ``get_type_hints()`` now strips
+      :data:`ReadOnly` from the annotation.
+
.. function:: is_protocol(tp)
+ See :py:func:`typing.is_protocol`. In ``typing`` since 3.13.
+
Determine if a type is a :class:`Protocol`. This works with protocols
defined using either :py:class:`typing.Protocol` or :class:`typing_extensions.Protocol`.
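+
+    For example (the protocol name is made up)::
+
+        from typing_extensions import Protocol, is_protocol
+
+        class Closeable(Protocol):
+            def close(self) -> None: ...
+
+        assert is_protocol(Closeable)
+        assert not is_protocol(int)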
@@ -721,11 +933,133 @@ Functions
.. versionadded:: 4.1.0
+Enums
+~~~~~
+
+.. class:: Format
+
+ The formats for evaluating annotations introduced by :pep:`649`.
+ Members of this enum can be passed as the *format* argument
+ to :func:`get_annotations`.
+
+ The final place of this enum in the standard library has not yet
+ been determined (see :pep:`649` and :pep:`749`), but the names
+ and integer values are stable and will continue to work.
+
+ .. attribute:: VALUE
+
+ Equal to 1. The default value. The function will return the conventional Python values
+ for the annotations. This format is identical to the return value for
+ the function under earlier versions of Python.
+
+ .. attribute:: VALUE_WITH_FAKE_GLOBALS
+
+ Equal to 2. Special value used to signal that an annotate function is being
+ evaluated in a special environment with fake globals. When passed this
+ value, annotate functions should either return the same value as for
+ the :attr:`Format.VALUE` format, or raise :exc:`NotImplementedError`
+ to signal that they do not support execution in this environment.
+ This format is only used internally and should not be passed to
+ the functions in this module.
+
+ .. attribute:: FORWARDREF
+
+ Equal to 3. When :pep:`649` is implemented, this format will attempt to return the
+ conventional Python values for the annotations. However, if it encounters
+ an undefined name, it dynamically creates a proxy object (a ForwardRef)
+ that substitutes for that value in the expression.
+
+ ``typing_extensions`` emulates this value on versions of Python which do
+ not support :pep:`649` by returning the same value as for ``VALUE`` semantics.
+
+ .. attribute:: STRING
+
+ Equal to 4. When :pep:`649` is implemented, this format will produce an annotation
+ dictionary where the values have been replaced by strings containing
+ an approximation of the original source code for the annotation expressions.
+
+ ``typing_extensions`` emulates this by evaluating the annotations using
+ ``VALUE`` semantics and then stringifying the results.
+
+ .. versionadded:: 4.13.0
+
+Annotation metadata
+~~~~~~~~~~~~~~~~~~~
+
+.. class:: Doc(documentation, /)
+
+ Define the documentation of a type annotation using :data:`Annotated`, to be
+ used in class attributes, function and method parameters, return values,
+ and variables.
+
+ The value should be a positional-only string literal to allow static tools
+ like editors and documentation generators to use it.
+
+ This complements docstrings.
+
+ The string value passed is available in the attribute ``documentation``.
+
+ Example::
+
+ >>> from typing_extensions import Annotated, Doc
+ >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+
+ .. versionadded:: 4.8.0
+
+ See :pep:`727`.
+
+ .. attribute:: documentation
+
+ The documentation string passed to :class:`Doc`.
+
+
+Capsule objects
+~~~~~~~~~~~~~~~
+
+.. class:: CapsuleType
+
+ The type of :py:ref:`capsule objects `.
+ See :py:class:`types.CapsuleType`, where it has existed since Python 3.13.
+
+ Note that this may not exist on all implementations of Python; it is only
+ guaranteed to exist on CPython.
+
+ .. versionadded:: 4.12.0
+
+
+Sentinel objects
+~~~~~~~~~~~~~~~~
+
+.. class:: Sentinel(name, repr=None)
+
+ A type used to define sentinel values. The *name* argument should be the
+ name of the variable to which the return value shall be assigned.
+
+ If *repr* is provided, it will be used for the :meth:`~object.__repr__`
+    of the sentinel object. If not provided, ``"<name>"`` will be used.
+
+ Example::
+
+ >>> from typing_extensions import Sentinel, assert_type
+ >>> MISSING = Sentinel('MISSING')
+ >>> def func(arg: int | MISSING = MISSING) -> None:
+ ... if arg is MISSING:
+ ... assert_type(arg, MISSING)
+ ... else:
+ ... assert_type(arg, int)
+ ...
+ >>> func(MISSING)
+
+ .. versionadded:: 4.14.0
+
+    See :pep:`661`.
+
+
Pure aliases
~~~~~~~~~~~~
-These are simply re-exported from the :mod:`typing` module on all supported
-versions of Python. They are listed here for completeness.
+Most of these are simply re-exported from the :mod:`typing` module on all supported
+versions of Python, but all are listed here for completeness.
.. class:: AbstractSet
@@ -743,10 +1077,19 @@ versions of Python. They are listed here for completeness.
See :py:class:`typing.AsyncContextManager`. In ``typing`` since 3.5.4 and 3.6.2.
+ .. versionchanged:: 4.12.0
+
+ ``AsyncContextManager`` now has an optional second parameter, defaulting to
+ ``Optional[bool]``, signifying the return type of the ``__aexit__`` method.
+
.. class:: AsyncGenerator
See :py:class:`typing.AsyncGenerator`. In ``typing`` since 3.6.1.
+ .. versionchanged:: 4.12.0
+
+ The second type parameter is now optional (it defaults to ``None``).
+
.. class:: AsyncIterable
See :py:class:`typing.AsyncIterable`. In ``typing`` since 3.5.2.
@@ -795,6 +1138,11 @@ versions of Python. They are listed here for completeness.
See :py:class:`typing.ContextManager`. In ``typing`` since 3.5.4.
+ .. versionchanged:: 4.12.0
+
+ ``ContextManager`` now has an optional second parameter, defaulting to
+ ``Optional[bool]``, signifying the return type of the ``__exit__`` method.
+
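+    For example, both parameterizations below are valid; the second pins the
+    ``__exit__`` return type to ``None`` (a minimal illustration)::
+
+        from typing_extensions import ContextManager
+
+        CM = ContextManager[int]              # __exit__ returns Optional[bool]
+        StrictCM = ContextManager[int, None]  # __exit__ returns None
+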
.. class:: Coroutine
See :py:class:`typing.Coroutine`. In ``typing`` since 3.5.3.
@@ -835,6 +1183,11 @@ versions of Python. They are listed here for completeness.
.. versionadded:: 4.7.0
+ .. versionchanged:: 4.12.0
+
+      The second and third type parameters are now optional
+      (they both default to ``None``).
+
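+    A minimal illustration of the new defaults::
+
+        from typing_extensions import Generator, get_args
+
+        assert get_args(Generator[int]) == (int, type(None), type(None))
+        assert get_args(Generator[int]) == get_args(Generator[int, None, None])
+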
.. class:: Generic
See :py:class:`typing.Generic`.
@@ -929,6 +1282,10 @@ versions of Python. They are listed here for completeness.
.. versionadded:: 4.7.0
+.. class:: OrderedDict
+
+ See :py:class:`typing.OrderedDict`. In ``typing`` since 3.7.2.
+
.. class:: Pattern
See :py:class:`typing.Pattern`.
@@ -1012,3 +1369,23 @@ versions of Python. They are listed here for completeness.
See :py:func:`typing.no_type_check_decorator`.
.. versionadded:: 4.7.0
+
+Security
+--------
+
+``typing_extensions`` is among the most widely used packages in the
+Python ecosystem. Therefore, we take security seriously and strive
+to use a transparent, secure release process.
+
+We commit to the following in order to keep the package secure in the
+future:
+
+* ``typing_extensions`` will never include any native extensions, only
+ pure Python code.
+* ``typing_extensions`` will not have any third-party dependencies.
+* We will follow best practices for a secure release process.
+
+If you have any feedback on our security process, please `open an issue
+`__. To report
+an issue privately, use `GitHub's private reporting feature
+`__.
diff --git a/pyproject.toml b/pyproject.toml
index 736e1e42..a8f3d525 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,16 +1,17 @@
# Build system requirements.
[build-system]
-requires = ["flit_core >=3.4,<4"]
+requires = ["flit_core >=3.11,<4"]
build-backend = "flit_core.buildapi"
# Project metadata
[project]
name = "typing_extensions"
-version = "4.7.1"
-description = "Backported and Experimental Type Hints for Python 3.7+"
+version = "4.14.0"
+description = "Backported and Experimental Type Hints for Python 3.9+"
readme = "README.md"
-requires-python = ">=3.7"
-license = { file = "LICENSE" }
+requires-python = ">=3.9"
+license = "PSF-2.0"
+license-files = ["LICENSE"]
keywords = [
"annotations",
"backport",
@@ -30,16 +31,15 @@ classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
- "License :: OSI Approved :: Python Software Foundation License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
"Topic :: Software Development",
]
@@ -60,3 +60,57 @@ email = "levkivskyi@gmail.com"
[tool.flit.sdist]
include = ["CHANGELOG.md", "README.md", "tox.ini", "*/*test*.py"]
exclude = []
+
+[tool.ruff]
+line-length = 90
+target-version = "py39"
+
+[tool.ruff.lint]
+select = [
+ "B",
+ "C4",
+ "E",
+ "F",
+ "I",
+ "ISC001",
+ "PGH004",
+ "RUF",
+ "SIM201",
+ "SIM202",
+ "UP",
+ "W",
+]
+
+ignore = [
+ # Ignore various "modernization" rules that tell you off for importing/using
+ # deprecated things from the typing module, etc.
+ "UP006",
+ "UP007",
+ "UP013",
+ "UP014",
+ "UP019",
+ "UP035",
+ "UP038",
+ # Not relevant here
+ "RUF012",
+ "RUF022",
+ "RUF023",
+ # Ruff doesn't understand the globals() assignment; we test __all__
+ # directly in test_all_names_in___all__.
+ "F822",
+]
+
+[tool.ruff.lint.per-file-ignores]
+"!src/typing_extensions.py" = [
+ "B018",
+ "B024",
+ "C4",
+ "E302",
+ "E306",
+ "E501",
+ "E701",
+]
+
+[tool.ruff.lint.isort]
+extra-standard-library = ["tomllib"]
+known-first-party = ["typing_extensions", "_typed_dict_test_helper"]
diff --git a/scripts/check_package.py b/scripts/check_package.py
new file mode 100644
index 00000000..f52df411
--- /dev/null
+++ b/scripts/check_package.py
@@ -0,0 +1,60 @@
+import argparse
+import re
+import sys
+import tomllib
+from pathlib import Path
+
+
+class ValidationError(Exception):
+ pass
+
+
+def check(github_ref: str | None) -> None:
+ pyproject = Path(__file__).parent.parent / "pyproject.toml"
+ if not pyproject.exists():
+ raise ValidationError("pyproject.toml not found")
+ with pyproject.open("rb") as f:
+ data = tomllib.load(f)
+ pyproject_version = data["project"]["version"]
+
+ if github_ref is not None and github_ref.startswith("refs/tags/"):
+ version = github_ref.removeprefix("refs/tags/")
+ if version != pyproject_version:
+ raise ValidationError(
+ f"Version mismatch: GitHub ref is {version}, "
+ f"but pyproject.toml is {pyproject_version}"
+ )
+
+ requires_python = data["project"]["requires-python"]
+ assert sys.version_info[0] == 3, "Rewrite this script when Python 4 comes out"
+ match = re.fullmatch(r">=3\.(\d+)", requires_python)
+ if not match:
+ raise ValidationError(f"Invalid requires-python: {requires_python!r}")
+ lowest_minor = int(match.group(1))
+
+ description = data["project"]["description"]
+ if not description.endswith(f"3.{lowest_minor}+"):
+ raise ValidationError(f"Description should mention Python 3.{lowest_minor}+")
+
+ classifiers = set(data["project"]["classifiers"])
+ for should_be_supported in range(lowest_minor, sys.version_info[1] + 1):
+ if (
+ f"Programming Language :: Python :: 3.{should_be_supported}"
+ not in classifiers
+ ):
+ raise ValidationError(
+ f"Missing classifier for Python 3.{should_be_supported}"
+ )
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Script to check the package metadata")
+ parser.add_argument(
+ "github_ref", type=str, help="The current GitHub ref", nargs="?"
+ )
+ args = parser.parse_args()
+ try:
+ check(args.github_ref)
+ except ValidationError as e:
+ print(e)
+ sys.exit(1)
diff --git a/src/_typed_dict_test_helper.py b/src/_typed_dict_test_helper.py
index c5582b15..73cf9199 100644
--- a/src/_typed_dict_test_helper.py
+++ b/src/_typed_dict_test_helper.py
@@ -1,7 +1,8 @@
from __future__ import annotations
from typing import Generic, Optional, T
-from typing_extensions import TypedDict, Annotated, Required
+
+from typing_extensions import Annotated, Required, TypedDict
# this class must not be imported into test_typing_extensions.py at top level, otherwise
diff --git a/src/test_typing_extensions.py b/src/test_typing_extensions.py
index c2ab6d7f..3ef29474 100644
--- a/src/test_typing_extensions.py
+++ b/src/test_typing_extensions.py
@@ -1,70 +1,153 @@
-import sys
-import os
import abc
-import gc
-import io
-import contextlib
+import asyncio
import collections
-from collections import defaultdict
import collections.abc
+import contextlib
import copy
-from functools import lru_cache
+import functools
+import gc
import importlib
import inspect
+import io
+import itertools
import pickle
import re
import subprocess
+import sys
import tempfile
import textwrap
import types
-from pathlib import Path
-from unittest import TestCase, main, skipUnless, skipIf
-from unittest.mock import patch
import typing
-from typing import Optional, Union, AnyStr
-from typing import T, KT, VT # Not in __all__.
-from typing import Tuple, List, Set, Dict, Iterable, Iterator, Callable
-from typing import Generic
-from typing import no_type_check
import warnings
+from collections import defaultdict
+from functools import lru_cache
+from pathlib import Path
+from unittest import TestCase, main, skipIf, skipUnless
+from unittest.mock import patch
import typing_extensions
-from typing_extensions import NoReturn, Any, ClassVar, Final, IntVar, Literal, Type, NewType, TypedDict, Self
-from typing_extensions import TypeAlias, ParamSpec, Concatenate, ParamSpecArgs, ParamSpecKwargs, TypeGuard
-from typing_extensions import Awaitable, AsyncIterator, AsyncContextManager, Required, NotRequired
-from typing_extensions import Protocol, runtime, runtime_checkable, Annotated, final, is_typeddict
-from typing_extensions import TypeVarTuple, Unpack, dataclass_transform, reveal_type, Never, assert_never, LiteralString
-from typing_extensions import assert_type, get_type_hints, get_origin, get_args, get_original_bases
-from typing_extensions import clear_overloads, get_overloads, overload
-from typing_extensions import NamedTuple
-from typing_extensions import override, deprecated, Buffer, TypeAliasType, TypeVar, get_protocol_members, is_protocol
from _typed_dict_test_helper import Foo, FooGeneric, VeryAnnotated
+from typing_extensions import (
+ _FORWARD_REF_HAS_CLASS,
+ Annotated,
+ Any,
+ AnyStr,
+ AsyncContextManager,
+ AsyncIterator,
+ Awaitable,
+ Buffer,
+ Callable,
+ ClassVar,
+ Concatenate,
+ Dict,
+ Doc,
+ Final,
+ Format,
+ Generic,
+ IntVar,
+ Iterable,
+ Iterator,
+ List,
+ Literal,
+ LiteralString,
+ NamedTuple,
+ Never,
+ NewType,
+ NoDefault,
+ NoExtraItems,
+ NoReturn,
+ NotRequired,
+ Optional,
+ ParamSpec,
+ ParamSpecArgs,
+ ParamSpecKwargs,
+ Protocol,
+ ReadOnly,
+ Required,
+ Self,
+ Sentinel,
+ Set,
+ Tuple,
+ Type,
+ TypeAlias,
+ TypeAliasType,
+ TypedDict,
+ TypeForm,
+ TypeGuard,
+ TypeIs,
+ TypeVar,
+ TypeVarTuple,
+ Union,
+ Unpack,
+ assert_never,
+ assert_type,
+ clear_overloads,
+ dataclass_transform,
+ deprecated,
+ evaluate_forward_ref,
+ final,
+ get_annotations,
+ get_args,
+ get_origin,
+ get_original_bases,
+ get_overloads,
+ get_protocol_members,
+ get_type_hints,
+ is_protocol,
+ is_typeddict,
+ no_type_check,
+ overload,
+ override,
+ reveal_type,
+ runtime,
+ runtime_checkable,
+)
+
+NoneType = type(None)
+T = TypeVar("T")
+KT = TypeVar("KT")
+VT = TypeVar("VT")
# Flags used to mark tests that only apply after a specific
# version of the typing module.
-TYPING_3_8_0 = sys.version_info[:3] >= (3, 8, 0)
-TYPING_3_9_0 = sys.version_info[:3] >= (3, 9, 0)
TYPING_3_10_0 = sys.version_info[:3] >= (3, 10, 0)
# 3.11 makes runtime type checks (_type_check) more lenient.
TYPING_3_11_0 = sys.version_info[:3] >= (3, 11, 0)
# 3.12 changes the representation of Unpack[] (PEP 692)
+# and adds PEP 695 to CPython's grammar
TYPING_3_12_0 = sys.version_info[:3] >= (3, 12, 0)
-only_with_typing_Protocol = skipUnless(
- hasattr(typing, "Protocol"), "Only relevant when typing.Protocol exists"
-)
+# @deprecated works differently in Python 3.12
+TYPING_3_12_ONLY = (3, 12) <= sys.version_info < (3, 13)
+
+# 3.13 drops support for the keyword argument syntax of TypedDict
+TYPING_3_13_0 = sys.version_info[:3] >= (3, 13, 0)
+
+# 3.13.0.rc1 fixes a problem with @deprecated
+TYPING_3_13_0_RC = sys.version_info[:4] >= (3, 13, 0, "candidate")
+
+TYPING_3_14_0 = sys.version_info[:3] >= (3, 14, 0)
# https://github.com/python/cpython/pull/27017 was backported into some 3.9 and 3.10
# versions, but not all
HAS_FORWARD_MODULE = "module" in inspect.signature(typing._type_check).parameters
+skip_if_py313_beta_1 = skipIf(
+ sys.version_info[:5] == (3, 13, 0, 'beta', 1),
+ "Bugfixes will be released in 3.13.0b2"
+)
+
ANN_MODULE_SOURCE = '''\
-from typing import Optional
+import sys
+from typing import List, Optional
from functools import wraps
-__annotations__[1] = 2
+try:
+ __annotations__[1] = 2
+except NameError:
+ assert sys.version_info >= (3, 14)
class C:
@@ -74,8 +157,10 @@ class C:
x: int = 5; y: str = x; f: Tuple[int, int]
class M(type):
-
- __annotations__['123'] = 123
+ try:
+ __annotations__['123'] = 123
+ except NameError:
+ assert sys.version_info >= (3, 14)
o: type = object
(pars): bool = True
@@ -167,22 +252,235 @@ def g_bad_ann():
'''
+STOCK_ANNOTATIONS = """
+a:int=3
+b:str="foo"
+
+class MyClass:
+ a:int=4
+ b:str="bar"
+ def __init__(self, a, b):
+ self.a = a
+ self.b = b
+ def __eq__(self, other):
+ return isinstance(other, MyClass) and self.a == other.a and self.b == other.b
+
+def function(a:int, b:str) -> MyClass:
+ return MyClass(a, b)
+
+
+def function2(a:int, b:"str", c:MyClass) -> MyClass:
+ pass
+
+
+def function3(a:"int", b:"str", c:"MyClass"):
+ pass
+
+
+class UnannotatedClass:
+ pass
+
+def unannotated_function(a, b, c): pass
+"""
+
+STRINGIZED_ANNOTATIONS = """
+from __future__ import annotations
+
+a:int=3
+b:str="foo"
+
+class MyClass:
+ a:int=4
+ b:str="bar"
+ def __init__(self, a, b):
+ self.a = a
+ self.b = b
+ def __eq__(self, other):
+ return isinstance(other, MyClass) and self.a == other.a and self.b == other.b
+
+def function(a:int, b:str) -> MyClass:
+ return MyClass(a, b)
+
+
+def function2(a:int, b:"str", c:MyClass) -> MyClass:
+ pass
+
+
+def function3(a:"int", b:"str", c:"MyClass"):
+ pass
+
+
+class UnannotatedClass:
+ pass
+
+def unannotated_function(a, b, c): pass
+
+class MyClassWithLocalAnnotations:
+ mytype = int
+ x: mytype
+"""
+
+STRINGIZED_ANNOTATIONS_2 = """
+from __future__ import annotations
+
+
+def foo(a, b, c): pass
+"""
+
+if TYPING_3_12_0:
+ STRINGIZED_ANNOTATIONS_PEP_695 = textwrap.dedent(
+ """
+ from __future__ import annotations
+ from typing import Callable, Unpack
+
+
+ class A[T, *Ts, **P]:
+ x: T
+ y: tuple[*Ts]
+ z: Callable[P, str]
+
+
+ class B[T, *Ts, **P]:
+ T = int
+ Ts = str
+ P = bytes
+ x: T
+ y: Ts
+ z: P
+
+
+ Eggs = int
+ Spam = str
+
+
+ class C[Eggs, **Spam]:
+ x: Eggs
+ y: Spam
+
+
+ def generic_function[T, *Ts, **P](
+ x: T, *y: Unpack[Ts], z: P.args, zz: P.kwargs
+ ) -> None: ...
+
+
+ def generic_function_2[Eggs, **Spam](x: Eggs, y: Spam): pass
+
+
+ class D:
+ Foo = int
+ Bar = str
+
+ def generic_method[Foo, **Bar](
+ self, x: Foo, y: Bar
+ ) -> None: ...
+
+ def generic_method_2[Eggs, **Spam](self, x: Eggs, y: Spam): pass
+
+
+ # Eggs is `int` in globals, a TypeVar in type_params, and `str` in locals:
+ class E[Eggs]:
+ Eggs = str
+ x: Eggs
+
+
+
+ def nested():
+ from types import SimpleNamespace
+ from typing_extensions import get_annotations
+
+ Eggs = bytes
+ Spam = memoryview
+
+
+ class F[Eggs, **Spam]:
+ x: Eggs
+ y: Spam
+
+ def generic_method[Eggs, **Spam](self, x: Eggs, y: Spam): pass
+
+
+ def generic_function[Eggs, **Spam](x: Eggs, y: Spam): pass
+
+
+ # Eggs is `int` in globals, `bytes` in the function scope,
+ # a TypeVar in the type_params, and `str` in locals:
+ class G[Eggs]:
+ Eggs = str
+ x: Eggs
+
+
+ return SimpleNamespace(
+ F=F,
+ F_annotations=get_annotations(F, eval_str=True),
+ F_meth_annotations=get_annotations(F.generic_method, eval_str=True),
+ G_annotations=get_annotations(G, eval_str=True),
+ generic_func=generic_function,
+ generic_func_annotations=get_annotations(generic_function, eval_str=True)
+ )
+ """
+ )
+else:
+ STRINGIZED_ANNOTATIONS_PEP_695 = None
+
+
class BaseTestCase(TestCase):
def assertIsSubclass(self, cls, class_or_tuple, msg=None):
if not issubclass(cls, class_or_tuple):
- message = f'{cls!r} is not a subclass of {repr(class_or_tuple)}'
+ message = f'{cls!r} is not a subclass of {class_or_tuple!r}'
if msg is not None:
message += f' : {msg}'
raise self.failureException(message)
def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
if issubclass(cls, class_or_tuple):
- message = f'{cls!r} is a subclass of {repr(class_or_tuple)}'
+ message = f'{cls!r} is a subclass of {class_or_tuple!r}'
if msg is not None:
message += f' : {msg}'
raise self.failureException(message)
+class EqualToForwardRef:
+ """Helper to ease use of annotationlib.ForwardRef in tests.
+
+ This checks only attributes that can be set using the constructor.
+
+ """
+
+ def __init__(
+ self,
+ arg,
+ *,
+ module=None,
+ owner=None,
+ is_class=False,
+ ):
+ self.__forward_arg__ = arg
+ self.__forward_is_class__ = is_class
+ self.__forward_module__ = module
+ self.__owner__ = owner
+
+ def __eq__(self, other):
+ if not isinstance(other, (EqualToForwardRef, typing.ForwardRef)):
+ return NotImplemented
+ if sys.version_info >= (3, 14) and self.__owner__ != other.__owner__:
+ return False
+ return (
+ self.__forward_arg__ == other.__forward_arg__
+ and self.__forward_module__ == other.__forward_module__
+ and self.__forward_is_class__ == other.__forward_is_class__
+ )
+
+ def __repr__(self):
+ extra = []
+ if self.__forward_module__ is not None:
+ extra.append(f", module={self.__forward_module__!r}")
+ if self.__forward_is_class__:
+ extra.append(", is_class=True")
+ if sys.version_info >= (3, 14) and self.__owner__ is not None:
+ extra.append(f", owner={self.__owner__!r}")
+ return f"EqualToForwardRef({self.__forward_arg__!r}{''.join(extra)})"
+
+
class Employee:
pass
@@ -217,7 +515,7 @@ def test_cannot_subclass(self):
class A(self.bottom_type):
pass
with self.assertRaises(TypeError):
- class A(type(self.bottom_type)):
+ class B(type(self.bottom_type)):
pass
def test_cannot_instantiate(self):
@@ -227,7 +525,7 @@ def test_cannot_instantiate(self):
type(self.bottom_type)()
def test_pickle(self):
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(self.bottom_type, protocol=proto)
self.assertIs(self.bottom_type, pickle.loads(pickled))
@@ -246,13 +544,7 @@ def some(arg: NoReturn) -> NoReturn: ...
def some_str(arg: 'NoReturn') -> 'typing.NoReturn': ...
expected = {'arg': NoReturn, 'return': NoReturn}
- targets = [some]
-
- # On 3.7.0 and 3.7.1, https://github.com/python/cpython/pull/10772
- # wasn't applied yet and NoReturn fails _type_check.
- if not ((3, 7, 0) <= sys.version_info < (3, 7, 2)):
- targets.append(some_str)
- for target in targets:
+ for target in some, some_str:
with self.subTest(target=target):
self.assertEqual(gth(target), expected)
@@ -285,6 +577,19 @@ def test_exception(self):
with self.assertRaises(AssertionError):
assert_never(None)
+ value = "some value"
+ with self.assertRaisesRegex(AssertionError, value):
+ assert_never(value)
+
+ # Make sure a huge value doesn't get printed in its entirety
+ huge_value = "a" * 10000
+ with self.assertRaises(AssertionError) as cm:
+ assert_never(huge_value)
+ self.assertLess(
+ len(cm.exception.args[0]),
+ typing_extensions._ASSERT_NEVER_REPR_MAX_LENGTH * 2,
+ )
+
class OverrideTests(BaseTestCase):
def test_override(self):
@@ -312,7 +617,6 @@ def static_method_good_order():
def static_method_bad_order():
return 42
-
self.assertIsSubclass(Derived, Base)
instance = Derived()
self.assertEqual(instance.normal_method(), 42)
@@ -428,6 +732,112 @@ def __new__(cls, x):
self.assertEqual(instance.x, 42)
self.assertTrue(new_called)
+ def test_mixin_class(self):
+ @deprecated("Mixin will go away soon")
+ class Mixin:
+ pass
+
+ class Base:
+ def __init__(self, a) -> None:
+ self.a = a
+
+ with self.assertWarnsRegex(DeprecationWarning, "Mixin will go away soon"):
+ class Child(Base, Mixin):
+ pass
+
+ instance = Child(42)
+ self.assertEqual(instance.a, 42)
+
+ def test_do_not_shadow_user_arguments(self):
+ new_called = False
+ new_called_cls = None
+
+ @deprecated("MyMeta will go away soon")
+ class MyMeta(type):
+ def __new__(mcs, name, bases, attrs, cls=None):
+ nonlocal new_called, new_called_cls
+ new_called = True
+ new_called_cls = cls
+ return super().__new__(mcs, name, bases, attrs)
+
+ with self.assertWarnsRegex(DeprecationWarning, "MyMeta will go away soon"):
+ class Foo(metaclass=MyMeta, cls='haha'):
+ pass
+
+ self.assertTrue(new_called)
+ self.assertEqual(new_called_cls, 'haha')
+
+ def test_existing_init_subclass(self):
+ @deprecated("C will go away soon")
+ class C:
+ def __init_subclass__(cls) -> None:
+ cls.inited = True
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ C()
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ class D(C):
+ pass
+
+ self.assertTrue(D.inited)
+ self.assertIsInstance(D(), D) # no deprecation
+
+ def test_existing_init_subclass_in_base(self):
+ class Base:
+ def __init_subclass__(cls, x) -> None:
+ cls.inited = x
+
+ @deprecated("C will go away soon")
+ class C(Base, x=42):
+ pass
+
+ self.assertEqual(C.inited, 42)
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ C()
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ class D(C, x=3):
+ pass
+
+ self.assertEqual(D.inited, 3)
+
+ def test_init_subclass_has_correct_cls(self):
+ init_subclass_saw = None
+
+ @deprecated("Base will go away soon")
+ class Base:
+ def __init_subclass__(cls) -> None:
+ nonlocal init_subclass_saw
+ init_subclass_saw = cls
+
+ self.assertIsNone(init_subclass_saw)
+
+ with self.assertWarnsRegex(DeprecationWarning, "Base will go away soon"):
+ class C(Base):
+ pass
+
+ self.assertIs(init_subclass_saw, C)
+
+ def test_init_subclass_with_explicit_classmethod(self):
+ init_subclass_saw = None
+
+ @deprecated("Base will go away soon")
+ class Base:
+ @classmethod
+ def __init_subclass__(cls) -> None:
+ nonlocal init_subclass_saw
+ init_subclass_saw = cls
+
+ self.assertIsNone(init_subclass_saw)
+
+ with self.assertWarnsRegex(DeprecationWarning, "Base will go away soon"):
+ class C(Base):
+ pass
+
+ self.assertIs(init_subclass_saw, C)
+
def test_function(self):
@deprecated("b will go away soon")
def b():
@@ -490,6 +900,62 @@ def d():
warnings.simplefilter("error")
d()
+ def test_only_strings_allowed(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ "Expected an object of type str for 'message', not 'type'"
+ ):
+ @deprecated
+ class Foo: ...
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Expected an object of type str for 'message', not 'function'"
+ ):
+ @deprecated
+ def foo(): ...
+
+ def test_no_retained_references_to_wrapper_instance(self):
+ @deprecated('depr')
+ def d(): pass
+
+ self.assertFalse(any(
+ isinstance(cell.cell_contents, deprecated) for cell in d.__closure__
+ ))
+
+@deprecated("depr")
+def func():
+ pass
+
+@deprecated("depr")
+async def coro():
+ pass
+
+class Cls:
+ @deprecated("depr")
+ def func(self):
+ pass
+
+ @deprecated("depr")
+ async def coro(self):
+ pass
+
+class DeprecatedCoroTests(BaseTestCase):
+ def test_asyncio_iscoroutinefunction(self):
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", DeprecationWarning)
+ self.assertFalse(asyncio.coroutines.iscoroutinefunction(func))
+ self.assertFalse(asyncio.coroutines.iscoroutinefunction(Cls.func))
+ self.assertTrue(asyncio.coroutines.iscoroutinefunction(coro))
+ self.assertTrue(asyncio.coroutines.iscoroutinefunction(Cls.coro))
+
+ @skipUnless(TYPING_3_12_ONLY or TYPING_3_13_0_RC, "inspect.iscoroutinefunction works differently on Python < 3.12")
+ def test_inspect_iscoroutinefunction(self):
+ self.assertFalse(inspect.iscoroutinefunction(func))
+ self.assertFalse(inspect.iscoroutinefunction(Cls.func))
+ self.assertTrue(inspect.iscoroutinefunction(coro))
+ self.assertTrue(inspect.iscoroutinefunction(Cls.coro))
+
class AnyTests(BaseTestCase):
def test_can_subclass(self):
@@ -565,7 +1031,7 @@ def test_cannot_subclass(self):
class C(type(ClassVar)):
pass
with self.assertRaises(TypeError):
- class C(type(ClassVar[int])):
+ class D(type(ClassVar[int])):
pass
def test_cannot_init(self):
@@ -595,22 +1061,18 @@ def test_basics(self):
Final[int][str]
def test_repr(self):
- if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
- mod_name = 'typing'
- else:
- mod_name = 'typing_extensions'
- self.assertEqual(repr(Final), mod_name + '.Final')
+ self.assertEqual(repr(Final), 'typing.Final')
cv = Final[int]
- self.assertEqual(repr(cv), mod_name + '.Final[int]')
+ self.assertEqual(repr(cv), 'typing.Final[int]')
cv = Final[Employee]
- self.assertEqual(repr(cv), mod_name + f'.Final[{__name__}.Employee]')
+ self.assertEqual(repr(cv), f'typing.Final[{__name__}.Employee]')
def test_cannot_subclass(self):
with self.assertRaises(TypeError):
class C(type(Final)):
pass
with self.assertRaises(TypeError):
- class C(type(Final[int])):
+ class D(type(Final[int])):
pass
def test_cannot_init(self):
@@ -644,18 +1106,18 @@ def test_repr(self):
mod_name = 'typing'
else:
mod_name = 'typing_extensions'
- self.assertEqual(repr(Required), mod_name + '.Required')
+ self.assertEqual(repr(Required), f'{mod_name}.Required')
cv = Required[int]
- self.assertEqual(repr(cv), mod_name + '.Required[int]')
+ self.assertEqual(repr(cv), f'{mod_name}.Required[int]')
cv = Required[Employee]
- self.assertEqual(repr(cv), mod_name + '.Required[%s.Employee]' % __name__)
+ self.assertEqual(repr(cv), f'{mod_name}.Required[{__name__}.Employee]')
def test_cannot_subclass(self):
with self.assertRaises(TypeError):
class C(type(Required)):
pass
with self.assertRaises(TypeError):
- class C(type(Required[int])):
+ class D(type(Required[int])):
pass
def test_cannot_init(self):
@@ -689,18 +1151,18 @@ def test_repr(self):
mod_name = 'typing'
else:
mod_name = 'typing_extensions'
- self.assertEqual(repr(NotRequired), mod_name + '.NotRequired')
+ self.assertEqual(repr(NotRequired), f'{mod_name}.NotRequired')
cv = NotRequired[int]
- self.assertEqual(repr(cv), mod_name + '.NotRequired[int]')
+ self.assertEqual(repr(cv), f'{mod_name}.NotRequired[int]')
cv = NotRequired[Employee]
- self.assertEqual(repr(cv), mod_name + '.NotRequired[%s.Employee]' % __name__)
+        self.assertEqual(repr(cv), f'{mod_name}.NotRequired[{__name__}.Employee]')
def test_cannot_subclass(self):
with self.assertRaises(TypeError):
class C(type(NotRequired)):
pass
with self.assertRaises(TypeError):
- class C(type(NotRequired[int])):
+ class D(type(NotRequired[int])):
pass
def test_cannot_init(self):
@@ -720,15 +1182,15 @@ def test_no_isinstance(self):
class IntVarTests(BaseTestCase):
def test_valid(self):
- T_ints = IntVar("T_ints")
+ IntVar("T_ints")
def test_invalid(self):
with self.assertRaises(TypeError):
- T_ints = IntVar("T_ints", int)
+ IntVar("T_ints", int)
with self.assertRaises(TypeError):
- T_ints = IntVar("T_ints", bound=int)
+ IntVar("T_ints", bound=int)
with self.assertRaises(TypeError):
- T_ints = IntVar("T_ints", covariant=True)
+ IntVar("T_ints", covariant=True)
class LiteralTests(BaseTestCase):
@@ -751,7 +1213,7 @@ def test_illegal_parameters_do_not_raise_runtime_errors(self):
Literal[int]
Literal[Literal[1, 2], Literal[4, 5]]
Literal[3j + 2, ..., ()]
- Literal[b"foo", u"bar"]
+ Literal[b"foo", "bar"]
Literal[{"foo": 3, "bar": 4}]
Literal[T]
@@ -1075,7 +1537,6 @@ async def __aexit__(self, etype, eval, tb):
return None
-
class A:
y: float
class B(A):
@@ -1196,7 +1657,10 @@ def tearDownClass(cls):
del sys.modules[modname]
def test_get_type_hints_modules(self):
- ann_module_type_hints = {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str}
+ if sys.version_info >= (3, 14):
+ ann_module_type_hints = {'f': Tuple[int, int], 'x': int, 'y': str}
+ else:
+ ann_module_type_hints = {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str}
self.assertEqual(gth(self.ann_module), ann_module_type_hints)
self.assertEqual(gth(self.ann_module2), {})
self.assertEqual(gth(self.ann_module3), {})
@@ -1205,7 +1669,10 @@ def test_get_type_hints_classes(self):
self.assertEqual(gth(self.ann_module.C, self.ann_module.__dict__),
{'y': Optional[self.ann_module.C]})
self.assertIsInstance(gth(self.ann_module.j_class), dict)
- self.assertEqual(gth(self.ann_module.M), {'123': 123, 'o': type})
+ if sys.version_info >= (3, 14):
+ self.assertEqual(gth(self.ann_module.M), {'o': type})
+ else:
+ self.assertEqual(gth(self.ann_module.M), {'123': 123, 'o': type})
self.assertEqual(gth(self.ann_module.D),
{'j': str, 'k': str, 'y': Optional[self.ann_module.C]})
self.assertEqual(gth(self.ann_module.Y), {'z': int})
@@ -1220,7 +1687,7 @@ def test_respect_no_type_check(self):
@no_type_check
class NoTpCheck:
class Inn:
- def __init__(self, x: 'not a type'): ...
+ def __init__(self, x: 'not a type'): ... # noqa: F722 # (yes, there's a syntax error in this annotation, that's the point)
self.assertTrue(NoTpCheck.__no_type_check__)
self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
self.assertEqual(gth(self.ann_module2.NTC.meth), {})
@@ -1245,6 +1712,95 @@ def test_final_forward_ref(self):
self.assertNotEqual(gth(Loop, globals())['attr'], Final[int])
self.assertNotEqual(gth(Loop, globals())['attr'], Final)
+ def test_annotation_and_optional_default(self):
+ annotation = Annotated[Union[int, None], "data"]
+ NoneAlias = None
+ StrAlias = str
+ T_default = TypeVar("T_default", default=None)
+ Ts = TypeVarTuple("Ts")
+
+ cases = {
+ # annotation: expected_type_hints
+ Annotated[None, "none"] : Annotated[None, "none"],
+ annotation : annotation,
+ Optional[int] : Optional[int],
+ Optional[List[str]] : Optional[List[str]],
+ Optional[annotation] : Optional[annotation],
+ Union[str, None, str] : Optional[str],
+ Unpack[Tuple[int, None]]: Unpack[Tuple[int, None]],
+ # Note: A starred *Ts will use typing.Unpack in 3.11+ see Issue #485
+ Unpack[Ts] : Unpack[Ts],
+ }
+ # contains a ForwardRef, TypeVar(~prefix) or no expression
+ do_not_stringify_cases = {
+ () : {}, # Special-cased below to create an unannotated parameter
+ int : int,
+ "int" : int,
+ None : type(None),
+ "NoneAlias" : type(None),
+ List["str"] : List[str],
+ Union[str, "str"] : str,
+ Union[str, None, "str"] : Optional[str],
+ Union[str, "NoneAlias", "StrAlias"]: Optional[str],
+ Union[str, "Union[None, StrAlias]"]: Optional[str],
+ Union["annotation", T_default] : Union[annotation, T_default],
+ Annotated["annotation", "nested"] : Annotated[Union[int, None], "data", "nested"],
+ }
+ if TYPING_3_10_0: # cannot construct UnionTypes before 3.10
+ do_not_stringify_cases["str | NoneAlias | StrAlias"] = str | None
+ cases[str | None] = Optional[str]
+ cases.update(do_not_stringify_cases)
+ for (annot, expected), none_default, as_str, wrap_optional in itertools.product(
+ cases.items(), (False, True), (False, True), (False, True)
+ ):
+ # Special case:
+ skip_reason = None
+ annot_unchanged = annot
+ if sys.version_info[:2] == (3, 10) and annot == "str | NoneAlias | StrAlias" and none_default:
+                # 3.10 converts Optional[str | None] to Optional[str], which has a different repr
+ skip_reason = "UnionType not preserved in 3.10"
+ if wrap_optional:
+ if annot_unchanged == ():
+ continue
+ annot = Optional[annot]
+ expected = {"x": Optional[expected]}
+ else:
+ expected = {"x": expected} if annot_unchanged != () else {}
+ if as_str:
+ if annot_unchanged in do_not_stringify_cases or annot_unchanged == ():
+ continue
+ annot = str(annot)
+ with self.subTest(
+ annotation=annot,
+ as_str=as_str,
+ wrap_optional=wrap_optional,
+ none_default=none_default,
+ expected_type_hints=expected,
+ ):
+ # Create function to check
+ if annot_unchanged == ():
+ if none_default:
+ def func(x=None): pass
+ else:
+ def func(x): pass
+ elif none_default:
+ def func(x: annot = None): pass
+ else:
+ def func(x: annot): pass
+ type_hints = get_type_hints(func, globals(), locals(), include_extras=True)
+ # Equality
+ self.assertEqual(type_hints, expected)
+ # Hash
+ for k in type_hints.keys():
+ self.assertEqual(hash(type_hints[k]), hash(expected[k]))
+ # Test if UnionTypes are preserved
+ self.assertIs(type(type_hints[k]), type(expected[k]))
+ # Repr
+ with self.subTest("Check str and repr"):
+ if skip_reason == "UnionType not preserved in 3.10":
+ self.skipTest(skip_reason)
+ self.assertEqual(repr(type_hints), repr(expected))
+
class GetUtilitiesTestCase(TestCase):
def test_get_origin(self):
@@ -1266,8 +1822,7 @@ class C(Generic[T]): pass
self.assertIs(get_origin(List), list)
self.assertIs(get_origin(Tuple), tuple)
self.assertIs(get_origin(Callable), collections.abc.Callable)
- if sys.version_info >= (3, 9):
- self.assertIs(get_origin(list[int]), list)
+ self.assertIs(get_origin(list[int]), list)
self.assertIs(get_origin(list), None)
self.assertIs(get_origin(P.args), P)
self.assertIs(get_origin(P.kwargs), P)
@@ -1304,28 +1859,28 @@ class C(Generic[T]): pass
self.assertEqual(get_args(List), ())
self.assertEqual(get_args(Tuple), ())
self.assertEqual(get_args(Callable), ())
- if sys.version_info >= (3, 9):
- self.assertEqual(get_args(list[int]), (int,))
+ self.assertEqual(get_args(list[int]), (int,))
self.assertEqual(get_args(list), ())
- if sys.version_info >= (3, 9):
- # Support Python versions with and without the fix for
- # https://bugs.python.org/issue42195
- # The first variant is for 3.9.2+, the second for 3.9.0 and 1
- self.assertIn(get_args(collections.abc.Callable[[int], str]),
- (([int], str), ([[int]], str)))
- self.assertIn(get_args(collections.abc.Callable[[], str]),
- (([], str), ([[]], str)))
- self.assertEqual(get_args(collections.abc.Callable[..., str]), (..., str))
+ # Support Python versions with and without the fix for
+ # https://bugs.python.org/issue42195
+ # The first variant is for 3.9.2+, the second for 3.9.0 and 1
+ self.assertIn(get_args(collections.abc.Callable[[int], str]),
+ (([int], str), ([[int]], str)))
+ self.assertIn(get_args(collections.abc.Callable[[], str]),
+ (([], str), ([[]], str)))
+ self.assertEqual(get_args(collections.abc.Callable[..., str]), (..., str))
P = ParamSpec('P')
- # In 3.9 and lower we use typing_extensions's hacky implementation
+ # In 3.9 we use typing_extensions's hacky implementation
# of ParamSpec, which gets incorrectly wrapped in a list
self.assertIn(get_args(Callable[P, int]), [(P, int), ([P], int)])
- self.assertEqual(get_args(Callable[Concatenate[int, P], int]),
- (Concatenate[int, P], int))
self.assertEqual(get_args(Required[int]), (int,))
self.assertEqual(get_args(NotRequired[int]), (int,))
self.assertEqual(get_args(Unpack[Ts]), (Ts,))
self.assertEqual(get_args(Unpack), ())
+ self.assertEqual(get_args(Callable[Concatenate[int, P], int]),
+ (Concatenate[int, P], int))
+ self.assertEqual(get_args(Callable[Concatenate[int, ...], int]),
+ (Concatenate[int, ...], int))
class CollectionsAbcTests(BaseTestCase):
@@ -1471,12 +2026,92 @@ class MyCounter(typing_extensions.Counter[int]):
self.assertIsInstance(d, collections.Counter)
self.assertIsInstance(d, typing_extensions.Counter)
- def test_async_generator(self):
- async def f():
+
+# These are a separate TestCase class,
+# as (unlike most collections.abc aliases in typing_extensions),
+# these are reimplemented on Python <=3.12 so that we can provide
+# default values for the second and third parameters
+class GeneratorTests(BaseTestCase):
+
+ def test_generator_basics(self):
+ def foo():
yield 42
+ g = foo()
+
+ self.assertIsInstance(g, typing_extensions.Generator)
+ self.assertNotIsInstance(foo, typing_extensions.Generator)
+ self.assertIsSubclass(type(g), typing_extensions.Generator)
+ self.assertNotIsSubclass(type(foo), typing_extensions.Generator)
+
+ parameterized = typing_extensions.Generator[int, str, None]
+ with self.assertRaises(TypeError):
+ isinstance(g, parameterized)
+ with self.assertRaises(TypeError):
+ issubclass(type(g), parameterized)
+
+ def test_generator_default(self):
+ g1 = typing_extensions.Generator[int]
+ g2 = typing_extensions.Generator[int, None, None]
+ self.assertEqual(get_args(g1), (int, type(None), type(None)))
+ self.assertEqual(get_args(g1), get_args(g2))
+
+ g3 = typing_extensions.Generator[int, float]
+ g4 = typing_extensions.Generator[int, float, None]
+ self.assertEqual(get_args(g3), (int, float, type(None)))
+ self.assertEqual(get_args(g3), get_args(g4))
+
+ def test_no_generator_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing_extensions.Generator()
+ with self.assertRaises(TypeError):
+ typing_extensions.Generator[T, T, T]()
+ with self.assertRaises(TypeError):
+ typing_extensions.Generator[int, int, int]()
+
+ def test_subclassing_generator(self):
+ class G(typing_extensions.Generator[int, int, None]):
+ def send(self, value):
+ pass
+ def throw(self, typ, val=None, tb=None):
+ pass
+
+ def g(): yield 0
+
+ self.assertIsSubclass(G, typing_extensions.Generator)
+ self.assertIsSubclass(G, typing_extensions.Iterable)
+ self.assertIsSubclass(G, collections.abc.Generator)
+ self.assertIsSubclass(G, collections.abc.Iterable)
+ self.assertNotIsSubclass(type(g), G)
+
+ instance = G()
+ self.assertIsInstance(instance, typing_extensions.Generator)
+ self.assertIsInstance(instance, typing_extensions.Iterable)
+ self.assertIsInstance(instance, collections.abc.Generator)
+ self.assertIsInstance(instance, collections.abc.Iterable)
+ self.assertNotIsInstance(type(g), G)
+ self.assertNotIsInstance(g, G)
+ def test_async_generator_basics(self):
+ async def f():
+ yield 42
g = f()
+
+ self.assertIsInstance(g, typing_extensions.AsyncGenerator)
self.assertIsSubclass(type(g), typing_extensions.AsyncGenerator)
+ self.assertNotIsInstance(f, typing_extensions.AsyncGenerator)
+ self.assertNotIsSubclass(type(f), typing_extensions.AsyncGenerator)
+
+ parameterized = typing_extensions.AsyncGenerator[int, str]
+ with self.assertRaises(TypeError):
+ isinstance(g, parameterized)
+ with self.assertRaises(TypeError):
+ issubclass(type(g), parameterized)
+
+ def test_async_generator_default(self):
+ ag1 = typing_extensions.AsyncGenerator[int]
+ ag2 = typing_extensions.AsyncGenerator[int, None]
+ self.assertEqual(get_args(ag1), (int, type(None)))
+ self.assertEqual(get_args(ag1), get_args(ag2))
def test_no_async_generator_instantiation(self):
with self.assertRaises(TypeError):
@@ -1509,6 +2144,68 @@ async def g(): yield 0
self.assertNotIsInstance(type(g), G)
self.assertNotIsInstance(g, G)
+ def test_subclassing_subclasshook(self):
+
+ class Base(typing_extensions.Generator):
+ @classmethod
+ def __subclasshook__(cls, other):
+ if other.__name__ == 'Foo':
+ return True
+ else:
+ return False
+
+ class C(Base): ...
+ class Foo: ...
+ class Bar: ...
+ self.assertIsSubclass(Foo, Base)
+ self.assertIsSubclass(Foo, C)
+ self.assertNotIsSubclass(Bar, C)
+
+ def test_subclassing_register(self):
+
+ class A(typing_extensions.Generator): ...
+ class B(A): ...
+
+ class C: ...
+ A.register(C)
+ self.assertIsSubclass(C, A)
+ self.assertNotIsSubclass(C, B)
+
+ class D: ...
+ B.register(D)
+ self.assertIsSubclass(D, A)
+ self.assertIsSubclass(D, B)
+
+ class M: ...
+ collections.abc.Generator.register(M)
+ self.assertIsSubclass(M, typing_extensions.Generator)
+
+ def test_collections_as_base(self):
+
+ class M(collections.abc.Generator): ...
+ self.assertIsSubclass(M, typing_extensions.Generator)
+ self.assertIsSubclass(M, typing_extensions.Iterable)
+
+ class S(collections.abc.AsyncGenerator): ...
+ self.assertIsSubclass(S, typing_extensions.AsyncGenerator)
+ self.assertIsSubclass(S, typing_extensions.AsyncIterator)
+
+ class A(collections.abc.Generator, metaclass=abc.ABCMeta): ...
+ class B: ...
+ A.register(B)
+ self.assertIsSubclass(B, typing_extensions.Generator)
+
+ @skipIf(sys.version_info < (3, 10), "PEP 604 has yet to be")
+ def test_or_and_ror(self):
+ self.assertEqual(
+ typing_extensions.Generator | typing_extensions.AsyncGenerator,
+ Union[typing_extensions.Generator, typing_extensions.AsyncGenerator]
+ )
+ self.assertEqual(
+ typing_extensions.Generator | typing.Deque,
+ Union[typing_extensions.Generator, typing.Deque]
+ )
+
class OtherABCTests(BaseTestCase):
@@ -1521,6 +2218,12 @@ def manager():
self.assertIsInstance(cm, typing_extensions.ContextManager)
self.assertNotIsInstance(42, typing_extensions.ContextManager)
+ def test_contextmanager_type_params(self):
+ cm1 = typing_extensions.ContextManager[int]
+ self.assertEqual(get_args(cm1), (int, typing.Optional[bool]))
+ cm2 = typing_extensions.ContextManager[int, None]
+ self.assertEqual(get_args(cm2), (int, NoneType))
+
def test_async_contextmanager(self):
class NotACM:
pass
@@ -1532,11 +2235,20 @@ def manager():
cm = manager()
self.assertNotIsInstance(cm, typing_extensions.AsyncContextManager)
- self.assertEqual(typing_extensions.AsyncContextManager[int].__args__, (int,))
+ self.assertEqual(
+ typing_extensions.AsyncContextManager[int].__args__,
+ (int, typing.Optional[bool])
+ )
with self.assertRaises(TypeError):
isinstance(42, typing_extensions.AsyncContextManager[int])
with self.assertRaises(TypeError):
- typing_extensions.AsyncContextManager[int, str]
+ typing_extensions.AsyncContextManager[int, str, float]
+
+ def test_asynccontextmanager_type_params(self):
+ cm1 = typing_extensions.AsyncContextManager[int]
+ self.assertEqual(get_args(cm1), (int, typing.Optional[bool]))
+ cm2 = typing_extensions.AsyncContextManager[int, None]
+ self.assertEqual(get_args(cm2), (int, NoneType))
class TypeTests(BaseTestCase):
@@ -1761,17 +2473,16 @@ class BP(Protocol): pass
class P(C, Protocol):
pass
with self.assertRaises(TypeError):
- class P(Protocol, C):
+ class Q(Protocol, C):
pass
with self.assertRaises(TypeError):
- class P(BP, C, Protocol):
+ class R(BP, C, Protocol):
pass
class D(BP, C): pass
class E(C, BP): pass
self.assertNotIsInstance(D(), E)
self.assertNotIsInstance(E(), D)
- @only_with_typing_Protocol
def test_runtimecheckable_on_typing_dot_Protocol(self):
@runtime_checkable
class Foo(typing.Protocol):
@@ -1784,7 +2495,6 @@ def __init__(self):
self.assertIsInstance(Bar(), Foo)
self.assertNotIsInstance(object(), Foo)
- @only_with_typing_Protocol
def test_typing_dot_runtimecheckable_on_Protocol(self):
@typing.runtime_checkable
class Foo(Protocol):
@@ -1797,7 +2507,6 @@ def __init__(self):
self.assertIsInstance(Bar(), Foo)
self.assertNotIsInstance(object(), Foo)
- @only_with_typing_Protocol
def test_typing_Protocol_and_extensions_Protocol_can_mix(self):
class TypingProto(typing.Protocol):
x: int
@@ -2080,7 +2789,7 @@ class NotAProtocolButAnImplicitSubclass3:
meth: Callable[[], None]
meth2: Callable[[int, str], bool]
def meth(self): pass
- def meth(self, x, y): return True
+ def meth2(self, x, y): return True
self.assertNotIsSubclass(AnnotatedButNotAProtocol, CallableMembersProto)
self.assertIsSubclass(NotAProtocolButAnImplicitSubclass, CallableMembersProto)
@@ -2523,13 +3232,46 @@ class Bad: pass
self.assertNotIsInstance(Other(), Concrete)
self.assertIsInstance(NT(1, 2), Position)
- def test_protocols_isinstance_init(self):
- T = TypeVar('T')
+ def test_runtime_checkable_with_match_args(self):
@runtime_checkable
- class P(Protocol):
- x = 1
+ class P_regular(Protocol):
+ x: int
+ y: int
+
@runtime_checkable
- class PG(Protocol[T]):
+ class P_match(Protocol):
+ __match_args__ = ("x", "y")
+ x: int
+ y: int
+
+ class Regular:
+ def __init__(self, x: int, y: int):
+ self.x = x
+ self.y = y
+
+ class WithMatch:
+ __match_args__ = ("x", "y", "z")
+ def __init__(self, x: int, y: int, z: int):
+ self.x = x
+ self.y = y
+ self.z = z
+
+ class Nope: ...
+
+ self.assertIsInstance(Regular(1, 2), P_regular)
+ self.assertIsInstance(Regular(1, 2), P_match)
+ self.assertIsInstance(WithMatch(1, 2, 3), P_regular)
+ self.assertIsInstance(WithMatch(1, 2, 3), P_match)
+ self.assertNotIsInstance(Nope(), P_regular)
+ self.assertNotIsInstance(Nope(), P_match)
+
+ def test_protocols_isinstance_init(self):
+ T = TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ @runtime_checkable
+ class PG(Protocol[T]):
x = 1
class C:
def __init__(self, x):
@@ -2675,7 +3417,7 @@ class NonP(P):
class NonPR(PR): pass
class C(metaclass=abc.ABCMeta):
x = 1
- class D(metaclass=abc.ABCMeta): # noqa: B024
+ class D(metaclass=abc.ABCMeta):
def meth(self): pass # noqa: B027
self.assertNotIsInstance(C(), NonP)
self.assertNotIsInstance(D(), NonPR)
@@ -2686,12 +3428,12 @@ def meth(self): pass # noqa: B027
self.assertNotIn("__protocol_attrs__", vars(NonP))
self.assertNotIn("__protocol_attrs__", vars(NonPR))
- self.assertNotIn("__callable_proto_members_only__", vars(NonP))
- self.assertNotIn("__callable_proto_members_only__", vars(NonPR))
+ self.assertNotIn("__non_callable_proto_members__", vars(NonP))
+ self.assertNotIn("__non_callable_proto_members__", vars(NonPR))
acceptable_extra_attrs = {
'_is_protocol', '_is_runtime_protocol', '__parameters__',
- '__init__', '__annotations__', '__subclasshook__',
+ '__init__', '__annotations__', '__subclasshook__', '__annotate__'
}
self.assertLessEqual(vars(NonP).keys(), vars(C).keys() | acceptable_extra_attrs)
self.assertLessEqual(
@@ -2760,11 +3502,26 @@ def __subclasshook__(cls, other):
@skip_if_py312b1
def test_issubclass_fails_correctly(self):
@runtime_checkable
- class P(Protocol):
+ class NonCallableMembers(Protocol):
x = 1
+
+ class NotRuntimeCheckable(Protocol):
+ def callable_member(self) -> int: ...
+
+ @runtime_checkable
+ class RuntimeCheckable(Protocol):
+ def callable_member(self) -> int: ...
+
class C: pass
- with self.assertRaisesRegex(TypeError, r"issubclass\(\) arg 1 must be a class"):
- issubclass(C(), P)
+
+ # These three all exercise different code paths,
+ # but should result in the same error message:
+ for protocol in NonCallableMembers, NotRuntimeCheckable, RuntimeCheckable:
+ with self.subTest(proto_name=protocol.__name__):
+ with self.assertRaisesRegex(
+ TypeError, r"issubclass\(\) arg 1 must be a class"
+ ):
+ issubclass(C(), protocol)
def test_defining_generic_protocols(self):
T = TypeVar('T')
@@ -2878,11 +3635,11 @@ def test_protocols_bad_subscripts(self):
with self.assertRaises(TypeError):
class P(Protocol[T, T]): pass
with self.assertRaises(TypeError):
- class P(Protocol[int]): pass
+ class P2(Protocol[int]): pass
with self.assertRaises(TypeError):
- class P(Protocol[T], Protocol[S]): pass
+ class P3(Protocol[T], Protocol[S]): pass
with self.assertRaises(TypeError):
- class P(typing.Mapping[T, S], Protocol[T]): pass
+ class P4(typing.Mapping[T, S], Protocol[T]): pass
def test_generic_protocols_repr(self):
T = TypeVar('T')
@@ -2946,7 +3703,7 @@ def test_none_treated_correctly(self):
@runtime_checkable
class P(Protocol):
x: int = None
- class B(object): pass
+ class B: pass
self.assertNotIsInstance(B(), P)
class C:
x = 1
@@ -3091,7 +3848,7 @@ def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
MemoizedFunc[[int, str, str]]
if sys.version_info >= (3, 10):
- # These unfortunately don't pass on <=3.9,
+ # These unfortunately don't pass on 3.9,
# due to typing._type_check on older Python versions
X = MemoizedFunc[[int, str, str], T, T2]
self.assertEqual(X.__parameters__, (T, T2))
@@ -3101,6 +3858,10 @@ def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
self.assertEqual(Y.__parameters__, ())
self.assertEqual(Y.__args__, ((int, str, str), bytes, memoryview))
+ # Regression test; fixing #126 might cause an error here
+ with self.assertRaisesRegex(TypeError, "not a generic class"):
+ Y[int]
+
def test_protocol_generic_over_typevartuple(self):
Ts = TypeVarTuple("Ts")
T = TypeVar("T")
@@ -3113,7 +3874,7 @@ def __call__(self, *args: Unpack[Ts]) -> T: ...
self.assertEqual(MemoizedFunc.__parameters__, (Ts, T, T2))
self.assertTrue(MemoizedFunc._is_protocol)
- things = "arguments" if sys.version_info >= (3, 11) else "parameters"
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
# A bug was fixed in 3.11.1
# (https://github.com/python/cpython/commit/74920aa27d0c57443dd7f704d6272cca9c507ab3)
@@ -3173,7 +3934,6 @@ def c(self) -> int: return 5
with self.assertRaisesRegex(TypeError, "not a Protocol"):
get_protocol_members(ConcreteInherit())
- @only_with_typing_Protocol
def test_get_protocol_members_typing(self):
with self.assertRaisesRegex(TypeError, "not a Protocol"):
get_protocol_members(typing.Protocol)
@@ -3222,7 +3982,6 @@ def test_is_protocol(self):
# Protocol is not itself a protocol
self.assertFalse(is_protocol(Protocol))
- @only_with_typing_Protocol
def test_is_protocol_with_typing(self):
self.assertFalse(is_protocol(typing.Protocol))
@@ -3325,6 +4084,94 @@ def method(self) -> None: ...
self.assertIsInstance(Foo(), ProtocolWithMixedMembers)
self.assertNotIsInstance(42, ProtocolWithMixedMembers)
+ def test_protocol_issubclass_error_message(self):
+ @runtime_checkable
+ class Vec2D(Protocol):
+ x: float
+ y: float
+
+ def square_norm(self) -> float:
+ return self.x ** 2 + self.y ** 2
+
+ self.assertEqual(Vec2D.__protocol_attrs__, {'x', 'y', 'square_norm'})
+ expected_error_message = (
+ "Protocols with non-method members don't support issubclass()."
+ " Non-method members: 'x', 'y'."
+ )
+ with self.assertRaisesRegex(TypeError, re.escape(expected_error_message)):
+ issubclass(int, Vec2D)
+
+ def test_nonruntime_protocol_interaction_with_evil_classproperty(self):
+ class classproperty:
+ def __get__(self, instance, type):
+ raise RuntimeError("NO")
+
+ class Commentable(Protocol):
+ evil = classproperty()
+
+ # recognised as a protocol attr,
+ # but not actually accessed by the protocol metaclass
+ # (which would raise RuntimeError) for non-runtime protocols.
+ # See gh-113320
+ self.assertEqual(get_protocol_members(Commentable), {"evil"})
+
+ def test_runtime_protocol_interaction_with_evil_classproperty(self):
+ class CustomError(Exception): pass
+
+ class classproperty:
+ def __get__(self, instance, type):
+ raise CustomError
+
+ with self.assertRaises(TypeError) as cm:
+ @runtime_checkable
+ class Commentable(Protocol):
+ evil = classproperty()
+
+ exc = cm.exception
+ self.assertEqual(
+ exc.args[0],
+ "Failed to determine whether protocol member 'evil' is a method member"
+ )
+ self.assertIs(type(exc.__cause__), CustomError)
+
+ def test_extensions_runtimecheckable_on_typing_Protocol(self):
+ @runtime_checkable
+ class Functor(typing.Protocol):
+ def foo(self) -> None: ...
+
+ self.assertNotIsSubclass(object, Functor)
+
+ class Bar:
+ def foo(self): pass
+
+ self.assertIsSubclass(Bar, Functor)
+
+
+class SpecificProtocolTests(BaseTestCase):
+ def test_reader_runtime_checkable(self):
+ class MyReader:
+ def read(self, n: int) -> bytes:
+ return b""
+
+ class WrongReader:
+ def readx(self, n: int) -> bytes:
+ return b""
+
+ self.assertIsInstance(MyReader(), typing_extensions.Reader)
+ self.assertNotIsInstance(WrongReader(), typing_extensions.Reader)
+
+ def test_writer_runtime_checkable(self):
+ class MyWriter:
+ def write(self, b: bytes) -> int:
+ return 0
+
+ class WrongWriter:
+ def writex(self, b: bytes) -> int:
+ return 0
+
+ self.assertIsInstance(MyWriter(), typing_extensions.Writer)
+ self.assertNotIsInstance(WrongWriter(), typing_extensions.Writer)
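+
+ # A minimal sketch assuming standard runtime_checkable semantics: the
+ # runtime check is name-based only, so a mismatched signature still
+ # satisfies isinstance(). SloppyWriter is a throwaway illustrative class.
+ class SloppyWriter:
+ def write(self): ...
+
+ self.assertIsInstance(SloppyWriter(), typing_extensions.Writer)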
+
class Point2DGeneric(Generic[T], TypedDict):
a: T
@@ -3355,10 +4202,16 @@ def test_basics_functional_syntax(self):
self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
self.assertEqual(Emp.__total__, True)
+ def test_allowed_as_type_argument(self):
+ # https://github.com/python/typing_extensions/issues/613
+ obj = typing.Type[typing_extensions.TypedDict]
+ self.assertIs(typing_extensions.get_origin(obj), type)
+ self.assertEqual(typing_extensions.get_args(obj), (typing_extensions.TypedDict,))
+
@skipIf(sys.version_info < (3, 13), "Change in behavior in 3.13")
def test_keywords_syntax_raises_on_3_13(self):
- with self.assertRaises(TypeError):
- Emp = TypedDict('Emp', name=str, id=int)
+ with self.assertRaises(TypeError), self.assertWarns(DeprecationWarning):
+ TypedDict('Emp', name=str, id=int)
@skipIf(sys.version_info >= (3, 13), "3.13 removes support for kwargs")
def test_basics_keywords_syntax(self):
@@ -3381,18 +4234,25 @@ def test_basics_keywords_syntax(self):
def test_typeddict_special_keyword_names(self):
with self.assertWarns(DeprecationWarning):
TD = TypedDict("TD", cls=type, self=object, typename=str, _typename=int,
- fields=list, _fields=dict)
+ fields=list, _fields=dict,
+ closed=bool, extra_items=bool)
self.assertEqual(TD.__name__, 'TD')
self.assertEqual(TD.__annotations__, {'cls': type, 'self': object, 'typename': str,
- '_typename': int, 'fields': list, '_fields': dict})
+ '_typename': int, 'fields': list, '_fields': dict,
+ 'closed': bool, 'extra_items': bool})
+ self.assertIsNone(TD.__closed__)
+ self.assertIs(TD.__extra_items__, NoExtraItems)
a = TD(cls=str, self=42, typename='foo', _typename=53,
- fields=[('bar', tuple)], _fields={'baz', set})
+ fields=[('bar', tuple)], _fields={'baz', set},
+ closed=None, extra_items="tea pot")
self.assertEqual(a['cls'], str)
self.assertEqual(a['self'], 42)
self.assertEqual(a['typename'], 'foo')
self.assertEqual(a['_typename'], 53)
self.assertEqual(a['fields'], [('bar', tuple)])
self.assertEqual(a['_fields'], {'baz', set})
+ self.assertIsNone(a['closed'])
+ self.assertEqual(a['extra_items'], "tea pot")
def test_typeddict_create_errors(self):
with self.assertRaises(TypeError):
@@ -3402,17 +4262,9 @@ def test_typeddict_create_errors(self):
with self.assertRaises(TypeError):
TypedDict('Emp', [('name', str)], None)
- with self.assertWarns(DeprecationWarning):
- Emp = TypedDict('Emp', name=str, id=int)
- self.assertEqual(Emp.__name__, 'Emp')
- self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
-
def test_typeddict_errors(self):
Emp = TypedDict('Emp', {'name': str, 'id': int})
- if sys.version_info >= (3, 13):
- self.assertEqual(TypedDict.__module__, 'typing')
- else:
- self.assertEqual(TypedDict.__module__, 'typing_extensions')
+ self.assertEqual(TypedDict.__module__, 'typing_extensions')
jim = Emp(name='Jim', id=1)
with self.assertRaises(TypeError):
isinstance({}, Emp)
@@ -3486,6 +4338,37 @@ def test_total(self):
self.assertEqual(Options.__required_keys__, frozenset())
self.assertEqual(Options.__optional_keys__, {'log_level', 'log_path'})
+ def test_total_inherits_non_total(self):
+ class TD1(TypedDict, total=False):
+ a: int
+
+ self.assertIs(TD1.__total__, False)
+
+ class TD2(TD1):
+ b: str
+
+ self.assertIs(TD2.__total__, True)
+
+ def test_total_with_assigned_value(self):
+ class TD(TypedDict):
+ __total__ = "some_value"
+
+ self.assertIs(TD.__total__, True)
+
+ class TD2(TypedDict, total=True):
+ __total__ = "some_value"
+
+ self.assertIs(TD2.__total__, True)
+
+ class TD3(TypedDict, total=False):
+ __total__ = "some value"
+
+ self.assertIs(TD3.__total__, False)
+
+ TD4 = TypedDict('TD4', {'__total__': "some_value"}) # noqa: F821
+ self.assertIs(TD4.__total__, True)
+
+
def test_optional_keys(self):
class Point2Dor3D(Point2D, total=False):
z: int
@@ -3525,6 +4408,39 @@ class Cat(Animal):
'voice': str,
}
+ @skipIf(sys.version_info == (3, 14, 0, "beta", 1), "Broken on beta 1, fixed in beta 2")
+ def test_inheritance_pep563(self):
+ def _make_td(future, class_name, annos, base, extra_names=None):
+ lines = []
+ if future:
+ lines.append('from __future__ import annotations')
+ lines.append('from typing import TypedDict')
+ lines.append(f'class {class_name}({base}):')
+ for name, anno in annos.items():
+ lines.append(f' {name}: {anno}')
+ code = '\n'.join(lines)
+ ns = {**extra_names} if extra_names else {}
+ exec(code, ns)
+ return ns[class_name]
+
+ for base_future in (True, False):
+ for child_future in (True, False):
+ with self.subTest(base_future=base_future, child_future=child_future):
+ base = _make_td(
+ base_future, "Base", {"base": "int"}, "TypedDict"
+ )
+ if sys.version_info >= (3, 14):
+ self.assertIsNotNone(base.__annotate__)
+ child = _make_td(
+ child_future, "Child", {"child": "int"}, "Base", {"Base": base}
+ )
+ base_anno = typing.ForwardRef("int", module="builtins") if base_future else int
+ child_anno = typing.ForwardRef("int", module="builtins") if child_future else int
+ self.assertEqual(base.__annotations__, {'base': base_anno})
+ self.assertEqual(
+ child.__annotations__, {'child': child_anno, 'base': base_anno}
+ )
+
def test_required_notrequired_keys(self):
self.assertEqual(NontotalMovie.__required_keys__,
frozenset({"title"}))
@@ -3648,6 +4564,53 @@ class ChildWithInlineAndOptional(Untotal, Inline):
class Wrong(*bases):
pass
+ def test_closed_values(self):
+ class Implicit(TypedDict): ...
+ class ExplicitTrue(TypedDict, closed=True): ...
+ class ExplicitFalse(TypedDict, closed=False): ...
+
+ self.assertIsNone(Implicit.__closed__)
+ self.assertIs(ExplicitTrue.__closed__, True)
+ self.assertIs(ExplicitFalse.__closed__, False)
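+
+ # Extra illustrative check: closed=True is orthogonal to totality and
+ # introduces no keys of its own.
+ self.assertIs(ExplicitTrue.__total__, True)
+ self.assertEqual(ExplicitTrue.__required_keys__, frozenset())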
+
+
+ @skipIf(TYPING_3_14_0, "Only supported on <3.14")
+ def test_closed_typeddict_compat(self):
+ class Closed(TypedDict, closed=True):
+ __extra_items__: None
+
+ class Unclosed(TypedDict, closed=False):
+ ...
+
+ class ChildUnclosed(Closed, Unclosed):
+ ...
+
+ self.assertIsNone(ChildUnclosed.__closed__)
+ self.assertEqual(ChildUnclosed.__extra_items__, NoExtraItems)
+
+ class ChildClosed(Unclosed, Closed):
+ ...
+
+ self.assertIsNone(ChildClosed.__closed__)
+ self.assertEqual(ChildClosed.__extra_items__, NoExtraItems)
+
+ def test_extra_items_class_arg(self):
+ class TD(TypedDict, extra_items=int):
+ a: str
+
+ self.assertIs(TD.__extra_items__, int)
+ self.assertEqual(TD.__annotations__, {'a': str})
+ self.assertEqual(TD.__required_keys__, frozenset({'a'}))
+ self.assertEqual(TD.__optional_keys__, frozenset())
+
+ class NoExtra(TypedDict):
+ a: str
+
+ self.assertIs(NoExtra.__extra_items__, NoExtraItems)
+ self.assertEqual(NoExtra.__annotations__, {'a': str})
+ self.assertEqual(NoExtra.__required_keys__, frozenset({'a'}))
+ self.assertEqual(NoExtra.__optional_keys__, frozenset())
+
def test_is_typeddict(self):
self.assertIs(is_typeddict(Point2D), True)
self.assertIs(is_typeddict(Point2Dor3D), True)
@@ -3681,7 +4644,6 @@ class NewGeneric[T](TypedDict):
if hasattr(typing, "TypedDict"):
self.assertIs(is_typeddict(typing.TypedDict), False)
- @skipUnless(TYPING_3_8_0, "Python 3.8+ required")
def test_is_typeddict_against_typeddict_from_typing(self):
Point = typing.TypedDict('Point', {'x': int, 'y': int})
@@ -3696,7 +4658,7 @@ class PointDict3D(PointDict2D, total=False):
assert is_typeddict(PointDict2D) is True
assert is_typeddict(PointDict3D) is True
- @skipUnless(HAS_FORWARD_MODULE, "ForwardRef.__forward_module__ was added in 3.9")
+ @skipUnless(HAS_FORWARD_MODULE, "ForwardRef.__forward_module__ was added in 3.9.7")
def test_get_type_hints_cross_module_subclass(self):
self.assertNotIn("_DoNotImport", globals())
self.assertEqual(
@@ -3790,7 +4752,6 @@ class C(B[int]):
with self.assertRaises(TypeError):
C[str]
-
class Point3D(Point2DGeneric[T], Generic[T, KT]):
c: KT
@@ -3840,11 +4801,9 @@ class WithImplicitAny(B):
with self.assertRaises(TypeError):
WithImplicitAny[str]
- @skipUnless(TYPING_3_9_0, "Was changed in 3.9")
def test_non_generic_subscript(self):
# For backward compatibility, subscription works
# on arbitrary TypedDict types.
- # (But we don't attempt to backport this misfeature onto 3.7 and 3.8.)
class TD(TypedDict):
a: T
A = TD[int]
@@ -3937,6 +4896,402 @@ class T4(TypedDict, Generic[S]): pass
self.assertEqual(klass.__optional_keys__, set())
self.assertIsInstance(klass(), dict)
+ def test_readonly_inheritance(self):
+ class Base1(TypedDict):
+ a: ReadOnly[int]
+
+ class Child1(Base1):
+ b: str
+
+ self.assertEqual(Child1.__readonly_keys__, frozenset({'a'}))
+ self.assertEqual(Child1.__mutable_keys__, frozenset({'b'}))
+
+ class Base2(TypedDict):
+ a: int
+
+ class Child2(Base2):
+ b: ReadOnly[str]
+
+ self.assertEqual(Child2.__readonly_keys__, frozenset({'b'}))
+ self.assertEqual(Child2.__mutable_keys__, frozenset({'a'}))
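+
+ # Illustrative follow-up to the checks above (no new API assumed): the
+ # read-only and mutable key sets partition the full key set.
+ self.assertEqual(
+ Child2.__readonly_keys__ | Child2.__mutable_keys__,
+ frozenset({'a', 'b'}),
+ )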
+
+ def test_make_mutable_key_readonly(self):
+ class Base(TypedDict):
+ a: int
+
+ self.assertEqual(Base.__readonly_keys__, frozenset())
+ self.assertEqual(Base.__mutable_keys__, frozenset({'a'}))
+
+ class Child(Base):
+ a: ReadOnly[int] # type checker error, but allowed at runtime
+
+ self.assertEqual(Child.__readonly_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__mutable_keys__, frozenset())
+
+ def test_can_make_readonly_key_mutable(self):
+ class Base(TypedDict):
+ a: ReadOnly[int]
+
+ class Child(Base):
+ a: int
+
+ self.assertEqual(Child.__readonly_keys__, frozenset())
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ def test_combine_qualifiers(self):
+ class AllTheThings(TypedDict):
+ a: Annotated[Required[ReadOnly[int]], "why not"]
+ b: Required[Annotated[ReadOnly[int], "why not"]]
+ c: ReadOnly[NotRequired[Annotated[int, "why not"]]]
+ d: NotRequired[Annotated[int, "why not"]]
+
+ self.assertEqual(AllTheThings.__required_keys__, frozenset({'a', 'b'}))
+ self.assertEqual(AllTheThings.__optional_keys__, frozenset({'c', 'd'}))
+ self.assertEqual(AllTheThings.__readonly_keys__, frozenset({'a', 'b', 'c'}))
+ self.assertEqual(AllTheThings.__mutable_keys__, frozenset({'d'}))
+
+ self.assertEqual(
+ get_type_hints(AllTheThings, include_extras=False),
+ {'a': int, 'b': int, 'c': int, 'd': int},
+ )
+ self.assertEqual(
+ get_type_hints(AllTheThings, include_extras=True),
+ {
+ 'a': Annotated[Required[ReadOnly[int]], 'why not'],
+ 'b': Required[Annotated[ReadOnly[int], 'why not']],
+ 'c': ReadOnly[NotRequired[Annotated[int, 'why not']]],
+ 'd': NotRequired[Annotated[int, 'why not']],
+ },
+ )
+
+ @skipIf(TYPING_3_14_0, "Old syntax only supported on <3.14")
+ def test_extra_keys_non_readonly_legacy(self):
+ class Base(TypedDict, closed=True):
+ __extra_items__: str
+
+ class Child(Base):
+ a: NotRequired[int]
+
+ self.assertEqual(Child.__required_keys__, frozenset({}))
+ self.assertEqual(Child.__optional_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ @skipIf(TYPING_3_14_0, "Only supported on <3.14")
+ def test_extra_keys_readonly_legacy(self):
+ class Base(TypedDict, closed=True):
+ __extra_items__: ReadOnly[str]
+
+ class Child(Base):
+ a: NotRequired[str]
+
+ self.assertEqual(Child.__required_keys__, frozenset({}))
+ self.assertEqual(Child.__optional_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ @skipIf(TYPING_3_14_0, "Only supported on <3.14")
+ def test_extra_keys_readonly_explicit_closed_legacy(self):
+ class Base(TypedDict, closed=True):
+ __extra_items__: ReadOnly[str]
+
+ class Child(Base, closed=True):
+ a: NotRequired[str]
+
+ self.assertEqual(Child.__required_keys__, frozenset({}))
+ self.assertEqual(Child.__optional_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ @skipIf(TYPING_3_14_0, "Only supported on <3.14")
+ def test_extra_key_required_legacy(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ "Special key __extra_items__ does not support Required"
+ ):
+ TypedDict("A", {"__extra_items__": Required[int]}, closed=True)
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Special key __extra_items__ does not support NotRequired"
+ ):
+ TypedDict("A", {"__extra_items__": NotRequired[int]}, closed=True)
+
+ def test_regular_extra_items_legacy(self):
+ class ExtraReadOnly(TypedDict):
+ __extra_items__: ReadOnly[str]
+
+ self.assertEqual(ExtraReadOnly.__required_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraReadOnly.__optional_keys__, frozenset({}))
+ self.assertEqual(ExtraReadOnly.__readonly_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraReadOnly.__mutable_keys__, frozenset({}))
+ self.assertIs(ExtraReadOnly.__extra_items__, NoExtraItems)
+ self.assertIsNone(ExtraReadOnly.__closed__)
+
+ class ExtraRequired(TypedDict):
+ __extra_items__: Required[str]
+
+ self.assertEqual(ExtraRequired.__required_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraRequired.__optional_keys__, frozenset({}))
+ self.assertEqual(ExtraRequired.__readonly_keys__, frozenset({}))
+ self.assertEqual(ExtraRequired.__mutable_keys__, frozenset({'__extra_items__'}))
+ self.assertIs(ExtraRequired.__extra_items__, NoExtraItems)
+ self.assertIsNone(ExtraRequired.__closed__)
+
+ class ExtraNotRequired(TypedDict):
+ __extra_items__: NotRequired[str]
+
+ self.assertEqual(ExtraNotRequired.__required_keys__, frozenset({}))
+ self.assertEqual(ExtraNotRequired.__optional_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraNotRequired.__readonly_keys__, frozenset({}))
+ self.assertEqual(ExtraNotRequired.__mutable_keys__, frozenset({'__extra_items__'}))
+ self.assertIs(ExtraNotRequired.__extra_items__, NoExtraItems)
+ self.assertIsNone(ExtraNotRequired.__closed__)
+
+ @skipIf(TYPING_3_14_0, "Only supported on <3.14")
+ def test_closed_inheritance_legacy(self):
+ class Base(TypedDict, closed=True):
+ __extra_items__: ReadOnly[Union[str, None]]
+
+ self.assertEqual(Base.__required_keys__, frozenset({}))
+ self.assertEqual(Base.__optional_keys__, frozenset({}))
+ self.assertEqual(Base.__readonly_keys__, frozenset({}))
+ self.assertEqual(Base.__mutable_keys__, frozenset({}))
+ self.assertEqual(Base.__annotations__, {})
+ self.assertEqual(Base.__extra_items__, ReadOnly[Union[str, None]])
+ self.assertIs(Base.__closed__, True)
+
+ class Child(Base, closed=True):
+ a: int
+ __extra_items__: int
+
+ self.assertEqual(Child.__required_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__optional_keys__, frozenset({}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__annotations__, {"a": int})
+ self.assertIs(Child.__extra_items__, int)
+ self.assertIs(Child.__closed__, True)
+
+ class GrandChild(Child, closed=True):
+ __extra_items__: str
+
+ self.assertEqual(GrandChild.__required_keys__, frozenset({'a'}))
+ self.assertEqual(GrandChild.__optional_keys__, frozenset({}))
+ self.assertEqual(GrandChild.__readonly_keys__, frozenset({}))
+ self.assertEqual(GrandChild.__mutable_keys__, frozenset({'a'}))
+ self.assertEqual(GrandChild.__annotations__, {"a": int})
+ self.assertIs(GrandChild.__extra_items__, str)
+ self.assertIs(GrandChild.__closed__, True)
+
+ def test_closed_inheritance(self):
+ class Base(TypedDict, extra_items=ReadOnly[Union[str, None]]):
+ a: int
+
+ self.assertEqual(Base.__required_keys__, frozenset({"a"}))
+ self.assertEqual(Base.__optional_keys__, frozenset({}))
+ self.assertEqual(Base.__readonly_keys__, frozenset({}))
+ self.assertEqual(Base.__mutable_keys__, frozenset({"a"}))
+ self.assertEqual(Base.__annotations__, {"a": int})
+ self.assertEqual(Base.__extra_items__, ReadOnly[Union[str, None]])
+ self.assertIsNone(Base.__closed__)
+
+ class Child(Base, extra_items=int):
+ a: str
+
+ self.assertEqual(Child.__required_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__optional_keys__, frozenset({}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__annotations__, {"a": str})
+ self.assertIs(Child.__extra_items__, int)
+ self.assertIsNone(Child.__closed__)
+
+ class GrandChild(Child, closed=True):
+ a: float
+
+ self.assertEqual(GrandChild.__required_keys__, frozenset({'a'}))
+ self.assertEqual(GrandChild.__optional_keys__, frozenset({}))
+ self.assertEqual(GrandChild.__readonly_keys__, frozenset({}))
+ self.assertEqual(GrandChild.__mutable_keys__, frozenset({'a'}))
+ self.assertEqual(GrandChild.__annotations__, {"a": float})
+ self.assertIs(GrandChild.__extra_items__, NoExtraItems)
+ self.assertIs(GrandChild.__closed__, True)
+
+ class GrandGrandChild(GrandChild):
+ ...
+ self.assertEqual(GrandGrandChild.__required_keys__, frozenset({'a'}))
+ self.assertEqual(GrandGrandChild.__optional_keys__, frozenset({}))
+ self.assertEqual(GrandGrandChild.__readonly_keys__, frozenset({}))
+ self.assertEqual(GrandGrandChild.__mutable_keys__, frozenset({'a'}))
+ self.assertEqual(GrandGrandChild.__annotations__, {"a": float})
+ self.assertIs(GrandGrandChild.__extra_items__, NoExtraItems)
+ self.assertIsNone(GrandGrandChild.__closed__)
+
+ def test_implicit_extra_items(self):
+ class Base(TypedDict):
+ a: int
+
+ self.assertIs(Base.__extra_items__, NoExtraItems)
+ self.assertIsNone(Base.__closed__)
+
+ class ChildA(Base, closed=True):
+ ...
+
+ self.assertEqual(ChildA.__extra_items__, NoExtraItems)
+ self.assertIs(ChildA.__closed__, True)
+
+ @skipIf(TYPING_3_14_0, "Backwards compatibility only for Python 3.13")
+ def test_implicit_extra_items_before_3_14(self):
+ class Base(TypedDict):
+ a: int
+ class ChildB(Base, closed=True):
+ __extra_items__: None
+
+ self.assertIs(ChildB.__extra_items__, type(None))
+ self.assertIs(ChildB.__closed__, True)
+
+ @skipIf(
+ TYPING_3_13_0,
+ "The keyword argument alternative to define a "
+ "TypedDict type using the functional syntax is no longer supported"
+ )
+ def test_backwards_compatibility(self):
+ with self.assertWarns(DeprecationWarning):
+ TD = TypedDict("TD", closed=int)
+ self.assertIs(TD.__closed__, None)
+ self.assertEqual(TD.__annotations__, {"closed": int})
+
+ with self.assertWarns(DeprecationWarning):
+ TD = TypedDict("TD", extra_items=int)
+ self.assertIs(TD.__extra_items__, NoExtraItems)
+ self.assertEqual(TD.__annotations__, {"extra_items": int})
+
+ def test_cannot_combine_closed_and_extra_items(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ "Cannot combine closed=True and extra_items"
+ ):
+ class TD(TypedDict, closed=True, extra_items=range):
+ x: str
+
+ def test_typed_dict_signature(self):
+ self.assertListEqual(
+ list(inspect.signature(TypedDict).parameters),
+ ['typename', 'fields', 'total', 'closed', 'extra_items', 'kwargs']
+ )
+
+ def test_inline_too_many_arguments(self):
+ with self.assertRaises(TypeError):
+ TypedDict[{"a": int}, "extra"]
+
+ def test_inline_not_a_dict(self):
+ with self.assertRaises(TypeError):
+ TypedDict["not_a_dict"]
+
+ # a tuple of elements isn't allowed, even if the first element is a dict:
+ with self.assertRaises(TypeError):
+ TypedDict[({"key": int},)]
+
+ def test_inline_empty(self):
+ TD = TypedDict[{}]
+ self.assertIs(TD.__total__, True)
+ self.assertIs(TD.__closed__, True)
+ self.assertEqual(TD.__extra_items__, NoExtraItems)
+ self.assertEqual(TD.__required_keys__, set())
+ self.assertEqual(TD.__optional_keys__, set())
+ self.assertEqual(TD.__readonly_keys__, set())
+ self.assertEqual(TD.__mutable_keys__, set())
+
+ def test_inline(self):
+ TD = TypedDict[{
+ "a": int,
+ "b": Required[int],
+ "c": NotRequired[int],
+ "d": ReadOnly[int],
+ }]
+ self.assertIsSubclass(TD, dict)
+ self.assertIsSubclass(TD, typing.MutableMapping)
+ self.assertNotIsSubclass(TD, collections.abc.Sequence)
+ self.assertTrue(is_typeddict(TD))
+ self.assertEqual(TD.__name__, "")
+ self.assertEqual(
+ TD.__annotations__,
+ {"a": int, "b": Required[int], "c": NotRequired[int], "d": ReadOnly[int]},
+ )
+ self.assertEqual(TD.__module__, __name__)
+ self.assertEqual(TD.__bases__, (dict,))
+ self.assertIs(TD.__total__, True)
+ self.assertIs(TD.__closed__, True)
+ self.assertEqual(TD.__extra_items__, NoExtraItems)
+ self.assertEqual(TD.__required_keys__, {"a", "b", "d"})
+ self.assertEqual(TD.__optional_keys__, {"c"})
+ self.assertEqual(TD.__readonly_keys__, {"d"})
+ self.assertEqual(TD.__mutable_keys__, {"a", "b", "c"})
+
+ inst = TD(a=1, b=2, d=3)
+ self.assertIs(type(inst), dict)
+ self.assertEqual(inst["a"], 1)
+
+ def test_annotations(self):
+ # _type_check is applied
+ with self.assertRaisesRegex(TypeError, "Plain typing.Optional is not valid as type argument"):
+ class X(TypedDict):
+ a: Optional
+
+ # _type_convert is applied
+ class Y(TypedDict):
+ a: None
+ b: "int"
+ if sys.version_info >= (3, 14):
+ import annotationlib
+
+ fwdref = EqualToForwardRef('int', module=__name__)
+ self.assertEqual(Y.__annotations__, {'a': type(None), 'b': fwdref})
+ self.assertEqual(Y.__annotate__(annotationlib.Format.FORWARDREF), {'a': type(None), 'b': fwdref})
+ else:
+ self.assertEqual(Y.__annotations__, {'a': type(None), 'b': typing.ForwardRef('int', module=__name__)})
+
+ @skipUnless(TYPING_3_14_0, "Only supported on 3.14")
+ def test_delayed_type_check(self):
+ # _type_check is also applied later
+ class Z(TypedDict):
+ a: undefined # noqa: F821
+
+ with self.assertRaises(NameError):
+ Z.__annotations__
+
+ undefined = Final
+ with self.assertRaisesRegex(TypeError, "Plain typing.Final is not valid as type argument"):
+ Z.__annotations__
+
+ undefined = None # noqa: F841
+ self.assertEqual(Z.__annotations__, {'a': type(None)})
+
+ @skipUnless(TYPING_3_14_0, "Only supported on 3.14")
+ def test_deferred_evaluation(self):
+ class A(TypedDict):
+ x: NotRequired[undefined] # noqa: F821
+ y: ReadOnly[undefined] # noqa: F821
+ z: Required[undefined] # noqa: F821
+
+ self.assertEqual(A.__required_keys__, frozenset({'y', 'z'}))
+ self.assertEqual(A.__optional_keys__, frozenset({'x'}))
+ self.assertEqual(A.__readonly_keys__, frozenset({'y'}))
+ self.assertEqual(A.__mutable_keys__, frozenset({'x', 'z'}))
+
+ with self.assertRaises(NameError):
+ A.__annotations__
+
+ import annotationlib
+ self.assertEqual(
+ A.__annotate__(annotationlib.Format.STRING),
+ {'x': 'NotRequired[undefined]', 'y': 'ReadOnly[undefined]',
+ 'z': 'Required[undefined]'},
+ )
+
+ def test_dunder_dict(self):
+ self.assertIsInstance(TypedDict.__dict__, dict)
class AnnotatedTests(BaseTestCase):
@@ -4028,23 +5383,14 @@ class C:
A.x = 5
self.assertEqual(C.x, 5)
- @skipIf(sys.version_info[:2] in ((3, 9), (3, 10)), "Waiting for bpo-46491 bugfix.")
+ @skipIf(sys.version_info[:2] == (3, 10), "Waiting for https://github.com/python/cpython/issues/90649 bugfix.")
def test_special_form_containment(self):
class C:
classvar: Annotated[ClassVar[int], "a decoration"] = 4
const: Annotated[Final[int], "Const"] = 4
- if sys.version_info[:2] >= (3, 7):
- self.assertEqual(get_type_hints(C, globals())["classvar"], ClassVar[int])
- self.assertEqual(get_type_hints(C, globals())["const"], Final[int])
- else:
- self.assertEqual(
- get_type_hints(C, globals())["classvar"],
- Annotated[ClassVar[int], "a decoration"]
- )
- self.assertEqual(
- get_type_hints(C, globals())["const"], Annotated[Final[int], "Const"]
- )
+ self.assertEqual(get_type_hints(C, globals())["classvar"], ClassVar[int])
+ self.assertEqual(get_type_hints(C, globals())["const"], Final[int])
def test_cannot_subclass(self):
with self.assertRaisesRegex(TypeError, "Cannot subclass .*Annotated"):
@@ -4123,6 +5469,19 @@ def test_annotated_in_other_types(self):
X = List[Annotated[T, 5]]
self.assertEqual(X[int], List[Annotated[int, 5]])
+ def test_nested_annotated_with_unhashable_metadata(self):
+ X = Annotated[
+ List[Annotated[str, {"unhashable_metadata"}]],
+ "metadata"
+ ]
+ self.assertEqual(X.__origin__, List[Annotated[str, {"unhashable_metadata"}]])
+ self.assertEqual(X.__metadata__, ("metadata",))
+
+ def test_compatibility(self):
+ # Test that the _AnnotatedAlias compatibility alias works
+ self.assertTrue(hasattr(typing_extensions, "_AnnotatedAlias"))
+ self.assertIs(typing_extensions._AnnotatedAlias, typing._AnnotatedAlias)
+
class GetTypeHintsTests(BaseTestCase):
def test_get_type_hints(self):
@@ -4246,7 +5605,7 @@ def test_canonical_usage_with_variable_annotation(self):
exec('Alias: TypeAlias = Employee', globals(), ns)
def test_canonical_usage_with_type_comment(self):
- Alias: TypeAlias = Employee
+ Alias: TypeAlias = Employee # noqa: F841
def test_cannot_instantiate(self):
with self.assertRaises(TypeError):
@@ -4269,7 +5628,7 @@ class C(TypeAlias):
pass
with self.assertRaises(TypeError):
- class C(type(TypeAlias)):
+ class D(type(TypeAlias)):
pass
def test_repr(self):
@@ -4341,21 +5700,20 @@ def test_valid_uses(self):
self.assertEqual(C2.__parameters__, (P, T))
# Test collections.abc.Callable too.
- if sys.version_info[:2] >= (3, 9):
- # Note: no tests for Callable.__parameters__ here
- # because types.GenericAlias Callable is hardcoded to search
- # for tp_name "TypeVar" in C. This was changed in 3.10.
- C3 = collections.abc.Callable[P, int]
- self.assertEqual(C3.__args__, (P, int))
- C4 = collections.abc.Callable[P, T]
- self.assertEqual(C4.__args__, (P, T))
+ # Note: no tests for Callable.__parameters__ here
+ # because types.GenericAlias Callable is hardcoded to search
+ # for tp_name "TypeVar" in C. This was changed in 3.10.
+ C3 = collections.abc.Callable[P, int]
+ self.assertEqual(C3.__args__, (P, int))
+ C4 = collections.abc.Callable[P, T]
+ self.assertEqual(C4.__args__, (P, T))
# ParamSpec instances should also have args and kwargs attributes.
# Note: not in dir(P) because of __class__ hacks
self.assertTrue(hasattr(P, 'args'))
self.assertTrue(hasattr(P, 'kwargs'))
- @skipIf((3, 10, 0) <= sys.version_info[:3] <= (3, 10, 2), "Needs bpo-46676.")
+ @skipIf((3, 10, 0) <= sys.version_info[:3] <= (3, 10, 2), "Needs https://github.com/python/cpython/issues/90834.")
def test_args_kwargs(self):
P = ParamSpec('P')
P_2 = ParamSpec('P_2')
@@ -4387,6 +5745,7 @@ class X(Generic[T, P]):
class Y(Protocol[T, P]):
pass
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
for klass in X, Y:
with self.subTest(klass=klass.__name__):
G1 = klass[int, P_2]
@@ -4397,13 +5756,73 @@ class Y(Protocol[T, P]):
self.assertEqual(G2.__args__, (int, Concatenate[int, P_2]))
self.assertEqual(G2.__parameters__, (P_2,))
+ G3 = klass[int, Concatenate[int, ...]]
+ self.assertEqual(G3.__args__, (int, Concatenate[int, ...]))
+ self.assertEqual(G3.__parameters__, ())
+
+ with self.assertRaisesRegex(
+ TypeError,
+ f"Too few {things} for {klass}"
+ ):
+ klass[int]
+
    # The following are some valid use cases in PEP 612 that don't work:
# These do not work in 3.9, _type_check blocks the list and ellipsis.
# G3 = X[int, [int, bool]]
# G4 = X[int, ...]
# G5 = Z[[int, str, bool]]
- # Not working because this is special-cased in 3.10.
- # G6 = Z[int, str, bool]
+
+ def test_single_argument_generic(self):
+ P = ParamSpec("P")
+ T = TypeVar("T")
+ P_2 = ParamSpec("P_2")
+
+ class Z(Generic[P]):
+ pass
+
+ class ProtoZ(Protocol[P]):
+ pass
+
+ for klass in Z, ProtoZ:
+ with self.subTest(klass=klass.__name__):
+ # Note: on 3.10+, __args__ is a nested tuple here: ((int, ),) instead of (int, )
+ G6 = klass[int, str, T]
+ G6args = G6.__args__[0] if sys.version_info >= (3, 10) else G6.__args__
+ self.assertEqual(G6args, (int, str, T))
+ self.assertEqual(G6.__parameters__, (T,))
+
+ # P = [int]
+ G7 = klass[int]
+ G7args = G7.__args__[0] if sys.version_info >= (3, 10) else G7.__args__
+ self.assertEqual(G7args, (int,))
+ self.assertEqual(G7.__parameters__, ())
+
+ G8 = klass[Concatenate[T, ...]]
+ self.assertEqual(G8.__args__, (Concatenate[T, ...], ))
+ self.assertEqual(G8.__parameters__, (T,))
+
+ G9 = klass[Concatenate[T, P_2]]
+ self.assertEqual(G9.__args__, (Concatenate[T, P_2], ))
+
+ # This is an invalid form but useful for testing correct substitution
+ G10 = klass[int, Concatenate[str, P]]
+ G10args = G10.__args__[0] if sys.version_info >= (3, 10) else G10.__args__
+ self.assertEqual(G10args, (int, Concatenate[str, P], ))
+
+ @skipUnless(TYPING_3_10_0, "ParamSpec not present before 3.10")
+ def test_is_param_expr(self):
+ P = ParamSpec("P")
+ P_typing = typing.ParamSpec("P_typing")
+ self.assertTrue(typing_extensions._is_param_expr(P))
+ self.assertTrue(typing_extensions._is_param_expr(P_typing))
+ if hasattr(typing, "_is_param_expr"):
+ self.assertTrue(typing._is_param_expr(P))
+ self.assertTrue(typing._is_param_expr(P_typing))
+
+ def test_single_argument_generic_with_parameter_expressions(self):
+ P = ParamSpec("P")
+ T = TypeVar("T")
+ P_2 = ParamSpec("P_2")
class Z(Generic[P]):
pass
@@ -4411,13 +5830,81 @@ class Z(Generic[P]):
class ProtoZ(Protocol[P]):
pass
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+ for klass in Z, ProtoZ:
+ with self.subTest(klass=klass.__name__):
+ G8 = klass[Concatenate[T, ...]]
+
+ H8_1 = G8[int]
+ self.assertEqual(H8_1.__parameters__, ())
+ with self.assertRaisesRegex(TypeError, "not a generic class"):
+ H8_1[str]
+
+ H8_2 = G8[T][int]
+ self.assertEqual(H8_2.__parameters__, ())
+ with self.assertRaisesRegex(TypeError, "not a generic class"):
+ H8_2[str]
+
+ G9 = klass[Concatenate[T, P_2]]
+ self.assertEqual(G9.__parameters__, (T, P_2))
+
+ with self.assertRaisesRegex(TypeError,
+ "The last parameter to Concatenate should be a ParamSpec variable or ellipsis."
+ if sys.version_info < (3, 10) else
+ # from __typing_subst__
+ "Expected a list of types, an ellipsis, ParamSpec, or Concatenate"
+ ):
+ G9[int, int]
+
+ with self.assertRaisesRegex(TypeError, f"Too few {things}"):
+ G9[int]
+
+ with self.subTest("Check list as parameter expression", klass=klass.__name__):
+ if sys.version_info < (3, 10):
+ self.skipTest("Cannot pass non-types")
+ G5 = klass[[int, str, T]]
+ self.assertEqual(G5.__parameters__, (T,))
+ self.assertEqual(G5.__args__, ((int, str, T),))
+
+ H9 = G9[int, [T]]
+ self.assertEqual(H9.__parameters__, (T,))
+
+ # This is an invalid parameter expression but useful for testing correct substitution
+ G10 = klass[int, Concatenate[str, P]]
+ with self.subTest("Check invalid form substitution"):
+ self.assertEqual(G10.__parameters__, (P, ))
+ H10 = G10[int]
+ if (3, 10) <= sys.version_info < (3, 11, 3):
+ self.skipTest("3.10-3.11.2 does not substitute Concatenate here")
+ self.assertEqual(H10.__parameters__, ())
+ H10args = H10.__args__[0] if sys.version_info >= (3, 10) else H10.__args__
+ self.assertEqual(H10args, (int, (str, int)))
+
+ @skipUnless(TYPING_3_10_0, "ParamSpec not present before 3.10")
+ def test_substitution_with_typing_variants(self):
+ # Verify that substitution and typing._check_generic work with typing variants
+ P = ParamSpec("P")
+ typing_P = typing.ParamSpec("typing_P")
+ typing_Concatenate = typing.Concatenate[int, P]
+
+ class Z(Generic[typing_P]):
+ pass
+
+ P1 = Z[typing_P]
+ self.assertEqual(P1.__parameters__, (typing_P,))
+ self.assertEqual(P1.__args__, (typing_P,))
+
+ C1 = Z[typing_Concatenate]
+ self.assertEqual(C1.__parameters__, (P,))
+ self.assertEqual(C1.__args__, (typing_Concatenate,))
+
def test_pickle(self):
global P, P_co, P_contra, P_default
P = ParamSpec('P')
P_co = ParamSpec('P_co', covariant=True)
P_contra = ParamSpec('P_contra', contravariant=True)
- P_default = ParamSpec('P_default', default=int)
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ P_default = ParamSpec('P_default', default=[int])
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
with self.subTest(f'Pickle protocol {proto}'):
for paramspec in (P, P_co, P_contra, P_default):
z = pickle.loads(pickle.dumps(paramspec, proto))
@@ -4440,6 +5927,48 @@ def test_eq(self):
# won't be the same.
self.assertNotEqual(hash(ParamSpec('P')), hash(P))
+ def test_isinstance_results_unaffected_by_presence_of_tracing_function(self):
+ # See https://github.com/python/typing_extensions/issues/318
+
+ code = textwrap.dedent(
+ """\
+ import sys, typing
+
+ def trace_call(*args):
+ return trace_call
+
+ def run():
+ sys.modules.pop("typing_extensions", None)
+ from typing_extensions import ParamSpec
+ return isinstance(ParamSpec("P"), typing.TypeVar)
+
+ isinstance_result_1 = run()
+ sys.setprofile(trace_call)
+ isinstance_result_2 = run()
+ sys.stdout.write(f"{isinstance_result_1} {isinstance_result_2}")
+ """
+ )
+
+ # Run this in an isolated process or it pollutes the environment
+ # and makes other tests fail:
+ try:
+ proc = subprocess.run(
+ [sys.executable, "-c", code], check=True, capture_output=True, text=True,
+ )
+ except subprocess.CalledProcessError as exc:
+ print("stdout", exc.stdout, sep="\n")
+ print("stderr", exc.stderr, sep="\n")
+ raise
+
+ # Sanity checks to ensure the test is working as expected
+ self.assertIsInstance(proc.stdout, str)
+ result1, result2 = proc.stdout.split(" ")
+ self.assertIn(result1, {"True", "False"})
+ self.assertIn(result2, {"True", "False"})
+
+ # The actual test:
+ self.assertEqual(result1, result2)
+
class ConcatenateTests(BaseTestCase):
def test_basics(self):
@@ -4450,17 +5979,38 @@ class MyClass: ...
c = Concatenate[MyClass, P]
self.assertNotEqual(c, Concatenate)
- def test_valid_uses(self):
+ # Test Concatenate with an ellipsis
+ d = Concatenate[MyClass, ...]
+ self.assertNotEqual(d, c)
+ self.assertNotEqual(d, Concatenate)
+
+ @skipUnless(TYPING_3_10_0, "Concatenate not available in <3.10")
+ def test_typing_compatibility(self):
P = ParamSpec('P')
- T = TypeVar('T')
+ C1 = Concatenate[int, P][typing.Concatenate[int, P]]
+ self.assertEqual(C1, Concatenate[int, int, P])
+ self.assertEqual(get_args(C1), (int, int, P))
- C1 = Callable[Concatenate[int, P], int]
- C2 = Callable[Concatenate[int, T, P], T]
+ C2 = typing.Concatenate[int, P][Concatenate[int, P]]
+ with self.subTest("typing compatibility with typing_extensions"):
+ if sys.version_info < (3, 10, 3):
+ self.skipTest("Unpacking not introduced until 3.10.3")
+ self.assertEqual(get_args(C2), (int, int, P))
- # Test collections.abc.Callable too.
- if sys.version_info[:2] >= (3, 9):
- C3 = collections.abc.Callable[Concatenate[int, P], int]
- C4 = collections.abc.Callable[Concatenate[int, T, P], T]
+ def test_valid_uses(self):
+ P = ParamSpec('P')
+ T = TypeVar('T')
+ for callable_variant in (Callable, collections.abc.Callable):
+ with self.subTest(callable_variant=callable_variant):
+ C1 = callable_variant[Concatenate[int, P], int]
+ C2 = callable_variant[Concatenate[int, T, P], T]
+ self.assertEqual(C1.__origin__, C2.__origin__)
+ self.assertNotEqual(C1, C2)
+
+ C3 = callable_variant[Concatenate[int, ...], int]
+ C4 = callable_variant[Concatenate[int, T, ...], T]
+ self.assertEqual(C3.__origin__, C4.__origin__)
+ self.assertNotEqual(C3, C4)
def test_invalid_uses(self):
P = ParamSpec('P')
@@ -4474,25 +6024,54 @@ def test_invalid_uses(self):
with self.assertRaisesRegex(
TypeError,
- 'The last parameter to Concatenate should be a ParamSpec variable',
+ 'The last parameter to Concatenate should be a ParamSpec variable or ellipsis',
):
Concatenate[P, T]
- if not TYPING_3_11_0:
- with self.assertRaisesRegex(
- TypeError,
- 'each arg must be a type',
- ):
- Concatenate[1, P]
+ # Test with tuple argument
+ with self.assertRaisesRegex(
+ TypeError,
+ "The last parameter to Concatenate should be a ParamSpec variable or ellipsis.",
+ ):
+ Concatenate[(P, T)]
- def test_basic_introspection(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ 'is not a generic class',
+ ):
+ Callable[Concatenate[int, ...], Any][Any]
+
+ # Ensure that `_type_check` is called.
P = ParamSpec('P')
- C1 = Concatenate[int, P]
- C2 = Concatenate[int, T, P]
- self.assertEqual(C1.__origin__, Concatenate)
+ with self.assertRaisesRegex(
+ TypeError,
+ "each arg must be a type",
+ ):
+ Concatenate[(str,), P]
+
+ @skipUnless(TYPING_3_10_0, "Missing backport to 3.9. See issue #48")
+ def test_alias_subscription_with_ellipsis(self):
+ P = ParamSpec('P')
+ X = Callable[Concatenate[int, P], Any]
+
+ C1 = X[...]
+ self.assertEqual(C1.__parameters__, ())
+ self.assertEqual(get_args(C1), (Concatenate[int, ...], Any))
+
+ def test_basic_introspection(self):
+ P = ParamSpec('P')
+ C1 = Concatenate[int, P]
+ C2 = Concatenate[int, T, P]
+ C3 = Concatenate[int, ...]
+ C4 = Concatenate[int, T, ...]
+ self.assertEqual(C1.__origin__, Concatenate)
self.assertEqual(C1.__args__, (int, P))
self.assertEqual(C2.__origin__, Concatenate)
self.assertEqual(C2.__args__, (int, T, P))
+ self.assertEqual(C3.__origin__, Concatenate)
+ self.assertEqual(C3.__args__, (int, Ellipsis))
+ self.assertEqual(C4.__origin__, Concatenate)
+ self.assertEqual(C4.__args__, (int, T, Ellipsis))
def test_eq(self):
P = ParamSpec('P')
@@ -4503,6 +6082,50 @@ def test_eq(self):
self.assertEqual(hash(C1), hash(C2))
self.assertNotEqual(C1, C3)
+ C4 = Concatenate[int, ...]
+ C5 = Concatenate[int, ...]
+ C6 = Concatenate[int, T, ...]
+ self.assertEqual(C4, C5)
+ self.assertEqual(hash(C4), hash(C5))
+ self.assertNotEqual(C4, C6)
+
+ def test_substitution(self):
+ T = TypeVar('T')
+ P = ParamSpec('P')
+ Ts = TypeVarTuple("Ts")
+
+ C1 = Concatenate[str, T, ...]
+ self.assertEqual(C1[int], Concatenate[str, int, ...])
+
+ C2 = Concatenate[str, P]
+ self.assertEqual(C2[...], Concatenate[str, ...])
+ self.assertEqual(C2[int], (str, int))
+ U1 = Unpack[Tuple[int, str]]
+ U2 = Unpack[Ts]
+ self.assertEqual(C2[U1], (str, int, str))
+ self.assertEqual(C2[U2], (str, Unpack[Ts]))
+ self.assertEqual(C2["U2"], (str, EqualToForwardRef("U2")))
+
+ if (3, 12, 0) <= sys.version_info < (3, 12, 4):
+ with self.assertRaises(AssertionError):
+ C2[Unpack[U2]]
+ else:
+ with self.assertRaisesRegex(TypeError, "must be used with a tuple type"):
+ C2[Unpack[U2]]
+
+ C3 = Concatenate[str, T, P]
+ self.assertEqual(C3[int, [bool]], (str, int, bool))
+
+ @skipUnless(TYPING_3_10_0, "Concatenate not present before 3.10")
+ def test_is_param_expr(self):
+ P = ParamSpec('P')
+ concat = Concatenate[str, P]
+ typing_concat = typing.Concatenate[str, P]
+ self.assertTrue(typing_extensions._is_param_expr(concat))
+ self.assertTrue(typing_extensions._is_param_expr(typing_concat))
+ if hasattr(typing, "_is_param_expr"):
+ self.assertTrue(typing._is_param_expr(concat))
+ self.assertTrue(typing._is_param_expr(typing_concat))
class TypeGuardTests(BaseTestCase):
def test_basics(self):
@@ -4530,7 +6153,7 @@ def test_cannot_subclass(self):
class C(type(TypeGuard)):
pass
with self.assertRaises(TypeError):
- class C(type(TypeGuard[int])):
+ class D(type(TypeGuard[int])):
pass
def test_cannot_init(self):
@@ -4548,6 +6171,108 @@ def test_no_isinstance(self):
issubclass(int, TypeGuard)
+class TypeIsTests(BaseTestCase):
+ def test_basics(self):
+ TypeIs[int] # OK
+ self.assertEqual(TypeIs[int], TypeIs[int])
+
+ def foo(arg) -> TypeIs[int]: ...
+ self.assertEqual(gth(foo), {'return': TypeIs[int]})
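+
+ # Sketch of intended usage; the narrowing is a static-typing construct,
+ # so only the runtime bool result is checked here.
+ def is_int(arg) -> TypeIs[int]:
+ return isinstance(arg, int)
+
+ self.assertIs(is_int(1), True)
+ self.assertIs(is_int("x"), False)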
+
+ def test_repr(self):
+ if hasattr(typing, 'TypeIs'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(TypeIs), f'{mod_name}.TypeIs')
+ cv = TypeIs[int]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeIs[int]')
+ cv = TypeIs[Employee]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeIs[{__name__}.Employee]')
+ cv = TypeIs[Tuple[int]]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeIs[typing.Tuple[int]]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(TypeIs)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(TypeIs[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ TypeIs()
+ with self.assertRaises(TypeError):
+ type(TypeIs)()
+ with self.assertRaises(TypeError):
+ type(TypeIs[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, TypeIs[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, TypeIs)
+
+
+class TypeFormTests(BaseTestCase):
+ def test_basics(self):
+ TypeForm[int] # OK
+ self.assertEqual(TypeForm[int], TypeForm[int])
+
+ def foo(arg) -> TypeForm[int]: ...
+ self.assertEqual(gth(foo), {'return': TypeForm[int]})
+
+ def test_repr(self):
+ if hasattr(typing, 'TypeForm'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(TypeForm), f'{mod_name}.TypeForm')
+ cv = TypeForm[int]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeForm[int]')
+ cv = TypeForm[Employee]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeForm[{__name__}.Employee]')
+ cv = TypeForm[Tuple[int]]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeForm[typing.Tuple[int]]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(TypeForm)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(TypeForm[int])):
+ pass
+
+ def test_call(self):
+ objs = [
+ 1,
+ "int",
+ int,
+ Tuple[int, str],
+ ]
+ for obj in objs:
+ with self.subTest(obj=obj):
+ self.assertIs(TypeForm(obj), obj)
+
+ with self.assertRaises(TypeError):
+ TypeForm()
+ with self.assertRaises(TypeError):
+ TypeForm("too", "many")
+
+ def test_cannot_init_type(self):
+ with self.assertRaises(TypeError):
+ type(TypeForm)()
+ with self.assertRaises(TypeError):
+ type(TypeForm[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, TypeForm[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, TypeForm)
+
+
class LiteralStringTests(BaseTestCase):
def test_basics(self):
class Foo:
@@ -4565,7 +6290,7 @@ def test_repr(self):
mod_name = 'typing'
else:
mod_name = 'typing_extensions'
- self.assertEqual(repr(LiteralString), '{}.LiteralString'.format(mod_name))
+ self.assertEqual(repr(LiteralString), f'{mod_name}.LiteralString')
def test_cannot_subscript(self):
with self.assertRaises(TypeError):
@@ -4576,7 +6301,7 @@ def test_cannot_subclass(self):
class C(type(LiteralString)):
pass
with self.assertRaises(TypeError):
- class C(LiteralString):
+ class D(LiteralString):
pass
def test_cannot_init(self):
@@ -4602,7 +6327,7 @@ def test_typevar(self):
self.assertIs(StrT.__bound__, LiteralString)
def test_pickle(self):
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(LiteralString, protocol=proto)
self.assertIs(LiteralString, pickle.loads(pickled))
@@ -4619,7 +6344,7 @@ def test_repr(self):
mod_name = 'typing'
else:
mod_name = 'typing_extensions'
- self.assertEqual(repr(Self), '{}.Self'.format(mod_name))
+ self.assertEqual(repr(Self), f'{mod_name}.Self')
def test_cannot_subscript(self):
with self.assertRaises(TypeError):
@@ -4649,7 +6374,7 @@ def return_tuple(self) -> TupleSelf:
return (self, self)
def test_pickle(self):
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(Self, protocol=proto)
self.assertIs(Self, pickle.loads(pickled))
@@ -4761,6 +6486,53 @@ class D(Protocol[T1, T2, Unpack[Ts]]): pass
with self.assertRaises(TypeError):
klass[int]
+ def test_substitution(self):
+ Ts = TypeVarTuple("Ts")
+ unpacked_str = Unpack[Ts][str] # This should not raise an error
+ self.assertIs(unpacked_str, str)
+
+ @skipUnless(TYPING_3_11_0, "Needs Issue #103 for <3.11")
+ def test_nested_unpack(self):
+ Ts = TypeVarTuple("Ts")
+ Variadic = Tuple[int, Unpack[Ts]]
+ # Tuple[int, int, Tuple[str, int]]
+ direct_subscription = Variadic[int, Tuple[str, int]]
+ # Tuple[int, int, Tuple[*Ts, int]]
+ TupleAliasTs = Variadic[int, Tuple[Unpack[Ts], int]]
+
+ # Tuple[int, int, Tuple[str, int]]
+ recursive_unpack = TupleAliasTs[str]
+ self.assertEqual(direct_subscription, recursive_unpack)
+ self.assertEqual(get_args(recursive_unpack), (int, int, Tuple[str, int]))
+
+ # Test with Callable
+ T = TypeVar("T")
+ # Tuple[int, (*Ts) -> T]
+ CallableAliasTsT = Variadic[Callable[[Unpack[Ts]], T]]
+ # Tuple[int, (str, int) -> object]
+ callable_fully_subscripted = CallableAliasTsT[Unpack[Tuple[str, int]], object]
+ self.assertEqual(get_args(callable_fully_subscripted), (int, Callable[[str, int], object]))
+
+ @skipUnless(TYPING_3_11_0, "Needs Issue #103 for <3.11")
+ def test_equivalent_nested_variadics(self):
+ T = TypeVar("T")
+ Ts = TypeVarTuple("Ts")
+ Variadic = Tuple[int, Unpack[Ts]]
+ TupleAliasTsT = Variadic[Tuple[Unpack[Ts], T]]
+ nested_tuple_bare = TupleAliasTsT[str, int, object]
+
+ self.assertEqual(get_args(nested_tuple_bare), (int, Tuple[str, int, object]))
+ # Variants
+ self.assertEqual(nested_tuple_bare, TupleAliasTsT[Unpack[Tuple[str, int, object]]])
+ self.assertEqual(nested_tuple_bare, TupleAliasTsT[Unpack[Tuple[str, int]], object])
+ self.assertEqual(nested_tuple_bare, TupleAliasTsT[Unpack[Tuple[str]], Unpack[Tuple[int]], object])
+
+ @skipUnless(TYPING_3_11_0, "Needed for backport")
+ def test_type_var_inheritance(self):
+ Ts = TypeVarTuple("Ts")
+ self.assertFalse(isinstance(Unpack[Ts], TypeVar))
+ self.assertFalse(isinstance(Unpack[Ts], typing.TypeVar))
+
class TypeVarTupleTests(BaseTestCase):
@@ -4814,7 +6586,7 @@ def test_pickle(self):
Ts = TypeVarTuple('Ts')
Ts_default = TypeVarTuple('Ts_default', default=Unpack[Tuple[int, str]])
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for typevartuple in (Ts, Ts_default):
z = pickle.loads(pickle.dumps(typevartuple, proto))
self.assertEqual(z.__name__, typevartuple.__name__)
@@ -4878,7 +6650,7 @@ def stmethod(): ...
def prop(self): ...
@final
- @lru_cache() # noqa: B019
+ @lru_cache # noqa: B019
def cached(self): ...
# Use getattr_static because the descriptor returns the
@@ -5065,40 +6837,48 @@ def test_all_names_in___all__(self):
self.assertLessEqual(exclude, actual_names)
def test_typing_extensions_defers_when_possible(self):
- exclude = {
- 'dataclass_transform',
- 'overload',
- 'ParamSpec',
- 'Text',
- 'TypeVar',
- 'TypeVarTuple',
- 'TYPE_CHECKING',
- 'Final',
- 'get_type_hints',
- }
+ exclude = set()
if sys.version_info < (3, 10):
exclude |= {'get_args', 'get_origin'}
if sys.version_info < (3, 10, 1):
exclude |= {"Literal"}
if sys.version_info < (3, 11):
- exclude |= {'final', 'Any', 'NewType'}
+ exclude |= {'final', 'Any', 'NewType', 'overload', 'Concatenate'}
if sys.version_info < (3, 12):
exclude |= {
- 'Protocol', 'SupportsAbs', 'SupportsBytes',
+ 'SupportsAbs', 'SupportsBytes',
'SupportsComplex', 'SupportsFloat', 'SupportsIndex', 'SupportsInt',
- 'SupportsRound', 'Unpack',
+ 'SupportsRound', 'Unpack', 'dataclass_transform',
}
if sys.version_info < (3, 13):
- exclude |= {'NamedTuple', 'TypedDict', 'is_typeddict'}
+ exclude |= {
+ 'NamedTuple', 'Protocol', 'runtime_checkable', 'Generator',
+ 'AsyncGenerator', 'ContextManager', 'AsyncContextManager',
+ 'ParamSpec', 'TypeVar', 'TypeVarTuple', 'get_type_hints',
+ }
+ if sys.version_info < (3, 14):
+ exclude |= {
+ 'TypeAliasType'
+ }
+ if not typing_extensions._PEP_728_IMPLEMENTED:
+ exclude |= {'TypedDict', 'is_typeddict'}
for item in typing_extensions.__all__:
if item not in exclude and hasattr(typing, item):
self.assertIs(
getattr(typing_extensions, item),
getattr(typing, item))
+ def test_alias_names_still_exist(self):
+ for name in typing_extensions._typing_names:
+ # If this fails, change _typing_names to conditionally add the name
+ # depending on the Python version.
+ self.assertTrue(
+ hasattr(typing_extensions, name),
+ f"{name} no longer exists in typing",
+ )
+
def test_typing_extensions_compiles_with_opt(self):
- file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
- 'typing_extensions.py')
+ file_path = typing_extensions.__file__
try:
subprocess.check_output(f'{sys.executable} -OO {file_path}',
stderr=subprocess.STDOUT,
@@ -5124,17 +6904,6 @@ def double(self):
return 2 * self.x
-class XRepr(NamedTuple):
- x: int
- y: int = 1
-
- def __str__(self):
- return f'{self.x} -> {self.y}'
-
- def __add__(self, other):
- return 0
-
-
class NamedTupleTests(BaseTestCase):
class NestedEmployee(NamedTuple):
name: str
@@ -5189,13 +6958,6 @@ class NonDefaultAfterDefault(NamedTuple):
x: int = 3
y: int
- @skipUnless(
- (
- TYPING_3_8_0
- or hasattr(CoolEmployeeWithDefault, '_field_defaults')
- ),
- '"_field_defaults" attribute was added in a micro version of 3.7'
- )
def test_field_defaults(self):
self.assertEqual(CoolEmployeeWithDefault._field_defaults, dict(cool=0))
@@ -5233,11 +6995,11 @@ class X(NamedTuple, A):
TypeError,
'can only inherit from a NamedTuple type and Generic'
):
- class X(NamedTuple, tuple):
+ class Y(NamedTuple, tuple):
x: int
with self.assertRaisesRegex(TypeError, 'duplicate base class'):
- class X(NamedTuple, NamedTuple):
+ class Z(NamedTuple, NamedTuple):
x: int
class A(NamedTuple):
@@ -5246,7 +7008,7 @@ class A(NamedTuple):
TypeError,
'can only inherit from a NamedTuple type and Generic'
):
- class X(NamedTuple, A):
+ class XX(NamedTuple, A):
y: str
def test_generic(self):
@@ -5275,12 +7037,10 @@ class Y(Generic[T], NamedTuple):
self.assertIsInstance(a, G)
self.assertEqual(a.x, 3)
- things = "arguments" if sys.version_info >= (3, 11) else "parameters"
-
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
with self.assertRaisesRegex(TypeError, f'Too many {things}'):
G[int, str]
- @skipUnless(TYPING_3_9_0, "tuple.__class_getitem__ was added in 3.9")
def test_non_generic_subscript_py39_plus(self):
# For backward compatibility, subscription works
# on arbitrary NamedTuple types.
@@ -5295,19 +7055,7 @@ class Group(NamedTuple):
self.assertIs(type(a), Group)
self.assertEqual(a, (1, [2]))
- @skipIf(TYPING_3_9_0, "Test isn't relevant to 3.9+")
- def test_non_generic_subscript_error_message_py38_minus(self):
- class Group(NamedTuple):
- key: T
- group: List[T]
-
- with self.assertRaisesRegex(TypeError, 'not subscriptable'):
- Group[int]
-
- for attr in ('__args__', '__origin__', '__parameters__'):
- with self.subTest(attr=attr):
- self.assertFalse(hasattr(Group, attr))
-
+ @skipUnless(sys.version_info <= (3, 15), "Behavior removed in 3.15")
def test_namedtuple_keyword_usage(self):
with self.assertWarnsRegex(
DeprecationWarning,
@@ -5343,6 +7091,7 @@ def test_namedtuple_keyword_usage(self):
):
NamedTuple('Name', None, x=int)
+ @skipUnless(sys.version_info <= (3, 15), "Behavior removed in 3.15")
def test_namedtuple_special_keyword_names(self):
with self.assertWarnsRegex(
DeprecationWarning,
@@ -5358,6 +7107,7 @@ def test_namedtuple_special_keyword_names(self):
self.assertEqual(a.typename, 'foo')
self.assertEqual(a.fields, [('bar', tuple)])
+ @skipUnless(sys.version_info <= (3, 15), "Behavior removed in 3.15")
def test_empty_namedtuple(self):
expected_warning = re.escape(
"Failing to pass a value for the 'fields' parameter is deprecated "
@@ -5389,10 +7139,7 @@ class CNT(NamedTuple):
self.assertEqual(struct._fields, ())
self.assertEqual(struct.__annotations__, {})
self.assertIsInstance(struct(), struct)
- # Attribute was added in a micro version of 3.7
- # and is tested more fully elsewhere
- if hasattr(struct, "_field_defaults"):
- self.assertEqual(struct._field_defaults, {})
+ self.assertEqual(struct._field_defaults, {})
def test_namedtuple_errors(self):
with self.assertRaises(TypeError):
@@ -5429,30 +7176,13 @@ def test_copy_and_pickle(self):
def test_docstring(self):
self.assertIsInstance(NamedTuple.__doc__, str)
- @skipUnless(TYPING_3_8_0, "NamedTuple had a bad signature on <=3.7")
- def test_signature_is_same_as_typing_NamedTuple(self):
- self.assertEqual(inspect.signature(NamedTuple), inspect.signature(typing.NamedTuple))
-
- @skipIf(TYPING_3_8_0, "tests are only relevant to <=3.7")
- def test_signature_on_37(self):
- self.assertIsInstance(inspect.signature(NamedTuple), inspect.Signature)
- self.assertFalse(hasattr(NamedTuple, "__text_signature__"))
-
- @skipUnless(TYPING_3_9_0, "NamedTuple was a class on 3.8 and lower")
- def test_same_as_typing_NamedTuple_39_plus(self):
+ def test_same_as_typing_NamedTuple(self):
self.assertEqual(
set(dir(NamedTuple)) - {"__text_signature__"},
set(dir(typing.NamedTuple))
)
self.assertIs(type(NamedTuple), type(typing.NamedTuple))
- @skipIf(TYPING_3_9_0, "tests are only relevant to <=3.8")
- def test_same_as_typing_NamedTuple_38_minus(self):
- self.assertEqual(
- self.NestedEmployee.__annotations__,
- self.NestedEmployee._field_types
- )
-
def test_orig_bases(self):
T = TypeVar('T')
@@ -5469,6 +7199,128 @@ class GenericNamedTuple(NamedTuple, Generic[T]):
self.assertEqual(CallNamedTuple.__orig_bases__, (NamedTuple,))
+ def test_setname_called_on_values_in_class_dictionary(self):
+ class Vanilla:
+ def __set_name__(self, owner, name):
+ self.name = name
+
+ class Foo(NamedTuple):
+ attr = Vanilla()
+
+ foo = Foo()
+ self.assertEqual(len(foo), 0)
+ self.assertNotIn('attr', Foo._fields)
+ self.assertIsInstance(foo.attr, Vanilla)
+ self.assertEqual(foo.attr.name, "attr")
+
+ class Bar(NamedTuple):
+ attr: Vanilla = Vanilla()
+
+ bar = Bar()
+ self.assertEqual(len(bar), 1)
+ self.assertIn('attr', Bar._fields)
+ self.assertIsInstance(bar.attr, Vanilla)
+ self.assertEqual(bar.attr.name, "attr")
+
+ @skipIf(
+ TYPING_3_12_0,
+ "__set_name__ behaviour changed on py312+ to use BaseException.add_note()"
+ )
+ def test_setname_raises_the_same_as_on_other_classes_py311_minus(self):
+ class CustomException(BaseException): pass
+
+ class Annoying:
+ def __set_name__(self, owner, name):
+ raise CustomException
+
+ annoying = Annoying()
+
+ with self.assertRaises(RuntimeError) as cm:
+ class NormalClass:
+ attr = annoying
+ normal_exception = cm.exception
+
+ with self.assertRaises(RuntimeError) as cm:
+ class NamedTupleClass(NamedTuple):
+ attr = annoying
+ namedtuple_exception = cm.exception
+
+ self.assertIs(type(namedtuple_exception), RuntimeError)
+ self.assertIs(type(namedtuple_exception), type(normal_exception))
+ self.assertEqual(len(namedtuple_exception.args), len(normal_exception.args))
+ self.assertEqual(
+ namedtuple_exception.args[0],
+ normal_exception.args[0].replace("NormalClass", "NamedTupleClass")
+ )
+
+ self.assertIs(type(namedtuple_exception.__cause__), CustomException)
+ self.assertIs(
+ type(namedtuple_exception.__cause__), type(normal_exception.__cause__)
+ )
+ self.assertEqual(
+ namedtuple_exception.__cause__.args, normal_exception.__cause__.args
+ )
+
+ @skipUnless(
+ TYPING_3_12_0,
+ "__set_name__ behaviour changed on py312+ to use BaseException.add_note()"
+ )
+ def test_setname_raises_the_same_as_on_other_classes_py312_plus(self):
+ class CustomException(BaseException): pass
+
+ class Annoying:
+ def __set_name__(self, owner, name):
+ raise CustomException
+
+ annoying = Annoying()
+
+ with self.assertRaises(CustomException) as cm:
+ class NormalClass:
+ attr = annoying
+ normal_exception = cm.exception
+
+ with self.assertRaises(CustomException) as cm:
+ class NamedTupleClass(NamedTuple):
+ attr = annoying
+ namedtuple_exception = cm.exception
+
+ expected_note = (
+ "Error calling __set_name__ on 'Annoying' instance "
+ "'attr' in 'NamedTupleClass'"
+ )
+
+ self.assertIs(type(namedtuple_exception), CustomException)
+ self.assertIs(type(namedtuple_exception), type(normal_exception))
+ self.assertEqual(namedtuple_exception.args, normal_exception.args)
+
+ self.assertEqual(len(namedtuple_exception.__notes__), 1)
+ self.assertEqual(
+ len(namedtuple_exception.__notes__), len(normal_exception.__notes__)
+ )
+
+ self.assertEqual(namedtuple_exception.__notes__[0], expected_note)
+ self.assertEqual(
+ namedtuple_exception.__notes__[0],
+ normal_exception.__notes__[0].replace("NormalClass", "NamedTupleClass")
+ )
+
+ def test_strange_errors_when_accessing_set_name_itself(self):
+ class CustomException(Exception): pass
+
+ class Meta(type):
+ def __getattribute__(self, attr):
+ if attr == "__set_name__":
+ raise CustomException
+ return object.__getattribute__(self, attr)
+
+ class VeryAnnoying(metaclass=Meta): pass
+
+ very_annoying = VeryAnnoying()
+
+ with self.assertRaises(CustomException):
+ class Foo(NamedTuple):
+ attr = very_annoying
+
class TypeVarTests(BaseTestCase):
def test_basic_plain(self):
@@ -5551,8 +7403,8 @@ def test_or(self):
self.assertEqual(X | "x", Union[X, "x"])
self.assertEqual("x" | X, Union["x", X])
# make sure the order is correct
- self.assertEqual(get_args(X | "x"), (X, typing.ForwardRef("x")))
- self.assertEqual(get_args("x" | X), (typing.ForwardRef("x"), X))
+ self.assertEqual(get_args(X | "x"), (X, EqualToForwardRef("x")))
+ self.assertEqual(get_args("x" | X), (EqualToForwardRef("x"), X))
def test_union_constrained(self):
A = TypeVar('A', str, bytes)
@@ -5577,7 +7429,7 @@ def test_cannot_subclass(self):
class V(TypeVar): pass
T = TypeVar("T")
with self.assertRaises(TypeError):
- class V(T): pass
+ class W(T): pass
def test_cannot_instantiate_vars(self):
with self.assertRaises(TypeError):
@@ -5585,18 +7437,15 @@ def test_cannot_instantiate_vars(self):
def test_bound_errors(self):
with self.assertRaises(TypeError):
- TypeVar('X', bound=Union)
+ TypeVar('X', bound=Optional)
with self.assertRaises(TypeError):
TypeVar('X', str, float, bound=Employee)
with self.assertRaisesRegex(TypeError,
r"Bound must be a type\. Got \(1, 2\)\."):
TypeVar('X', bound=(1, 2))
- # Technically we could run it on later versions of 3.7 and 3.8,
- # but that's not worth the effort.
- @skipUnless(TYPING_3_9_0, "Fix was not backported")
def test_missing__name__(self):
- # See bpo-39942
+ # See https://github.com/python/cpython/issues/84123
code = ("import typing\n"
"T = typing.TypeVar('T')\n"
)
@@ -5624,17 +7473,20 @@ def test_typevar(self):
self.assertIsInstance(typing_T, typing_extensions.TypeVar)
class A(Generic[T]): ...
- Alias = Optional[T]
+ self.assertEqual(Optional[T].__args__, (T, type(None)))
def test_typevar_none(self):
U = typing_extensions.TypeVar('U')
U_None = typing_extensions.TypeVar('U_None', default=None)
- self.assertEqual(U.__default__, None)
- self.assertEqual(U_None.__default__, type(None))
+ self.assertIs(U.__default__, NoDefault)
+ self.assertFalse(U.has_default())
+ self.assertEqual(U_None.__default__, None)
+ self.assertTrue(U_None.has_default())
def test_paramspec(self):
- P = ParamSpec('P', default=(str, int))
- self.assertEqual(P.__default__, (str, int))
+ P = ParamSpec('P', default=[str, int])
+ self.assertEqual(P.__default__, [str, int])
+ self.assertTrue(P.has_default())
self.assertIsInstance(P, ParamSpec)
if hasattr(typing, "ParamSpec"):
self.assertIsInstance(P, typing.ParamSpec)
@@ -5643,12 +7495,25 @@ def test_paramspec(self):
self.assertIsInstance(typing_P, ParamSpec)
class A(Generic[P]): ...
- Alias = typing.Callable[P, None]
+ self.assertEqual(typing.Callable[P, None].__args__, (P, type(None)))
+
+ P_default = ParamSpec('P_default', default=...)
+ self.assertIs(P_default.__default__, ...)
+ self.assertTrue(P_default.has_default())
+
+ def test_paramspec_none(self):
+ U = ParamSpec('U')
+ U_None = ParamSpec('U_None', default=None)
+ self.assertIs(U.__default__, NoDefault)
+ self.assertFalse(U.has_default())
+ self.assertIs(U_None.__default__, None)
+ self.assertTrue(U_None.has_default())
def test_typevartuple(self):
Ts = TypeVarTuple('Ts', default=Unpack[Tuple[str, int]])
self.assertEqual(Ts.__default__, Unpack[Tuple[str, int]])
self.assertIsInstance(Ts, TypeVarTuple)
+ self.assertTrue(Ts.has_default())
if hasattr(typing, "TypeVarTuple"):
self.assertIsInstance(Ts, typing.TypeVarTuple)
typing_Ts = typing.TypeVarTuple('Ts')
@@ -5656,7 +7521,75 @@ def test_typevartuple(self):
self.assertIsInstance(typing_Ts, TypeVarTuple)
class A(Generic[Unpack[Ts]]): ...
- Alias = Optional[Unpack[Ts]]
+ self.assertEqual(Optional[Unpack[Ts]].__args__, (Unpack[Ts], type(None)))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_typevartuple_specialization(self):
+ T = TypeVar("T")
+ Ts = TypeVarTuple('Ts', default=Unpack[Tuple[str, int]])
+ self.assertEqual(Ts.__default__, Unpack[Tuple[str, int]])
+ class A(Generic[T, Unpack[Ts]]): ...
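+        # With no explicit argument for Ts, its default Unpack[Tuple[str, int]] is expanded into the args.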
+ self.assertEqual(A[float].__args__, (float, str, int))
+ self.assertEqual(A[float, range].__args__, (float, range))
+ self.assertEqual(A[float, Unpack[tuple[int, ...]]].__args__, (float, Unpack[tuple[int, ...]]))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_typevar_and_typevartuple_specialization(self):
+ T = TypeVar("T")
+ U = TypeVar("U", default=float)
+ Ts = TypeVarTuple('Ts', default=Unpack[Tuple[str, int]])
+ self.assertEqual(Ts.__default__, Unpack[Tuple[str, int]])
+ class A(Generic[T, U, Unpack[Ts]]): ...
+ self.assertEqual(A[int].__args__, (int, float, str, int))
+ self.assertEqual(A[int, str].__args__, (int, str, str, int))
+ self.assertEqual(A[int, str, range].__args__, (int, str, range))
+ self.assertEqual(A[int, str, Unpack[tuple[int, ...]]].__args__, (int, str, Unpack[tuple[int, ...]]))
+
+ def test_no_default_after_typevar_tuple(self):
+ T = TypeVar("T", default=int)
+ Ts = TypeVarTuple("Ts")
+ Ts_default = TypeVarTuple("Ts_default", default=Unpack[Tuple[str, int]])
+
+ with self.assertRaises(TypeError):
+ class X(Generic[Unpack[Ts], T]): ...
+
+ with self.assertRaises(TypeError):
+ class Y(Generic[Unpack[Ts_default], T]): ...
+
+ def test_typevartuple_none(self):
+ U = TypeVarTuple('U')
+ U_None = TypeVarTuple('U_None', default=None)
+ self.assertIs(U.__default__, NoDefault)
+ self.assertFalse(U.has_default())
+ self.assertIs(U_None.__default__, None)
+ self.assertTrue(U_None.has_default())
+
+ def test_no_default_after_non_default(self):
+ DefaultStrT = typing_extensions.TypeVar('DefaultStrT', default=str)
+ T = TypeVar('T')
+
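+        # PEP 696: a type parameter without a default may not follow one that has a default.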
+ with self.assertRaises(TypeError):
+ Generic[DefaultStrT, T]
+
+ def test_need_more_params(self):
+ DefaultStrT = typing_extensions.TypeVar('DefaultStrT', default=str)
+ T = typing_extensions.TypeVar('T')
+ U = typing_extensions.TypeVar('U')
+
+ class A(Generic[T, U, DefaultStrT]): ...
+ A[int, bool]
+ A[int, bool, str]
+
+ with self.assertRaises(
+ TypeError, msg="Too few arguments for .+; actual 1, expected at least 2"
+ ):
+ A[int]
def test_pickle(self):
global U, U_co, U_contra, U_default # pickle wants to reference the class by name
@@ -5664,7 +7597,7 @@ def test_pickle(self):
U_co = typing_extensions.TypeVar('U_co', covariant=True)
U_contra = typing_extensions.TypeVar('U_contra', contravariant=True)
U_default = typing_extensions.TypeVar('U_default', default=int)
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for typevar in (U, U_co, U_contra, U_default):
z = pickle.loads(pickle.dumps(typevar, proto))
self.assertEqual(z.__name__, typevar.__name__)
@@ -5673,6 +7606,132 @@ def test_pickle(self):
self.assertEqual(z.__bound__, typevar.__bound__)
self.assertEqual(z.__default__, typevar.__default__)
+ def test_strange_defaults_are_allowed(self):
+ # Leave it to type checkers to check whether strange default values
+ # should be allowed or disallowed
+ def not_a_type(): ...
+
+ for typevarlike_cls in TypeVar, ParamSpec, TypeVarTuple:
+ for default in not_a_type, 42, bytearray(), (int, not_a_type, 42):
+ with self.subTest(typevarlike_cls=typevarlike_cls, default=default):
+ T = typevarlike_cls("T", default=default)
+ self.assertEqual(T.__default__, default)
+
+ @skip_if_py313_beta_1
+ def test_allow_default_after_non_default_in_alias(self):
+ T_default = TypeVar('T_default', default=int)
+ T = TypeVar('T')
+ Ts = TypeVarTuple('Ts')
+
+ a1 = Callable[[T_default], T]
+ self.assertEqual(a1.__args__, (T_default, T))
+
+ a2 = dict[T_default, T]
+ self.assertEqual(a2.__args__, (T_default, T))
+
+ a3 = typing.Dict[T_default, T]
+ self.assertEqual(a3.__args__, (T_default, T))
+
+ a4 = Callable[[Unpack[Ts]], T]
+ self.assertEqual(a4.__args__, (Unpack[Ts], T))
+
+ @skipIf(
+ typing_extensions.Protocol is typing.Protocol,
+ "Test currently fails with the CPython version of Protocol and that's not our fault"
+ )
+ def test_generic_with_broken_eq(self):
+ # See https://github.com/python/typing_extensions/pull/422 for context
+ class BrokenEq(type):
+ def __eq__(self, other):
+ if other is typing_extensions.Protocol:
+ raise TypeError("I'm broken")
+ return False
+
+ class G(Generic[T], metaclass=BrokenEq):
+ pass
+
+ alias = G[int]
+ self.assertIs(get_origin(alias), G)
+ self.assertEqual(get_args(alias), (int,))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_paramspec_specialization(self):
+ T = TypeVar("T")
+ P = ParamSpec('P', default=[str, int])
+ self.assertEqual(P.__default__, [str, int])
+ class A(Generic[T, P]): ...
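+        # When P is omitted, its default [str, int] is applied and stored as a tuple in __args__.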
+ self.assertEqual(A[float].__args__, (float, (str, int)))
+ self.assertEqual(A[float, [range]].__args__, (float, (range,)))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_typevar_and_paramspec_specialization(self):
+ T = TypeVar("T")
+ U = TypeVar("U", default=float)
+ P = ParamSpec('P', default=[str, int])
+ self.assertEqual(P.__default__, [str, int])
+ class A(Generic[T, U, P]): ...
+ self.assertEqual(A[float].__args__, (float, float, (str, int)))
+ self.assertEqual(A[float, int].__args__, (float, int, (str, int)))
+ self.assertEqual(A[float, int, [range]].__args__, (float, int, (range,)))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_paramspec_and_typevar_specialization(self):
+ T = TypeVar("T")
+ P = ParamSpec('P', default=[str, int])
+ U = TypeVar("U", default=float)
+ self.assertEqual(P.__default__, [str, int])
+ class A(Generic[T, P, U]): ...
+ self.assertEqual(A[float].__args__, (float, (str, int), float))
+ self.assertEqual(A[float, [range]].__args__, (float, (range,), float))
+ self.assertEqual(A[float, [range], int].__args__, (float, (range,), int))
+
+
+class NoDefaultTests(BaseTestCase):
+ @skip_if_py313_beta_1
+ def test_pickling(self):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ s = pickle.dumps(NoDefault, proto)
+ loaded = pickle.loads(s)
+ self.assertIs(NoDefault, loaded)
+
+ @skip_if_py313_beta_1
+ def test_doc(self):
+ self.assertIsInstance(NoDefault.__doc__, str)
+
+ def test_constructor(self):
+ self.assertIs(NoDefault, type(NoDefault)())
+ with self.assertRaises(TypeError):
+ type(NoDefault)(1)
+
+ def test_repr(self):
+ self.assertRegex(repr(NoDefault), r'typing(_extensions)?\.NoDefault')
+
+ def test_no_call(self):
+ with self.assertRaises(TypeError):
+ NoDefault()
+
+ @skip_if_py313_beta_1
+ def test_immutable(self):
+ with self.assertRaises(AttributeError):
+ NoDefault.foo = 'bar'
+ with self.assertRaises(AttributeError):
+ NoDefault.foo
+
+ # TypeError is consistent with the behavior of NoneType
+ with self.assertRaises(TypeError):
+ type(NoDefault).foo = 3
+ with self.assertRaises(AttributeError):
+ type(NoDefault).foo
+
class TypeVarInferVarianceTests(BaseTestCase):
def test_typevar(self):
@@ -5687,7 +7746,7 @@ def test_pickle(self):
global U, U_infer # pickle wants to reference the class by name
U = typing_extensions.TypeVar('U')
U_infer = typing_extensions.TypeVar('U_infer', infer_variance=True)
- for proto in range(pickle.HIGHEST_PROTOCOL):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for typevar in (U, U_infer):
z = pickle.loads(pickle.dumps(typevar, proto))
self.assertEqual(z.__name__, typevar.__name__)
@@ -5748,7 +7807,6 @@ class D(B[str], float): pass
with self.assertRaisesRegex(TypeError, "Expected an instance of type"):
get_original_bases(object())
- @skipUnless(TYPING_3_9_0, "PEP 585 is yet to be")
def test_builtin_generics(self):
class E(list[T]): pass
class F(list[int]): pass
@@ -5756,6 +7814,25 @@ class F(list[int]): pass
self.assertEqual(get_original_bases(E), (list[T],))
self.assertEqual(get_original_bases(F), (list[int],))
+ @skipIf(
+ sys.version_info[:3] == (3, 12, 0) and sys.version_info[3] in {"alpha", "beta"},
+ "Early versions of py312 had a bug"
+ )
+ def test_concrete_subclasses_of_generic_classes(self):
+ T = TypeVar("T")
+
+ class FirstBase(Generic[T]): pass
+ class SecondBase(Generic[T]): pass
+ class First(FirstBase[int]): pass
+ class Second(SecondBase[int]): pass
+ class G(First, Second): pass
+ self.assertEqual(get_original_bases(G), (First, Second))
+
+ class First_(Generic[T]): pass
+ class Second_(Generic[T]): pass
+ class H(First_, Second_): pass
+ self.assertEqual(get_original_bases(H), (First_, Second_))
+
def test_namedtuples(self):
# On 3.12, this should work well with typing.NamedTuple and typing_extensions.NamedTuple
# On lower versions, it will only work fully with typing_extensions.NamedTuple
@@ -5839,6 +7916,80 @@ def test_attributes(self):
self.assertEqual(Variadic.__type_params__, (Ts,))
self.assertEqual(Variadic.__parameters__, tuple(iter(Ts)))
+ P = ParamSpec('P')
+ CallableP = TypeAliasType("CallableP", Callable[P, Any], type_params=(P, ))
+ self.assertEqual(CallableP.__name__, "CallableP")
+ self.assertEqual(CallableP.__value__, Callable[P, Any])
+ self.assertEqual(CallableP.__type_params__, (P,))
+ self.assertEqual(CallableP.__parameters__, (P,))
+
+ def test_alias_types_and_substitutions(self):
+ T = TypeVar('T')
+ T2 = TypeVar('T2')
+ T_default = TypeVar("T_default", default=int)
+ Ts = TypeVarTuple("Ts")
+ P = ParamSpec('P')
+
+ test_argument_cases = {
+ # arguments : expected parameters
+ int : (),
+ ... : (),
+ None : (),
+ T2 : (T2,),
+ Union[int, List[T2]] : (T2,),
+ Tuple[int, str] : (),
+ Tuple[T, T_default, T2] : (T, T_default, T2),
+ Tuple[Unpack[Ts]] : (Ts,),
+ Callable[[Unpack[Ts]], T2] : (Ts, T2),
+ Callable[P, T2] : (P, T2),
+ Callable[Concatenate[T2, P], T_default] : (T2, P, T_default),
+ TypeAliasType("NestedAlias", List[T], type_params=(T,))[T2] : (T2,),
+ Unpack[Ts] : (Ts,),
+ Unpack[Tuple[int, T2]] : (T2,),
+ Concatenate[int, P] : (P,),
+            # Bare TypeVarTuple usage is not tested; it would require 3.11+
+ # Ts : (Ts,), # invalid case
+ }
+
+ test_alias_cases = [
+ # Simple cases
+ TypeAliasType("ListT", List[T], type_params=(T,)),
+ TypeAliasType("UnionT", Union[int, List[T]], type_params=(T,)),
+            # Value has no type parameter of its own, but one is listed in type_params
+ TypeAliasType("ValueWithoutT", int, type_params=(T,)),
+ # Callable
+ TypeAliasType("CallableP", Callable[P, Any], type_params=(P, )),
+ TypeAliasType("CallableT", Callable[..., T], type_params=(T, )),
+ TypeAliasType("CallableTs", Callable[[Unpack[Ts]], Any], type_params=(Ts, )),
+ # TypeVarTuple
+ TypeAliasType("Variadic", Tuple[int, Unpack[Ts]], type_params=(Ts,)),
+ # TypeVar with default
+ TypeAliasType("TupleT_default", Tuple[T_default, T], type_params=(T, T_default)),
+ TypeAliasType("CallableT_default", Callable[[T], T_default], type_params=(T, T_default)),
+ ]
+
+ for alias in test_alias_cases:
+ with self.subTest(alias=alias, args=[]):
+ subscripted = alias[[]]
+ self.assertEqual(get_args(subscripted), ([],))
+ self.assertEqual(subscripted.__parameters__, ())
+ with self.subTest(alias=alias, args=()):
+ subscripted = alias[()]
+ self.assertEqual(get_args(subscripted), ())
+ self.assertEqual(subscripted.__parameters__, ())
+ with self.subTest(alias=alias, args=(int, float)):
+ subscripted = alias[int, float]
+ self.assertEqual(get_args(subscripted), (int, float))
+ self.assertEqual(subscripted.__parameters__, ())
+ with self.subTest(alias=alias, args=[int, float]):
+ subscripted = alias[[int, float]]
+ self.assertEqual(get_args(subscripted), ([int, float],))
+ self.assertEqual(subscripted.__parameters__, ())
+ for expected_args, expected_parameters in test_argument_cases.items():
+ with self.subTest(alias=alias, args=expected_args):
+ self.assertEqual(get_args(alias[expected_args]), (expected_args,))
+ self.assertEqual(alias[expected_args].__parameters__, expected_parameters)
+
def test_cannot_set_attributes(self):
Simple = TypeAliasType("Simple", int)
with self.assertRaisesRegex(AttributeError, "readonly attribute"):
@@ -5891,6 +8042,10 @@ def test_or(self):
self.assertEqual(Alias | None, Union[Alias, None])
self.assertEqual(Alias | (int | str), Union[Alias, int | str])
self.assertEqual(Alias | list[float], Union[Alias, list[float]])
+
+ if sys.version_info >= (3, 12):
+ Alias2 = typing.TypeAliasType("Alias2", str)
+ self.assertEqual(Alias | Alias2, Union[Alias, Alias2])
else:
with self.assertRaises(TypeError):
Alias | int
@@ -5899,12 +8054,19 @@ def test_or(self):
Alias | "Ref"
def test_getitem(self):
+ T = TypeVar('T')
ListOrSetT = TypeAliasType("ListOrSetT", Union[List[T], Set[T]], type_params=(T,))
subscripted = ListOrSetT[int]
self.assertEqual(get_args(subscripted), (int,))
self.assertIs(get_origin(subscripted), ListOrSetT)
- with self.assertRaises(TypeError):
- subscripted[str]
+ with self.assertRaisesRegex(TypeError,
+ "not a generic class"
+ # types.GenericAlias raises a different error in 3.10
+ if sys.version_info[:2] != (3, 10)
+ else "There are no type variables left in ListOrSetT"
+ ):
+ subscripted[int]
+
still_generic = ListOrSetT[Iterable[T]]
self.assertEqual(get_args(still_generic), (Iterable[T],))
@@ -5913,7 +8075,164 @@ def test_getitem(self):
self.assertEqual(get_args(fully_subscripted), (Iterable[float],))
self.assertIs(get_origin(fully_subscripted), ListOrSetT)
- def test_pickle(self):
+ ValueWithoutTypeVar = TypeAliasType("ValueWithoutTypeVar", int, type_params=(T,))
+ still_subscripted = ValueWithoutTypeVar[str]
+ self.assertEqual(get_args(still_subscripted), (str,))
+
+ def test_callable_without_concatenate(self):
+ P = ParamSpec('P')
+ CallableP = TypeAliasType("CallableP", Callable[P, Any], type_params=(P,))
+ get_args_test_cases = [
+ # List of (alias, expected_args)
+ # () -> Any
+ (CallableP[()], ()),
+ (CallableP[[]], ([],)),
+ # (int) -> Any
+ (CallableP[int], (int,)),
+ (CallableP[[int]], ([int],)),
+ # (int, int) -> Any
+ (CallableP[int, int], (int, int)),
+ (CallableP[[int, int]], ([int, int],)),
+ # (...) -> Any
+ (CallableP[...], (...,)),
+ # (int, ...) -> Any
+ (CallableP[[int, ...]], ([int, ...],)),
+ ]
+
+ for index, (expression, expected_args) in enumerate(get_args_test_cases):
+ with self.subTest(index=index, expression=expression):
+ self.assertEqual(get_args(expression), expected_args)
+
+ self.assertEqual(CallableP[...], CallableP[(...,)])
+ # (T) -> Any
+ CallableT = CallableP[T]
+ self.assertEqual(get_args(CallableT), (T,))
+ self.assertEqual(CallableT.__parameters__, (T,))
+
+ def test_callable_with_concatenate(self):
+ P = ParamSpec('P')
+ P2 = ParamSpec('P2')
+ CallableP = TypeAliasType("CallableP", Callable[P, Any], type_params=(P,))
+
+ callable_concat = CallableP[Concatenate[int, P2]]
+ self.assertEqual(callable_concat.__parameters__, (P2,))
+ concat_usage = callable_concat[str]
+ with self.subTest("get_args of Concatenate in TypeAliasType"):
+ if not TYPING_3_10_0:
+                # args are: ([<class 'int'>, ~P2],)
+ self.skipTest("Nested ParamSpec is not substituted")
+ self.assertEqual(get_args(concat_usage), ((int, str),))
+ with self.subTest("Equality of parameter_expression without []"):
+ if not TYPING_3_10_0:
+ self.skipTest("Nested list is invalid type form")
+ self.assertEqual(concat_usage, callable_concat[[str]])
+
+ def test_substitution(self):
+ T = TypeVar('T')
+ Ts = TypeVarTuple("Ts")
+
+ CallableTs = TypeAliasType("CallableTs", Callable[[Unpack[Ts]], Any], type_params=(Ts, ))
+ unpack_callable = CallableTs[Unpack[Tuple[int, T]]]
+ self.assertEqual(get_args(unpack_callable), (Unpack[Tuple[int, T]],))
+
+ P = ParamSpec('P')
+ CallableP = TypeAliasType("CallableP", Callable[P, T], type_params=(P, T))
+ callable_concat = CallableP[Concatenate[int, P], Any]
+ self.assertEqual(get_args(callable_concat), (Concatenate[int, P], Any))
+
+ def test_wrong_amount_of_parameters(self):
+ T = TypeVar('T')
+ T2 = TypeVar("T2")
+ P = ParamSpec('P')
+ ListOrSetT = TypeAliasType("ListOrSetT", Union[List[T], Set[T]], type_params=(T,))
+ TwoT = TypeAliasType("TwoT", Union[List[T], Set[T2]], type_params=(T, T2))
+ CallablePT = TypeAliasType("CallablePT", Callable[P, T], type_params=(P, T))
+
+ # Not enough parameters
+ test_cases = [
+ # not_enough
+ (TwoT[int], [(int,), ()]),
+ (TwoT[T], [(T,), (T,)]),
+ # callable and not enough
+ (CallablePT[int], [(int,), ()]),
+ # too many
+ (ListOrSetT[int, bool], [(int, bool), ()]),
+ # callable and too many
+ (CallablePT[str, float, int], [(str, float, int), ()]),
+            # Check that the TypeVar is still present even when over-substituted
+ (ListOrSetT[int, T], [(int, T), (T,)]),
+ # With and without list for ParamSpec
+ (CallablePT[str, float, T], [(str, float, T), (T,)]),
+ (CallablePT[[str], float, int, T2], [([str], float, int, T2), (T2,)]),
+ ]
+
+ for index, (alias, [expected_args, expected_params]) in enumerate(test_cases):
+ with self.subTest(index=index, alias=alias):
+ self.assertEqual(get_args(alias), expected_args)
+ self.assertEqual(alias.__parameters__, expected_params)
+
+    # The condition should align with the version of GenericAlias usage in __getitem__ or be 3.11+
+ @skipIf(TYPING_3_10_0, "Most arguments are allowed in 3.11+ or with GenericAlias")
+ def test_invalid_cases_before_3_10(self):
+ T = TypeVar('T')
+ ListOrSetT = TypeAliasType("ListOrSetT", Union[List[T], Set[T]], type_params=(T,))
+ with self.assertRaises(TypeError):
+ ListOrSetT[Generic[T]]
+ with self.assertRaises(TypeError):
+ ListOrSetT[(Generic[T], )]
+
+ def test_unpack_parameter_collection(self):
+ Ts = TypeVarTuple("Ts")
+
+ class Foo(Generic[Unpack[Ts]]):
+ bar: Tuple[Unpack[Ts]]
+
+ FooAlias = TypeAliasType("FooAlias", Foo[Unpack[Ts]], type_params=(Ts,))
+ self.assertEqual(FooAlias[Unpack[Tuple[str]]].__parameters__, ())
+ self.assertEqual(FooAlias[Unpack[Tuple[T]]].__parameters__, (T,))
+
+ P = ParamSpec("P")
+ CallableP = TypeAliasType("CallableP", Callable[P, Any], type_params=(P,))
+ call_int_T = CallableP[Unpack[Tuple[int, T]]]
+ self.assertEqual(call_int_T.__parameters__, (T,))
+
+ def test_alias_attributes(self):
+ T = TypeVar('T')
+ T2 = TypeVar('T2')
+ ListOrSetT = TypeAliasType("ListOrSetT", Union[List[T], Set[T]], type_params=(T,))
+
+ subscripted = ListOrSetT[int]
+ self.assertEqual(subscripted.__module__, ListOrSetT.__module__)
+ self.assertEqual(subscripted.__name__, "ListOrSetT")
+ self.assertEqual(subscripted.__value__, Union[List[T], Set[T]])
+ self.assertEqual(subscripted.__type_params__, (T,))
+
+ still_generic = ListOrSetT[Iterable[T2]]
+ self.assertEqual(still_generic.__module__, ListOrSetT.__module__)
+ self.assertEqual(still_generic.__name__, "ListOrSetT")
+ self.assertEqual(still_generic.__value__, Union[List[T], Set[T]])
+ self.assertEqual(still_generic.__type_params__, (T,))
+
+ fully_subscripted = still_generic[float]
+ self.assertEqual(fully_subscripted.__module__, ListOrSetT.__module__)
+ self.assertEqual(fully_subscripted.__name__, "ListOrSetT")
+ self.assertEqual(fully_subscripted.__value__, Union[List[T], Set[T]])
+ self.assertEqual(fully_subscripted.__type_params__, (T,))
+
+ def test_subscription_without_type_params(self):
+ Simple = TypeAliasType("Simple", int)
+ with self.assertRaises(TypeError, msg="Only generic type aliases are subscriptable"):
+ Simple[int]
+
+ # A TypeVar in the value does not allow subscription
+ T = TypeVar('T')
+ MissingTypeParamsErr = TypeAliasType("MissingTypeParamsErr", List[T])
+ self.assertEqual(MissingTypeParamsErr.__type_params__, ())
+ self.assertEqual(MissingTypeParamsErr.__parameters__, ())
+ with self.assertRaises(TypeError, msg="Only generic type aliases are subscriptable"):
+ MissingTypeParamsErr[int]
+
+ def test_pickle(self):
global Alias
Alias = TypeAliasType("Alias", int)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
@@ -5927,6 +8246,1066 @@ def test_no_instance_subclassing(self):
class MyAlias(TypeAliasType):
pass
+ def test_type_var_compatibility(self):
+ # Regression test to assure compatibility with typing variants
+ typingT = typing.TypeVar('typingT')
+ T1 = TypeAliasType("TypingTypeVar", ..., type_params=(typingT,))
+ self.assertEqual(T1.__type_params__, (typingT,))
+
+ # Test typing_extensions backports
+ textT = TypeVar('textT')
+ T2 = TypeAliasType("TypingExtTypeVar", ..., type_params=(textT,))
+ self.assertEqual(T2.__type_params__, (textT,))
+
+ textP = ParamSpec("textP")
+ T3 = TypeAliasType("TypingExtParamSpec", ..., type_params=(textP,))
+ self.assertEqual(T3.__type_params__, (textP,))
+
+ textTs = TypeVarTuple("textTs")
+ T4 = TypeAliasType("TypingExtTypeVarTuple", ..., type_params=(textTs,))
+ self.assertEqual(T4.__type_params__, (textTs,))
+
+ @skipUnless(TYPING_3_10_0, "typing.ParamSpec is not available before 3.10")
+ def test_param_spec_compatibility(self):
+ # Regression test to assure compatibility with typing variant
+ typingP = typing.ParamSpec("typingP")
+ T5 = TypeAliasType("TypingParamSpec", ..., type_params=(typingP,))
+ self.assertEqual(T5.__type_params__, (typingP,))
+
+ @skipUnless(TYPING_3_12_0, "typing.TypeVarTuple is not available before 3.12")
+ def test_type_var_tuple_compatibility(self):
+ # Regression test to assure compatibility with typing variant
+ typingTs = typing.TypeVarTuple("typingTs")
+ T6 = TypeAliasType("TypingTypeVarTuple", ..., type_params=(typingTs,))
+ self.assertEqual(T6.__type_params__, (typingTs,))
+
+ def test_type_params_possibilities(self):
+ T = TypeVar('T')
+ # Test not a tuple
+ with self.assertRaisesRegex(TypeError, "type_params must be a tuple"):
+ TypeAliasType("InvalidTypeParams", List[T], type_params=[T])
+
+ # Test default order and other invalid inputs
+ T_default = TypeVar('T_default', default=int)
+ Ts = TypeVarTuple('Ts')
+ Ts_default = TypeVarTuple('Ts_default', default=Unpack[Tuple[str, int]])
+ P = ParamSpec('P')
+ P_default = ParamSpec('P_default', default=[str, int])
+
+ # NOTE: PEP 696 states: "TypeVars with defaults cannot immediately follow TypeVarTuples"
+ # this is currently not enforced for the type statement and is not tested.
+ # PEP 695: Double usage of the same name is also not enforced and not tested.
+ valid_cases = [
+ (T, P, Ts),
+ (T, Ts_default),
+ (P_default, T_default),
+ (P, T_default, Ts_default),
+ (T_default, P_default, Ts_default),
+ ]
+ invalid_cases = [
+ ((T_default, T), f"non-default type parameter '{T!r}' follows default"),
+ ((P_default, P), f"non-default type parameter '{P!r}' follows default"),
+ ((Ts_default, T), f"non-default type parameter '{T!r}' follows default"),
+ # Only type params are accepted
+ ((1,), "Expected a type param, got 1"),
+ ((str,), f"Expected a type param, got {str!r}"),
+ # Unpack is not a TypeVar but isinstance(Unpack[Ts], TypeVar) is True in Python < 3.12
+ ((Unpack[Ts],), f"Expected a type param, got {re.escape(repr(Unpack[Ts]))}"),
+ ]
+
+ for case in valid_cases:
+ with self.subTest(type_params=case):
+ TypeAliasType("OkCase", List[T], type_params=case)
+ for case, msg in invalid_cases:
+ with self.subTest(type_params=case):
+ with self.assertRaisesRegex(TypeError, msg):
+ TypeAliasType("InvalidCase", List[T], type_params=case)
+
+class DocTests(BaseTestCase):
+ def test_annotation(self):
+
+ def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: pass
+
+ hints = get_type_hints(hi, include_extras=True)
+ doc_info = hints["to"].__metadata__[0]
+ self.assertEqual(doc_info.documentation, "Who to say hi to")
+ self.assertIsInstance(doc_info, Doc)
+
+ def test_repr(self):
+ doc_info = Doc("Who to say hi to")
+ self.assertEqual(repr(doc_info), "Doc('Who to say hi to')")
+
+ def test_hashability(self):
+ doc_info = Doc("Who to say hi to")
+ self.assertIsInstance(hash(doc_info), int)
+ self.assertNotEqual(hash(doc_info), hash(Doc("Who not to say hi to")))
+
+ def test_equality(self):
+ doc_info = Doc("Who to say hi to")
+ # Equal to itself
+ self.assertEqual(doc_info, doc_info)
+ # Equal to another instance with the same string
+ self.assertEqual(doc_info, Doc("Who to say hi to"))
+ # Not equal to another instance with a different string
+ self.assertNotEqual(doc_info, Doc("Who not to say hi to"))
+
+ def test_pickle(self):
+ doc_info = Doc("Who to say hi to")
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ pickled = pickle.dumps(doc_info, protocol=proto)
+ self.assertEqual(doc_info, pickle.loads(pickled))
+
+
+@skipUnless(
+ hasattr(typing_extensions, "CapsuleType"),
+ "CapsuleType is not available on all Python implementations"
+)
+class CapsuleTypeTests(BaseTestCase):
+ def test_capsule_type(self):
+ import _datetime
+ self.assertIsInstance(_datetime.datetime_CAPI, typing_extensions.CapsuleType)
+
+
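+# Decorator used to test get_annotations() on functools.wraps-style wrappers.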
+def times_three(fn):
+ @functools.wraps(fn)
+ def wrapper(a, b):
+ return fn(a * 3, b * 3)
+
+ return wrapper
+
+
+class TestGetAnnotations(BaseTestCase):
+ @classmethod
+ def setUpClass(cls):
+ with tempfile.TemporaryDirectory() as tempdir:
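+            # The helper modules are written and imported while the temporary
+            # directory is on sys.path; they stay cached after it is removed.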
+ sys.path.append(tempdir)
+ Path(tempdir, "inspect_stock_annotations.py").write_text(STOCK_ANNOTATIONS)
+ Path(tempdir, "inspect_stringized_annotations.py").write_text(STRINGIZED_ANNOTATIONS)
+ Path(tempdir, "inspect_stringized_annotations_2.py").write_text(STRINGIZED_ANNOTATIONS_2)
+ cls.inspect_stock_annotations = importlib.import_module("inspect_stock_annotations")
+ cls.inspect_stringized_annotations = importlib.import_module("inspect_stringized_annotations")
+ cls.inspect_stringized_annotations_2 = importlib.import_module("inspect_stringized_annotations_2")
+ sys.path.pop()
+
+ @classmethod
+ def tearDownClass(cls):
+ for modname in (
+ "inspect_stock_annotations",
+ "inspect_stringized_annotations",
+ "inspect_stringized_annotations_2",
+ ):
+ delattr(cls, modname)
+ del sys.modules[modname]
+
+ def test_builtin_type(self):
+ self.assertEqual(get_annotations(int), {})
+ self.assertEqual(get_annotations(object), {})
+
+ def test_format(self):
+ def f1(a: int):
+ pass
+
+ def f2(a: "undefined"): # noqa: F821
+ pass
+
+ self.assertEqual(
+ get_annotations(f1, format=Format.VALUE), {"a": int}
+ )
+ self.assertEqual(get_annotations(f1, format=1), {"a": int})
+
+ self.assertEqual(
+ get_annotations(f2, format=Format.FORWARDREF),
+ {"a": "undefined"},
+ )
+ # Test that the raw int also works
+ self.assertEqual(
+ get_annotations(f2, format=Format.FORWARDREF.value),
+ {"a": "undefined"},
+ )
+
+ self.assertEqual(
+ get_annotations(f1, format=Format.STRING),
+ {"a": "int"},
+ )
+ self.assertEqual(
+ get_annotations(f1, format=Format.STRING.value),
+ {"a": "int"},
+ )
+
+ with self.assertRaises(ValueError):
+ get_annotations(f1, format=0)
+
+ with self.assertRaises(ValueError):
+ get_annotations(f1, format=42)
+
+ def test_custom_object_with_annotations(self):
+ class C:
+ def __init__(self, x: int = 0, y: str = ""):
+ self.__annotations__ = {"x": int, "y": str}
+
+ self.assertEqual(get_annotations(C()), {"x": int, "y": str})
+
+ def test_custom_format_eval_str(self):
+ def foo():
+ pass
+
+ with self.assertRaises(ValueError):
+ get_annotations(
+ foo, format=Format.FORWARDREF, eval_str=True
+ )
+ get_annotations(
+ foo, format=Format.STRING, eval_str=True
+ )
+
+ def test_stock_annotations(self):
+ def foo(a: int, b: str):
+ pass
+
+ for format in (Format.VALUE, Format.FORWARDREF):
+ with self.subTest(format=format):
+ self.assertEqual(
+ get_annotations(foo, format=format),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ get_annotations(foo, format=Format.STRING),
+ {"a": "int", "b": "str"},
+ )
+
+ foo.__annotations__ = {"a": "foo", "b": "str"}
+ for format in Format:
+ with self.subTest(format=format):
+ if format is Format.VALUE_WITH_FAKE_GLOBALS:
+ with self.assertRaisesRegex(
+ ValueError,
+ "The VALUE_WITH_FAKE_GLOBALS format is for internal use only"
+ ):
+ get_annotations(foo, format=format)
+ else:
+ self.assertEqual(
+ get_annotations(foo, format=format),
+ {"a": "foo", "b": "str"},
+ )
+
+ self.assertEqual(
+ get_annotations(foo, eval_str=True, locals=locals()),
+ {"a": foo, "b": str},
+ )
+ self.assertEqual(
+ get_annotations(foo, eval_str=True, globals=locals()),
+ {"a": foo, "b": str},
+ )
+
+ def test_stock_annotations_in_module(self):
+ isa = self.inspect_stock_annotations
+
+ for kwargs in [
+ {},
+ {"eval_str": False},
+ {"format": Format.VALUE},
+ {"format": Format.FORWARDREF},
+ {"format": Format.VALUE, "eval_str": False},
+ {"format": Format.FORWARDREF, "eval_str": False},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ get_annotations(isa, **kwargs), {"a": int, "b": str}
+ )
+ self.assertEqual(
+ get_annotations(isa.MyClass, **kwargs),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ get_annotations(isa.function, **kwargs),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(isa.function2, **kwargs),
+ {"a": int, "b": "str", "c": isa.MyClass, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(isa.function3, **kwargs),
+ {"a": "int", "b": "str", "c": "MyClass"},
+ )
+ self.assertEqual(
+ get_annotations(inspect, **kwargs), {}
+ ) # inspect module has no annotations
+ self.assertEqual(
+ get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ get_annotations(isa.unannotated_function, **kwargs), {}
+ )
+
+ for kwargs in [
+ {"eval_str": True},
+ {"format": Format.VALUE, "eval_str": True},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ get_annotations(isa, **kwargs), {"a": int, "b": str}
+ )
+ self.assertEqual(
+ get_annotations(isa.MyClass, **kwargs),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ get_annotations(isa.function, **kwargs),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(isa.function2, **kwargs),
+ {"a": int, "b": str, "c": isa.MyClass, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(isa.function3, **kwargs),
+ {"a": int, "b": str, "c": isa.MyClass},
+ )
+ self.assertEqual(get_annotations(inspect, **kwargs), {})
+ self.assertEqual(
+ get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ get_annotations(isa.unannotated_function, **kwargs), {}
+ )
+
+ self.assertEqual(
+ get_annotations(isa, format=Format.STRING),
+ {"a": "int", "b": "str"},
+ )
+ self.assertEqual(
+ get_annotations(isa.MyClass, format=Format.STRING),
+ {"a": "int", "b": "str"},
+ )
+ mycls = "MyClass" if sys.version_info >= (3, 14) else "inspect_stock_annotations.MyClass"
+ self.assertEqual(
+ get_annotations(isa.function, format=Format.STRING),
+ {"a": "int", "b": "str", "return": mycls},
+ )
+ self.assertEqual(
+ get_annotations(
+ isa.function2, format=Format.STRING
+ ),
+ {"a": "int", "b": "str", "c": mycls, "return": mycls},
+ )
+ self.assertEqual(
+ get_annotations(
+ isa.function3, format=Format.STRING
+ ),
+ {"a": "int", "b": "str", "c": "MyClass"},
+ )
+ self.assertEqual(
+ get_annotations(inspect, format=Format.STRING),
+ {},
+ )
+ self.assertEqual(
+ get_annotations(
+ isa.UnannotatedClass, format=Format.STRING
+ ),
+ {},
+ )
+ self.assertEqual(
+ get_annotations(
+ isa.unannotated_function, format=Format.STRING
+ ),
+ {},
+ )
+
+ def test_stock_annotations_on_wrapper(self):
+ isa = self.inspect_stock_annotations
+
+ wrapped = times_three(isa.function)
+ self.assertEqual(wrapped(1, "x"), isa.MyClass(3, "xxx"))
+ self.assertIsNot(wrapped.__globals__, isa.function.__globals__)
+ self.assertEqual(
+ get_annotations(wrapped),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(wrapped, format=Format.FORWARDREF),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ mycls = "MyClass" if sys.version_info >= (3, 14) else "inspect_stock_annotations.MyClass"
+ self.assertEqual(
+ get_annotations(wrapped, format=Format.STRING),
+ {"a": "int", "b": "str", "return": mycls},
+ )
+ self.assertEqual(
+ get_annotations(wrapped, eval_str=True),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(wrapped, eval_str=False),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+
+ def test_stringized_annotations_in_module(self):
+ isa = self.inspect_stringized_annotations
+ for kwargs in [
+ {},
+ {"eval_str": False},
+ {"format": Format.VALUE},
+ {"format": Format.FORWARDREF},
+ {"format": Format.STRING},
+ {"format": Format.VALUE, "eval_str": False},
+ {"format": Format.FORWARDREF, "eval_str": False},
+ {"format": Format.STRING, "eval_str": False},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ get_annotations(isa, **kwargs), {"a": "int", "b": "str"}
+ )
+ self.assertEqual(
+ get_annotations(isa.MyClass, **kwargs),
+ {"a": "int", "b": "str"},
+ )
+ self.assertEqual(
+ get_annotations(isa.function, **kwargs),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+ self.assertEqual(
+ get_annotations(isa.function2, **kwargs),
+ {"a": "int", "b": "'str'", "c": "MyClass", "return": "MyClass"},
+ )
+ self.assertEqual(
+ get_annotations(isa.function3, **kwargs),
+ {"a": "'int'", "b": "'str'", "c": "'MyClass'"},
+ )
+ self.assertEqual(
+ get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ get_annotations(isa.unannotated_function, **kwargs), {}
+ )
+
+ for kwargs in [
+ {"eval_str": True},
+ {"format": Format.VALUE, "eval_str": True},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ get_annotations(isa, **kwargs), {"a": int, "b": str}
+ )
+ self.assertEqual(
+ get_annotations(isa.MyClass, **kwargs),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ get_annotations(isa.function, **kwargs),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(isa.function2, **kwargs),
+ {"a": int, "b": "str", "c": isa.MyClass, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(isa.function3, **kwargs),
+ {"a": "int", "b": "str", "c": "MyClass"},
+ )
+ self.assertEqual(
+ get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ get_annotations(isa.unannotated_function, **kwargs), {}
+ )
+
+ def test_stringized_annotations_in_empty_module(self):
+ isa2 = self.inspect_stringized_annotations_2
+ self.assertEqual(get_annotations(isa2), {})
+ self.assertEqual(get_annotations(isa2, eval_str=True), {})
+ self.assertEqual(get_annotations(isa2, eval_str=False), {})
+
+ def test_stringized_annotations_on_wrapper(self):
+ isa = self.inspect_stringized_annotations
+ wrapped = times_three(isa.function)
+ self.assertEqual(wrapped(1, "x"), isa.MyClass(3, "xxx"))
+ self.assertIsNot(wrapped.__globals__, isa.function.__globals__)
+ self.assertEqual(
+ get_annotations(wrapped),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+ self.assertEqual(
+ get_annotations(wrapped, eval_str=True),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ get_annotations(wrapped, eval_str=False),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+
+ def test_stringized_annotations_on_class(self):
+ isa = self.inspect_stringized_annotations
+ # test that local namespace lookups work
+ self.assertEqual(
+ get_annotations(isa.MyClassWithLocalAnnotations),
+ {"x": "mytype"},
+ )
+ self.assertEqual(
+ get_annotations(isa.MyClassWithLocalAnnotations, eval_str=True),
+ {"x": int},
+ )
+
+ def test_modify_annotations(self):
+ def f(x: int):
+ pass
+
+ self.assertEqual(get_annotations(f), {"x": int})
+ self.assertEqual(
+ get_annotations(f, format=Format.FORWARDREF),
+ {"x": int},
+ )
+
+ f.__annotations__["x"] = str
+ self.assertEqual(get_annotations(f), {"x": str})
+
+
+class TestGetAnnotationsMetaclasses(BaseTestCase):
+ def test_annotated_meta(self):
+ class Meta(type):
+ a: int
+
+ class X(metaclass=Meta):
+ pass
+
+ class Y(metaclass=Meta):
+ b: float
+
+ self.assertEqual(get_annotations(Meta), {"a": int})
+ self.assertEqual(get_annotations(X), {})
+ self.assertEqual(get_annotations(Y), {"b": float})
+
+ def test_unannotated_meta(self):
+ class Meta(type): pass
+
+ class X(metaclass=Meta):
+ a: str
+
+ class Y(X): pass
+
+ self.assertEqual(get_annotations(Meta), {})
+ self.assertEqual(get_annotations(Y), {})
+ self.assertEqual(get_annotations(X), {"a": str})
+
+ def test_ordering(self):
+ # Based on a sample by David Ellis
+ # https://discuss.python.org/t/pep-749-implementing-pep-649/54974/38
+
+ def make_classes():
+ class Meta(type):
+ a: int
+ expected_annotations = {"a": int}
+
+ class A(type, metaclass=Meta):
+ b: float
+ expected_annotations = {"b": float}
+
+ class B(metaclass=A):
+ c: str
+ expected_annotations = {"c": str}
+
+ class C(B):
+ expected_annotations = {}
+
+ class D(metaclass=Meta):
+ expected_annotations = {}
+
+ return Meta, A, B, C, D
+
+ classes = make_classes()
+ class_count = len(classes)
+ for order in itertools.permutations(range(class_count), class_count):
+ names = ", ".join(classes[i].__name__ for i in order)
+ with self.subTest(names=names):
+ classes = make_classes() # Regenerate classes
+ for i in order:
+ get_annotations(classes[i])
+ for c in classes:
+ with self.subTest(c=c):
+ self.assertEqual(get_annotations(c), c.expected_annotations)
+
+
+@skipIf(STRINGIZED_ANNOTATIONS_PEP_695 is None, "PEP 695 has yet to be")
+class TestGetAnnotationsWithPEP695(BaseTestCase):
+ @classmethod
+ def setUpClass(cls):
+ with tempfile.TemporaryDirectory() as tempdir:
+ sys.path.append(tempdir)
+ Path(tempdir, "inspect_stringized_annotations_pep_695.py").write_text(STRINGIZED_ANNOTATIONS_PEP_695)
+ cls.inspect_stringized_annotations_pep_695 = importlib.import_module(
+ "inspect_stringized_annotations_pep_695"
+ )
+ sys.path.pop()
+
+ @classmethod
+ def tearDownClass(cls):
+ del cls.inspect_stringized_annotations_pep_695
+ del sys.modules["inspect_stringized_annotations_pep_695"]
+
+ def test_pep695_generic_class_with_future_annotations(self):
+ ann_module695 = self.inspect_stringized_annotations_pep_695
+ A_annotations = get_annotations(ann_module695.A, eval_str=True)
+ A_type_params = ann_module695.A.__type_params__
+ self.assertIs(A_annotations["x"], A_type_params[0])
+ self.assertEqual(A_annotations["y"].__args__[0], Unpack[A_type_params[1]])
+ self.assertIs(A_annotations["z"].__args__[0], A_type_params[2])
+
+ def test_pep695_generic_class_with_future_annotations_and_local_shadowing(self):
+ B_annotations = get_annotations(
+ self.inspect_stringized_annotations_pep_695.B, eval_str=True
+ )
+ self.assertEqual(B_annotations, {"x": int, "y": str, "z": bytes})
+
+ def test_pep695_generic_class_with_future_annotations_name_clash_with_global_vars(self):
+ ann_module695 = self.inspect_stringized_annotations_pep_695
+ C_annotations = get_annotations(ann_module695.C, eval_str=True)
+ self.assertEqual(
+ set(C_annotations.values()),
+ set(ann_module695.C.__type_params__)
+ )
+
+ def test_pep_695_generic_function_with_future_annotations(self):
+ ann_module695 = self.inspect_stringized_annotations_pep_695
+ generic_func_annotations = get_annotations(
+ ann_module695.generic_function, eval_str=True
+ )
+ func_t_params = ann_module695.generic_function.__type_params__
+ self.assertEqual(
+ generic_func_annotations.keys(), {"x", "y", "z", "zz", "return"}
+ )
+ self.assertIs(generic_func_annotations["x"], func_t_params[0])
+ self.assertEqual(generic_func_annotations["y"], Unpack[func_t_params[1]])
+ self.assertIs(generic_func_annotations["z"].__origin__, func_t_params[2])
+ self.assertIs(generic_func_annotations["zz"].__origin__, func_t_params[2])
+
+ def test_pep_695_generic_function_with_future_annotations_name_clash_with_global_vars(self):
+ self.assertEqual(
+ set(
+ get_annotations(
+ self.inspect_stringized_annotations_pep_695.generic_function_2,
+ eval_str=True
+ ).values()
+ ),
+ set(
+ self.inspect_stringized_annotations_pep_695.generic_function_2.__type_params__
+ )
+ )
+
+ def test_pep_695_generic_method_with_future_annotations(self):
+ ann_module695 = self.inspect_stringized_annotations_pep_695
+ generic_method_annotations = get_annotations(
+ ann_module695.D.generic_method, eval_str=True
+ )
+ params = {
+ param.__name__: param
+ for param in ann_module695.D.generic_method.__type_params__
+ }
+ self.assertEqual(
+ generic_method_annotations,
+ {"x": params["Foo"], "y": params["Bar"], "return": None}
+ )
+
+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_vars(self):
+ self.assertEqual(
+ set(
+ get_annotations(
+ self.inspect_stringized_annotations_pep_695.D.generic_method_2,
+ eval_str=True
+ ).values()
+ ),
+ set(
+ self.inspect_stringized_annotations_pep_695.D.generic_method_2.__type_params__
+ )
+ )
+
+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_and_local_vars(self):
+ self.assertEqual(
+ get_annotations(
+ self.inspect_stringized_annotations_pep_695.E, eval_str=True
+ ),
+ {"x": str},
+ )
+
+ def test_pep_695_generics_with_future_annotations_nested_in_function(self):
+ results = self.inspect_stringized_annotations_pep_695.nested()
+
+ self.assertEqual(
+ set(results.F_annotations.values()),
+ set(results.F.__type_params__)
+ )
+ self.assertEqual(
+ set(results.F_meth_annotations.values()),
+ set(results.F.generic_method.__type_params__)
+ )
+ self.assertNotEqual(
+ set(results.F_meth_annotations.values()),
+ set(results.F.__type_params__)
+ )
+ self.assertEqual(
+ set(results.F_meth_annotations.values()).intersection(results.F.__type_params__),
+ set()
+ )
+
+ self.assertEqual(results.G_annotations, {"x": str})
+
+ self.assertEqual(
+ set(results.generic_func_annotations.values()),
+ set(results.generic_func.__type_params__)
+ )
+
+
+class EvaluateForwardRefTests(BaseTestCase):
+ def test_evaluate_forward_ref(self):
+ int_ref = typing_extensions.ForwardRef('int')
+ self.assertIs(typing_extensions.evaluate_forward_ref(int_ref), int)
+ self.assertIs(
+ typing_extensions.evaluate_forward_ref(int_ref, type_params=()),
+ int,
+ )
+ self.assertIs(
+ typing_extensions.evaluate_forward_ref(int_ref, format=typing_extensions.Format.VALUE),
+ int,
+ )
+ self.assertIs(
+ typing_extensions.evaluate_forward_ref(
+ int_ref, format=typing_extensions.Format.FORWARDREF,
+ ),
+ int,
+ )
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(
+ int_ref, format=typing_extensions.Format.STRING,
+ ),
+ 'int',
+ )
+
+ def test_evaluate_forward_ref_undefined(self):
+ missing = typing_extensions.ForwardRef('missing')
+ with self.assertRaises(NameError):
+ typing_extensions.evaluate_forward_ref(missing)
+ self.assertIs(
+ typing_extensions.evaluate_forward_ref(
+ missing, format=typing_extensions.Format.FORWARDREF,
+ ),
+ missing,
+ )
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(
+ missing, format=typing_extensions.Format.STRING,
+ ),
+ "missing",
+ )
+
+ def test_evaluate_forward_ref_nested(self):
+ ref = typing_extensions.ForwardRef("Union[int, list['str']]")
+ ns = {"Union": Union}
+ if sys.version_info >= (3, 11):
+ expected = Union[int, list[str]]
+ else:
+ expected = Union[int, list['str']] # TODO: evaluate nested forward refs in Python < 3.11
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, globals=ns),
+ expected,
+ )
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(
+ ref, globals=ns, format=typing_extensions.Format.FORWARDREF
+ ),
+ expected,
+ )
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, format=typing_extensions.Format.STRING),
+ "Union[int, list['str']]",
+ )
+
+ why = typing_extensions.ForwardRef('"\'str\'"')
+ self.assertIs(typing_extensions.evaluate_forward_ref(why), str)
+
+ @skipUnless(sys.version_info >= (3, 10), "Relies on PEP 604")
+ def test_evaluate_forward_ref_nested_pep604(self):
+ ref = typing_extensions.ForwardRef("int | list['str']")
+ if sys.version_info >= (3, 11):
+ expected = int | list[str]
+ else:
+ expected = int | list['str'] # TODO: evaluate nested forward refs in Python < 3.11
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref),
+ expected,
+ )
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, format=typing_extensions.Format.FORWARDREF),
+ expected,
+ )
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, format=typing_extensions.Format.STRING),
+ "int | list['str']",
+ )
+
+ def test_evaluate_forward_ref_none(self):
+ none_ref = typing_extensions.ForwardRef('None')
+ self.assertIs(typing_extensions.evaluate_forward_ref(none_ref), None)
+
+ def test_globals(self):
+ A = "str"
+ ref = typing_extensions.ForwardRef('list[A]')
+ with self.assertRaises(NameError):
+ typing_extensions.evaluate_forward_ref(ref)
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, globals={'A': A}),
+ list[str] if sys.version_info >= (3, 11) else list['str'],
+ )
+
+ def test_owner(self):
+ ref = typing_extensions.ForwardRef("A")
+
+ with self.assertRaises(NameError):
+ typing_extensions.evaluate_forward_ref(ref)
+
+ # We default to the globals of `owner`,
+ # so it no longer raises `NameError`
+ self.assertIs(
+ typing_extensions.evaluate_forward_ref(ref, owner=Loop), A
+ )
+
+ @skipUnless(sys.version_info >= (3, 14), "Not yet implemented in Python < 3.14")
+ def test_inherited_owner(self):
+ # owner passed to evaluate_forward_ref
+ ref = typing_extensions.ForwardRef("list['A']")
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, owner=Loop),
+ list[A],
+ )
+
+ # owner set on the ForwardRef
+ ref = typing_extensions.ForwardRef("list['A']", owner=Loop)
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref),
+ list[A],
+ )
+
+ @skipUnless(sys.version_info >= (3, 14), "Not yet implemented in Python < 3.14")
+ def test_partial_evaluation(self):
+ ref = typing_extensions.ForwardRef("list[A]")
+ with self.assertRaises(NameError):
+ typing_extensions.evaluate_forward_ref(ref)
+
+ self.assertEqual(
+ typing_extensions.evaluate_forward_ref(ref, format=typing_extensions.Format.FORWARDREF),
+ list[EqualToForwardRef('A')],
+ )
+
+ def test_global_constant(self):
+ if sys.version_info[:3] > (3, 10, 0):
+ self.assertTrue(_FORWARD_REF_HAS_CLASS)
+
+ def test_forward_ref_fallback(self):
+ with self.assertRaises(NameError):
+ evaluate_forward_ref(typing.ForwardRef("doesntexist"))
+ ref = typing.ForwardRef("doesntexist")
+ self.assertIs(evaluate_forward_ref(ref, format=Format.FORWARDREF), ref)
+
+ class X:
+ unresolvable = "doesnotexist2"
+
+ evaluated_ref = evaluate_forward_ref(
+ typing.ForwardRef("X.unresolvable"),
+ locals={"X": X},
+ type_params=None,
+ format=Format.FORWARDREF,
+ )
+ self.assertEqual(evaluated_ref, EqualToForwardRef("doesnotexist2"))
+
+ def test_evaluate_with_type_params(self):
+ # Use a T name that is not in globals
+ self.assertNotIn("Tx", globals())
+ if not TYPING_3_12_0:
+ Tx = TypeVar("Tx")
+ class Gen(Generic[Tx]):
+ alias = int
+ if not hasattr(Gen, "__type_params__"):
+ Gen.__type_params__ = (Tx,)
+ self.assertEqual(Gen.__type_params__, (Tx,))
+ del Tx
+ else:
+ ns = {}
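+            # exec() keeps the PEP 695 class syntax in a string so this file still parses on older Pythons.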
+ exec(textwrap.dedent("""
+ class Gen[Tx]:
+ alias = int
+ """), None, ns)
+ Gen = ns["Gen"]
+
+ # owner=None, type_params=None
+ # NOTE: The behavior of owner=None might change in the future when ForwardRef.__owner__ is available
+ with self.assertRaises(NameError):
+ evaluate_forward_ref(typing.ForwardRef("Tx"))
+ with self.assertRaises(NameError):
+ evaluate_forward_ref(typing.ForwardRef("Tx"), type_params=())
+ with self.assertRaises(NameError):
+ evaluate_forward_ref(typing.ForwardRef("Tx"), owner=int)
+
+ (Tx,) = Gen.__type_params__
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Tx"), type_params=Gen.__type_params__), Tx)
+
+        # For this test it's important that Tx is not a global variable, i.e. do not use "T" here
+ self.assertNotIn("Tx", globals())
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Tx"), owner=Gen), Tx)
+
+ # Different type_params take precedence
+ not_Tx = TypeVar("Tx") # different TypeVar with same name
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Tx"), type_params=(not_Tx,), owner=Gen), not_Tx)
+
+ # globals can take higher precedence
+ if _FORWARD_REF_HAS_CLASS:
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Tx", is_class=True), owner=Gen, globals={"Tx": str}), str)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Tx", is_class=True), owner=Gen, type_params=(not_Tx,), globals={"Tx": str}), str)
+
+ with self.assertRaises(NameError):
+ evaluate_forward_ref(typing.ForwardRef("alias"), type_params=Gen.__type_params__)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("alias"), owner=Gen), int)
+ # If you pass custom locals, we don't look at the owner's locals
+ with self.assertRaises(NameError):
+ evaluate_forward_ref(typing.ForwardRef("alias"), owner=Gen, locals={})
+ # But if the name exists in the locals, it works
+ self.assertIs(
+ evaluate_forward_ref(typing.ForwardRef("alias"), owner=Gen, locals={"alias": str}), str
+ )
+
+ @skipUnless(
+ HAS_FORWARD_MODULE, "Needs module 'forward' to test forward references"
+ )
+ def test_fwdref_with_module(self):
+ self.assertIs(
+ evaluate_forward_ref(typing.ForwardRef("Counter", module="collections")), collections.Counter
+ )
+ self.assertEqual(
+ evaluate_forward_ref(typing.ForwardRef("Counter[int]", module="collections")),
+ collections.Counter[int],
+ )
+
+ with self.assertRaises(NameError):
+ # If globals are passed explicitly, we don't look at the module dict
+ evaluate_forward_ref(typing.ForwardRef("Format", module="annotationlib"), globals={})
+
+ def test_fwdref_to_builtin(self):
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int")), int)
+ if HAS_FORWARD_MODULE:
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int", module="collections")), int)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int"), owner=str), int)
+
+ # builtins are still searched with explicit globals
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int"), globals={}), int)
+
+ def test_fwdref_with_globals(self):
+ # explicit values in globals have precedence
+ obj = object()
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int"), globals={"int": obj}), obj)
+
+ def test_fwdref_with_owner(self):
+ self.assertEqual(
+ evaluate_forward_ref(typing.ForwardRef("Counter[int]"), owner=collections),
+ collections.Counter[int],
+ )
+
+ def test_name_lookup_without_eval(self):
+ # test the codepath where we look up simple names directly in the
+ # namespaces without going through eval()
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int")), int)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int"), locals={"int": str}), str)
+ self.assertIs(
+ evaluate_forward_ref(typing.ForwardRef("int"), locals={"int": float}, globals={"int": str}),
+ float,
+ )
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int"), globals={"int": str}), str)
+ import builtins
+
+ from test import support
+ with support.swap_attr(builtins, "int", dict):
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("int")), dict)
+
+ def test_nested_strings(self):
+        # This TypeVar must have a different name than any global TypeVar (hence "Tx", not "T")
+ Tx = TypeVar("Tx")
+
+ class Y(Generic[Tx]):
+ a = "X"
+ bT = "Y[T_nonlocal]"
+
+ Z = TypeAliasType("Z", Y[Tx], type_params=(Tx,))
+
+ evaluated_ref1a = evaluate_forward_ref(typing.ForwardRef("Y[Y['Tx']]"), locals={"Y": Y, "Tx": Tx})
+ self.assertEqual(get_origin(evaluated_ref1a), Y)
+ self.assertEqual(get_args(evaluated_ref1a), (Y[Tx],))
+
+ evaluated_ref1b = evaluate_forward_ref(
+ typing.ForwardRef("Y[Y['Tx']]"), locals={"Y": Y}, type_params=(Tx,)
+ )
+ self.assertEqual(get_origin(evaluated_ref1b), Y)
+ self.assertEqual(get_args(evaluated_ref1b), (Y[Tx],))
+
+ with self.subTest("nested string of TypeVar"):
+ evaluated_ref2 = evaluate_forward_ref(typing.ForwardRef("""Y["Y['Tx']"]"""), locals={"Y": Y, "Tx": Tx})
+ self.assertEqual(get_origin(evaluated_ref2), Y)
+ self.assertEqual(get_args(evaluated_ref2), (Y[Tx],))
+
+ with self.subTest("nested string of TypeAliasType and alias"):
+ # NOTE: Using Y here works for 3.10
+ evaluated_ref3 = evaluate_forward_ref(typing.ForwardRef("""Y['Z["StrAlias"]']"""), locals={"Y": Y, "Z": Z, "StrAlias": str})
+ self.assertEqual(get_origin(evaluated_ref3), Y)
+ if sys.version_info[:2] == (3, 10):
+ self.skipTest("Nested string 'StrAlias' is not resolved in 3.10")
+ self.assertEqual(get_args(evaluated_ref3), (Z[str],))
+
+ def test_invalid_special_forms(self):
+ for name in ("Protocol", "Final", "ClassVar", "Generic"):
+ with self.subTest(name=name):
+ self.assertIs(
+ evaluate_forward_ref(typing.ForwardRef(name), globals=vars(typing)),
+ getattr(typing, name),
+ )
+ if _FORWARD_REF_HAS_CLASS:
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Final", is_class=True), globals=vars(typing)), Final)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("ClassVar", is_class=True), globals=vars(typing)), ClassVar)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("Final", is_argument=False), globals=vars(typing)), Final)
+ self.assertIs(evaluate_forward_ref(typing.ForwardRef("ClassVar", is_argument=False), globals=vars(typing)), ClassVar)
+
+
+class TestSentinels(BaseTestCase):
+ def test_sentinel_no_repr(self):
+ sentinel_no_repr = Sentinel('sentinel_no_repr')
+
+ self.assertEqual(sentinel_no_repr._name, 'sentinel_no_repr')
+        self.assertEqual(repr(sentinel_no_repr), '<sentinel_no_repr>')
+
+ def test_sentinel_explicit_repr(self):
+ sentinel_explicit_repr = Sentinel('sentinel_explicit_repr', repr='explicit_repr')
+
+ self.assertEqual(repr(sentinel_explicit_repr), 'explicit_repr')
+
+ @skipIf(sys.version_info < (3, 10), reason='New unions not available in 3.9')
+ def test_sentinel_type_expression_union(self):
+ sentinel = Sentinel('sentinel')
+
+ def func1(a: int | sentinel = sentinel): pass
+ def func2(a: sentinel | int = sentinel): pass
+
+ self.assertEqual(func1.__annotations__['a'], Union[int, sentinel])
+ self.assertEqual(func2.__annotations__['a'], Union[sentinel, int])
+
+ def test_sentinel_not_callable(self):
+ sentinel = Sentinel('sentinel')
+ with self.assertRaisesRegex(
+ TypeError,
+ "'Sentinel' object is not callable"
+ ):
+ sentinel()
+
+ def test_sentinel_not_picklable(self):
+ sentinel = Sentinel('sentinel')
+ with self.assertRaisesRegex(
+ TypeError,
+ "Cannot pickle 'Sentinel' object"
+ ):
+ pickle.dumps(sentinel)
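Taken together, these tests pin down the intended use of Sentinel: a module-level singleton used as a default value where None would be ambiguous. A minimal sketch under that assumption (MISSING and get_setting are illustrative names, not part of the test suite):

    from typing_extensions import Sentinel

    MISSING = Sentinel('MISSING')  # repr() shows '<MISSING>' unless an explicit repr= is given

    _settings = {"debug": False}

    def get_setting(name, default=MISSING):
        # `is MISSING` distinguishes "no default supplied" from an explicit None
        if name not in _settings and default is MISSING:
            raise KeyError(name)
        return _settings.get(name, default)

    assert get_setting("debug") is False
    assert get_setting("timeout", default=None) is None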
+
if __name__ == '__main__':
main()
diff --git a/src/typing_extensions.py b/src/typing_extensions.py
index 901f3b96..efa09d55 100644
--- a/src/typing_extensions.py
+++ b/src/typing_extensions.py
@@ -1,14 +1,22 @@
import abc
+import builtins
import collections
import collections.abc
+import contextlib
+import enum
import functools
import inspect
+import io
+import keyword
import operator
import sys
import types as _types
import typing
import warnings
+if sys.version_info >= (3, 14):
+ import annotationlib
+
__all__ = [
# Super-special typing primitives.
'Any',
@@ -52,6 +60,8 @@
'SupportsIndex',
'SupportsInt',
'SupportsRound',
+ 'Reader',
+ 'Writer',
# One-off things.
'Annotated',
@@ -60,8 +70,12 @@
'clear_overloads',
'dataclass_transform',
'deprecated',
+ 'Doc',
+ 'evaluate_forward_ref',
'get_overloads',
'final',
+ 'Format',
+ 'get_annotations',
'get_args',
'get_origin',
'get_original_bases',
@@ -75,18 +89,24 @@
'overload',
'override',
'Protocol',
+ 'Sentinel',
'reveal_type',
'runtime',
'runtime_checkable',
'Text',
'TypeAlias',
'TypeAliasType',
+ 'TypeForm',
'TypeGuard',
+ 'TypeIs',
'TYPE_CHECKING',
'Never',
'NoReturn',
+ 'ReadOnly',
'Required',
'NotRequired',
+ 'NoDefault',
+ 'NoExtraItems',
# Pure aliases, have always been in typing
'AbstractSet',
@@ -131,6 +151,10 @@
# for backward compatibility
PEP_560 = True
GenericMeta = type
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# Added with bpo-45166 to 3.10.1+ and some 3.9 versions
+_FORWARD_REF_HAS_CLASS = "__forward_is_class__" in typing.ForwardRef.__slots__
# The functions below are modified copies of typing internal helpers.
# They are needed by _ProtocolMeta and they provide support for PEP 646.
@@ -144,59 +168,14 @@ def __repr__(self):
_marker = _Sentinel()
-def _check_generic(cls, parameters, elen=_marker):
- """Check correct count for parameters of a generic cls (internal helper).
- This gives a nice error message in case of count mismatch.
- """
- if not elen:
- raise TypeError(f"{cls} is not a generic class")
- if elen is _marker:
- if not hasattr(cls, "__parameters__") or not cls.__parameters__:
- raise TypeError(f"{cls} is not a generic class")
- elen = len(cls.__parameters__)
- alen = len(parameters)
- if alen != elen:
- if hasattr(cls, "__parameters__"):
- parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
- num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
- if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
- return
- raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
- f" actual {alen}, expected {elen}")
-
-
if sys.version_info >= (3, 10):
def _should_collect_from_parameters(t):
return isinstance(
t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
)
-elif sys.version_info >= (3, 9):
- def _should_collect_from_parameters(t):
- return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
else:
def _should_collect_from_parameters(t):
- return isinstance(t, typing._GenericAlias) and not t._special
-
-
-def _collect_type_vars(types, typevar_types=None):
- """Collect all type variable contained in types in order of
- first appearance (lexicographic order). For example::
-
- _collect_type_vars((T, List[S, T])) == (T, S)
- """
- if typevar_types is None:
- typevar_types = typing.TypeVar
- tvars = []
- for t in types:
- if (
- isinstance(t, typevar_types) and
- t not in tvars and
- not _is_unpack(t)
- ):
- tvars.append(t)
- if _should_collect_from_parameters(t):
- tvars.extend([t for t in t.__parameters__ if t not in tvars])
- return tuple(tvars)
+ return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
NoReturn = typing.NoReturn
@@ -242,38 +221,61 @@ def __new__(cls, *args, **kwargs):
ClassVar = typing.ClassVar
+# Vendored from cpython typing._SpecialFrom
+# Having a separate class means that instances will not be rejected by
+# typing._type_check.
+class _SpecialForm(typing._Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
+
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
+
+ def __getattr__(self, item):
+ if item in {'__name__', '__qualname__'}:
+ return self._name
+
+ raise AttributeError(item)
+
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
-class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
def __repr__(self):
- return 'typing_extensions.' + self._name
+ return f'typing_extensions.{self._name}'
+ def __reduce__(self):
+ return self._name
-# On older versions of typing there is an internal class named "Final".
-# 3.8+
-if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
- Final = typing.Final
-# 3.7
-else:
- class _FinalForm(_ExtensionsSpecialForm, _root=True):
- def __getitem__(self, parameters):
- item = typing._type_check(parameters,
- f'{self._name} accepts only a single type.')
- return typing._GenericAlias(self, (item,))
+ def __call__(self, *args, **kwds):
+ raise TypeError(f"Cannot instantiate {self!r}")
- Final = _FinalForm('Final',
- doc="""A special typing construct to indicate that a name
- cannot be re-assigned or overridden in a subclass.
- For example:
+ def __or__(self, other):
+ return typing.Union[self, other]
- MAX_SIZE: Final = 9000
- MAX_SIZE += 1 # Error reported by type checker
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+ def __instancecheck__(self, obj):
+ raise TypeError(f"{self} cannot be used with isinstance()")
- class Connection:
- TIMEOUT: Final[int] = 10
- class FastConnector(Connection):
- TIMEOUT = 1 # Error reported by type checker
+ def __subclasscheck__(self, cls):
+ raise TypeError(f"{self} cannot be used with issubclass()")
+
+ @typing._tp_cache
+ def __getitem__(self, parameters):
+ return self._getitem(self, parameters)
- There is no runtime checking of these properties.""")
+
+# Note that inheriting from this class means that the object will be
+# rejected by typing._type_check, so do not use it if the special form
+# is arguably valid as a type by itself.
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+
+Final = typing.Final
if sys.version_info >= (3, 11):
final = typing.final
@@ -465,31 +467,87 @@ def clear_overloads():
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
-
-
Awaitable = typing.Awaitable
Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
Deque = typing.Deque
-ContextManager = typing.ContextManager
-AsyncContextManager = typing.AsyncContextManager
DefaultDict = typing.DefaultDict
-
-# 3.7.2+
-if hasattr(typing, 'OrderedDict'):
- OrderedDict = typing.OrderedDict
-# 3.7.0-3.7.2
-else:
- OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))
-
+OrderedDict = typing.OrderedDict
Counter = typing.Counter
ChainMap = typing.ChainMap
-AsyncGenerator = typing.AsyncGenerator
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING
+if sys.version_info >= (3, 13, 0, "beta"):
+ from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+ def _is_dunder(attr):
+ return attr.startswith('__') and attr.endswith('__')
+
+
+ class _SpecialGenericAlias(typing._SpecialGenericAlias, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+ super().__init__(origin, nparams, inst=inst, name=name)
+ self._defaults = defaults
+
+ def __setattr__(self, attr, val):
+ allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+ if _is_dunder(attr) or attr in allowed_attrs:
+ object.__setattr__(self, attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ @typing._tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(typing._type_check(p, msg) for p in params)
+ if (
+ self._defaults
+ and len(params) < self._nparams
+ and len(params) + len(self._defaults) >= self._nparams
+ ):
+ params = (*params, *self._defaults[len(params) - self._nparams:])
+ actual_len = len(params)
+
+ if actual_len != self._nparams:
+ if self._defaults:
+ expected = f"at least {self._nparams - len(self._defaults)}"
+ else:
+ expected = str(self._nparams)
+ if not self._nparams:
+ raise TypeError(f"{self} is not a generic class")
+ raise TypeError(
+ f"Too {'many' if actual_len > self._nparams else 'few'}"
+ f" arguments for {self};"
+ f" actual {actual_len}, expected {expected}"
+ )
+ return self.copy_with(params)
+
+ _NoneType = type(None)
+ Generator = _SpecialGenericAlias(
+ collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+ )
+ AsyncGenerator = _SpecialGenericAlias(
+ collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+ )
+ ContextManager = _SpecialGenericAlias(
+ contextlib.AbstractContextManager,
+ 2,
+ name="ContextManager",
+ defaults=(typing.Optional[bool],)
+ )
+ AsyncContextManager = _SpecialGenericAlias(
+ contextlib.AbstractAsyncContextManager,
+ 2,
+ name="AsyncContextManager",
+ defaults=(typing.Optional[bool],)
+ )
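As a rough illustration of what the defaults machinery above provides (a sketch, assuming the backported aliases mirror their 3.13 stdlib counterparts), trailing type arguments may be omitted and are filled in from _defaults before the arity check:

    import typing
    from typing_extensions import ContextManager, Generator

    # Generator has nparams=3 with defaults (NoneType, NoneType): one argument
    # is enough, and the send/return types default to None.
    assert Generator[int] == Generator[int, None, None]

    # ContextManager's second parameter (the __exit__ return type) defaults
    # to Optional[bool].
    assert ContextManager[str] == ContextManager[str, typing.Optional[bool]]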
+
+
_PROTO_ALLOWLIST = {
'collections.abc': [
'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
@@ -500,28 +558,11 @@ def clear_overloads():
}
-_EXCLUDED_ATTRS = {
- "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol",
- "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__",
- "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__",
- "__subclasshook__", "__orig_class__", "__init__", "__new__",
- "__protocol_attrs__", "__callable_proto_members_only__",
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+ "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+ "__final__",
}
-if sys.version_info < (3, 8):
- _EXCLUDED_ATTRS |= {
- "_gorg", "__next_in_mro__", "__extra__", "__tree_hash__", "__args__",
- "__origin__"
- }
-
-if sys.version_info >= (3, 9):
- _EXCLUDED_ATTRS.add("__class_getitem__")
-
-if sys.version_info >= (3, 12):
- _EXCLUDED_ATTRS.add("__type_params__")
-
-_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS)
-
def _get_protocol_attrs(cls):
attrs = set()
@@ -535,59 +576,24 @@ def _get_protocol_attrs(cls):
return attrs
-def _maybe_adjust_parameters(cls):
- """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__.
-
- The contents of this function are very similar
- to logic found in typing.Generic.__init_subclass__
- on the CPython main branch.
- """
- tvars = []
- if '__orig_bases__' in cls.__dict__:
- tvars = _collect_type_vars(cls.__orig_bases__)
- # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
- # If found, tvars must be a subset of it.
- # If not found, tvars is it.
- # Also check for and reject plain Generic,
- # and reject multiple Generic[...] and/or Protocol[...].
- gvars = None
- for base in cls.__orig_bases__:
- if (isinstance(base, typing._GenericAlias) and
- base.__origin__ in (typing.Generic, Protocol)):
- # for error messages
- the_base = base.__origin__.__name__
- if gvars is not None:
- raise TypeError(
- "Cannot inherit from Generic[...]"
- " and/or Protocol[...] multiple types.")
- gvars = base.__parameters__
- if gvars is None:
- gvars = tvars
- else:
- tvarset = set(tvars)
- gvarset = set(gvars)
- if not tvarset <= gvarset:
- s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
- s_args = ', '.join(str(g) for g in gvars)
- raise TypeError(f"Some type variables ({s_vars}) are"
- f" not listed in {the_base}[{s_args}]")
- tvars = gvars
- cls.__parameters__ = tuple(tvars)
-
-
-def _caller(depth=2):
+def _caller(depth=1, default='__main__'):
try:
- return sys._getframe(depth).f_globals.get('__name__', '__main__')
+ return sys._getframemodulename(depth + 1) or default
+ except AttributeError: # For platforms without _getframemodulename()
+ pass
+ try:
+ return sys._getframe(depth + 1).f_globals.get('__name__', default)
except (AttributeError, ValueError): # For platforms without _getframe()
- return None
+ pass
+ return None
-# The performance of runtime-checkable protocols is significantly improved on Python 3.12,
-# so we backport the 3.12 version of Protocol to Python <=3.11
-if sys.version_info >= (3, 12):
+# `__match_args__` attribute was removed from protocol members in 3.13,
+# we want to backport this change to older Python versions.
+if sys.version_info >= (3, 13):
Protocol = typing.Protocol
else:
- def _allow_reckless_class_checks(depth=3):
+ def _allow_reckless_class_checks(depth=2):
"""Allow instance and class checks for special stdlib modules.
The abc and functools modules indiscriminately call isinstance() and
issubclass() on the whole MRO of a user class, which may contain protocols.
@@ -598,30 +604,39 @@ def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
- if sys.version_info >= (3, 8):
- # Inheriting from typing._ProtocolMeta isn't actually desirable,
- # but is necessary to allow typing.Protocol and typing_extensions.Protocol
- # to mix without getting TypeErrors about "metaclass conflict"
- _typing_Protocol = typing.Protocol
- _ProtocolMetaBase = type(_typing_Protocol)
- else:
- _typing_Protocol = _marker
- _ProtocolMetaBase = abc.ABCMeta
+ def _type_check_issubclass_arg_1(arg):
+ """Raise TypeError if `arg` is not an instance of `type`
+ in `issubclass(arg, <protocol>)`.
+
+ In most cases, this is verified by type.__subclasscheck__.
+ Checking it again unnecessarily would slow down issubclass() checks,
+ so, we don't perform this check unless we absolutely have to.
- class _ProtocolMeta(_ProtocolMetaBase):
+ For various error paths, however,
+ we want to ensure that *this* error message is shown to the user
+ where relevant, rather than a typing.py-specific error message.
+ """
+ if not isinstance(arg, type):
+ # Same error message as for issubclass(1, int).
+ raise TypeError('issubclass() arg 1 must be a class')
+
+ # Inheriting from typing._ProtocolMeta isn't actually desirable,
+ # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+ # to mix without getting TypeErrors about "metaclass conflict"
+ class _ProtocolMeta(type(typing.Protocol)):
# This metaclass is somewhat unfortunate,
# but is necessary for several reasons...
#
# NOTE: DO NOT call super() in any methods in this class
- # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
+ # That would call the methods on typing._ProtocolMeta on Python <=3.11
# and those are slow
def __new__(mcls, name, bases, namespace, **kwargs):
if name == "Protocol" and len(bases) < 2:
pass
- elif {Protocol, _typing_Protocol} & set(bases):
+ elif {Protocol, typing.Protocol} & set(bases):
for base in bases:
if not (
- base in {object, typing.Generic, Protocol, _typing_Protocol}
+ base in {object, typing.Generic, Protocol, typing.Protocol}
or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
or is_protocol(base)
):
@@ -635,11 +650,6 @@ def __init__(cls, *args, **kwargs):
abc.ABCMeta.__init__(cls, *args, **kwargs)
if getattr(cls, "_is_protocol", False):
cls.__protocol_attrs__ = _get_protocol_attrs(cls)
- # PEP 544 prohibits using issubclass()
- # with protocols that have non-method members.
- cls.__callable_proto_members_only__ = all(
- callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__
- )
def __subclasscheck__(cls, other):
if cls is Protocol:
@@ -648,21 +658,23 @@ def __subclasscheck__(cls, other):
getattr(cls, '_is_protocol', False)
and not _allow_reckless_class_checks()
):
- if not isinstance(other, type):
- # Same error message as for issubclass(1, int).
- raise TypeError('issubclass() arg 1 must be a class')
- if (
- not cls.__callable_proto_members_only__
- and cls.__dict__.get("__subclasshook__") is _proto_hook
- ):
- raise TypeError(
- "Protocols with non-method members don't support issubclass()"
- )
if not getattr(cls, '_is_runtime_protocol', False):
+ _type_check_issubclass_arg_1(other)
raise TypeError(
"Instance and class checks can only be used with "
"@runtime_checkable protocols"
)
+ if (
+ # this attribute is set by @runtime_checkable:
+ cls.__non_callable_proto_members__
+ and cls.__dict__.get("__subclasshook__") is _proto_hook
+ ):
+ _type_check_issubclass_arg_1(other)
+ non_method_attrs = sorted(cls.__non_callable_proto_members__)
+ raise TypeError(
+ "Protocols with non-method members don't support issubclass()."
+ f" Non-method members: {str(non_method_attrs)[1:-1]}."
+ )
return abc.ABCMeta.__subclasscheck__(cls, other)
def __instancecheck__(cls, instance):
@@ -689,7 +701,8 @@ def __instancecheck__(cls, instance):
val = inspect.getattr_static(instance, attr)
except AttributeError:
break
- if val is None and callable(getattr(cls, attr, None)):
+ # this attribute is set by @runtime_checkable:
+ if val is None and attr not in cls.__non_callable_proto_members__:
break
else:
return True
@@ -699,12 +712,10 @@ def __instancecheck__(cls, instance):
def __eq__(cls, other):
# Hack so that typing.Generic.__class_getitem__
# treats typing_extensions.Protocol
- # as equivalent to typing.Protocol on Python 3.8+
+ # as equivalent to typing.Protocol
if abc.ABCMeta.__eq__(cls, other) is True:
return True
- return (
- cls is Protocol and other is getattr(typing, "Protocol", object())
- )
+ return cls is Protocol and other is typing.Protocol
# This has to be defined, or the abc-module cache
# complains about classes with this metaclass being unhashable,
@@ -737,146 +748,88 @@ def _proto_hook(cls, other):
return NotImplemented
return True
- if sys.version_info >= (3, 8):
- class Protocol(typing.Generic, metaclass=_ProtocolMeta):
- __doc__ = typing.Protocol.__doc__
- __slots__ = ()
- _is_protocol = True
- _is_runtime_protocol = False
-
- def __init_subclass__(cls, *args, **kwargs):
- super().__init_subclass__(*args, **kwargs)
+ class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+ __doc__ = typing.Protocol.__doc__
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
- # Determine if this is a protocol or a concrete subclass.
- if not cls.__dict__.get('_is_protocol', False):
- cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+ def __init_subclass__(cls, *args, **kwargs):
+ super().__init_subclass__(*args, **kwargs)
- # Set (or override) the protocol subclass hook.
- if '__subclasshook__' not in cls.__dict__:
- cls.__subclasshook__ = _proto_hook
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', False):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
- # Prohibit instantiation for protocol classes
- if cls._is_protocol and cls.__init__ is Protocol.__init__:
- cls.__init__ = _no_init
+ # Set (or override) the protocol subclass hook.
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
- else:
- class Protocol(metaclass=_ProtocolMeta):
- # There is quite a lot of overlapping code with typing.Generic.
- # Unfortunately it is hard to avoid this on Python <3.8,
- # as the typing module on Python 3.7 doesn't let us subclass typing.Generic!
- """Base class for protocol classes. Protocol classes are defined as::
+ # Prohibit instantiation for protocol classes
+ if cls._is_protocol and cls.__init__ is Protocol.__init__:
+ cls.__init__ = _no_init
- class Proto(Protocol):
- def meth(self) -> int:
- ...
- Such classes are primarily used with static type checkers that recognize
- structural subtyping (static duck-typing), for example::
+if sys.version_info >= (3, 13):
+ runtime_checkable = typing.runtime_checkable
+else:
+ def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol.
- class C:
- def meth(self) -> int:
- return 0
+ Such protocol can be used with isinstance() and issubclass().
+ Raise TypeError if applied to a non-protocol class.
+ This allows a simple-minded structural check very similar to
+ one trick ponies in collections.abc such as Iterable.
- def func(x: Proto) -> int:
- return x.meth()
+ For example::
- func(C()) # Passes static type check
+ @runtime_checkable
+ class Closable(Protocol):
+ def close(self): ...
- See PEP 544 for details. Protocol classes decorated with
- @typing_extensions.runtime_checkable act
- as simple-minded runtime-checkable protocols that check
- only the presence of given attributes, ignoring their type signatures.
+ assert isinstance(open('/some/file'), Closable)
- Protocol classes can be generic, they are defined as::
+ Warning: this will check only the presence of the required methods,
+ not their type signatures!
+ """
+ if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
+ raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+ f' got {cls!r}')
+ cls._is_runtime_protocol = True
- class GenProto(Protocol[T]):
- def meth(self) -> T:
- ...
- """
- __slots__ = ()
- _is_protocol = True
- _is_runtime_protocol = False
-
- def __new__(cls, *args, **kwds):
- if cls is Protocol:
- raise TypeError("Type Protocol cannot be instantiated; "
- "it can only be used as a base class")
- return super().__new__(cls)
-
- @typing._tp_cache
- def __class_getitem__(cls, params):
- if not isinstance(params, tuple):
- params = (params,)
- if not params and cls is not typing.Tuple:
+ # typing.Protocol classes on <=3.11 break if we execute this block,
+ # because typing.Protocol classes on <=3.11 don't have a
+ # `__protocol_attrs__` attribute, and this block relies on the
+ # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+ # break if we *don't* execute this block, because *they* assume that all
+ # protocol classes have a `__non_callable_proto_members__` attribute
+ # (which this block sets)
+ if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
+ # PEP 544 prohibits using issubclass()
+ # with protocols that have non-method members.
+ # See gh-113320 for why we compute this attribute here,
+ # rather than in `_ProtocolMeta.__init__`
+ cls.__non_callable_proto_members__ = set()
+ for attr in cls.__protocol_attrs__:
+ try:
+ is_callable = callable(getattr(cls, attr, None))
+ except Exception as e:
raise TypeError(
- f"Parameter list to {cls.__qualname__}[...] cannot be empty")
- msg = "Parameters to generic types must be types."
- params = tuple(typing._type_check(p, msg) for p in params)
- if cls is Protocol:
- # Generic can only be subscripted with unique type variables.
- if not all(isinstance(p, typing.TypeVar) for p in params):
- i = 0
- while isinstance(params[i], typing.TypeVar):
- i += 1
- raise TypeError(
- "Parameters to Protocol[...] must all be type variables."
- f" Parameter {i + 1} is {params[i]}")
- if len(set(params)) != len(params):
- raise TypeError(
- "Parameters to Protocol[...] must all be unique")
- else:
- # Subscripting a regular Generic subclass.
- _check_generic(cls, params, len(cls.__parameters__))
- return typing._GenericAlias(cls, params)
-
- def __init_subclass__(cls, *args, **kwargs):
- if '__orig_bases__' in cls.__dict__:
- error = typing.Generic in cls.__orig_bases__
+ f"Failed to determine whether protocol member {attr!r} "
+ "is a method member"
+ ) from e
else:
- error = typing.Generic in cls.__bases__
- if error:
- raise TypeError("Cannot inherit from plain Generic")
- _maybe_adjust_parameters(cls)
-
- # Determine if this is a protocol or a concrete subclass.
- if not cls.__dict__.get('_is_protocol', None):
- cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-
- # Set (or override) the protocol subclass hook.
- if '__subclasshook__' not in cls.__dict__:
- cls.__subclasshook__ = _proto_hook
+ if not is_callable:
+ cls.__non_callable_proto_members__.add(attr)
- # Prohibit instantiation for protocol classes
- if cls._is_protocol and cls.__init__ is Protocol.__init__:
- cls.__init__ = _no_init
-
-
-if sys.version_info >= (3, 8):
- runtime_checkable = typing.runtime_checkable
-else:
- def runtime_checkable(cls):
- """Mark a protocol class as a runtime protocol, so that it
- can be used with isinstance() and issubclass(). Raise TypeError
- if applied to a non-protocol class.
-
- This allows a simple-minded structural check very similar to the
- one-offs in collections.abc such as Hashable.
- """
- if not (
- (isinstance(cls, _ProtocolMeta) or issubclass(cls, typing.Generic))
- and getattr(cls, "_is_protocol", False)
- ):
- raise TypeError('@runtime_checkable can be only applied to protocol classes,'
- f' got {cls!r}')
- cls._is_runtime_protocol = True
return cls
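A small sketch of the behaviour that the __non_callable_proto_members__ bookkeeping above enables (illustrative class names, assuming a runtime-checkable protocol with a data attribute): isinstance() still performs the structural check, while issubclass() is rejected with the more specific message built in __subclasscheck__.

    from typing_extensions import Protocol, runtime_checkable

    @runtime_checkable
    class Named(Protocol):
        name: str                        # non-method member

        def rename(self, new: str) -> None: ...

    class User:
        def __init__(self) -> None:
            self.name = "bob"
        def rename(self, new: str) -> None:
            self.name = new

    assert isinstance(User(), Named)     # structural instance check works
    try:
        issubclass(User, Named)          # issubclass() is disallowed
    except TypeError as exc:
        assert "non-method members" in str(exc)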
-# Exists for backwards compatibility.
+# The "runtime" alias exists for backwards compatibility.
runtime = runtime_checkable
-# Our version of runtime-checkable protocols is faster on Python 3.7-3.11
+# Our version of runtime-checkable protocols is faster on Python <=3.11
if sys.version_info >= (3, 12):
SupportsInt = typing.SupportsInt
SupportsFloat = typing.SupportsFloat
@@ -953,24 +906,103 @@ def __round__(self, ndigits: int = 0) -> T_co:
pass
-def _ensure_subclassable(mro_entries):
- def inner(func):
- if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
- cls_dict = {
- "__call__": staticmethod(func),
- "__mro_entries__": staticmethod(mro_entries)
- }
- t = type(func.__name__, (), cls_dict)
- return functools.update_wrapper(t(), func)
- else:
- func.__mro_entries__ = mro_entries
- return func
- return inner
+if hasattr(io, "Reader") and hasattr(io, "Writer"):
+ Reader = io.Reader
+ Writer = io.Writer
+else:
+ @runtime_checkable
+ class Reader(Protocol[T_co]):
+ """Protocol for simple I/O reader instances.
+ This protocol only supports blocking I/O.
+ """
-if sys.version_info >= (3, 13):
- # The standard library TypedDict in Python 3.8 does not store runtime information
- # about which (if any) keys are optional. See https://bugs.python.org/issue38834
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def read(self, size: int = ..., /) -> T_co:
+ """Read data from the input stream and return it.
+
+ If *size* is specified, at most *size* items (bytes/characters) will be
+ read.
+ """
+
+ @runtime_checkable
+ class Writer(Protocol[T_contra]):
+ """Protocol for simple I/O writer instances.
+
+ This protocol only supports blocking I/O.
+ """
+
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def write(self, data: T_contra, /) -> int:
+ """Write *data* to the output stream and return the number of items written.""" # noqa: E501
+
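A brief usage sketch for the backported Reader/Writer protocols (runtime-checkable as defined above; the copy_all helper is illustrative): ordinary file-like objects satisfy them structurally.

    import io
    from typing_extensions import Reader, Writer

    buf = io.StringIO("hello")
    assert isinstance(buf, Reader)       # provides read()
    assert isinstance(buf, Writer)       # provides write()

    def copy_all(src: Reader[str], dst: Writer[str]) -> int:
        return dst.write(src.read())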
+
+_NEEDS_SINGLETONMETA = (
+ not hasattr(typing, "NoDefault") or not hasattr(typing, "NoExtraItems")
+)
+
+if _NEEDS_SINGLETONMETA:
+ class SingletonMeta(type):
+ def __setattr__(cls, attr, value):
+ # TypeError is consistent with the behavior of NoneType
+ raise TypeError(
+ f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+ )
+
+
+if hasattr(typing, "NoDefault"):
+ NoDefault = typing.NoDefault
+else:
+ class NoDefaultType(metaclass=SingletonMeta):
+ """The type of the NoDefault singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoDefault") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoDefault"
+
+ def __reduce__(self):
+ return "NoDefault"
+
+ NoDefault = NoDefaultType()
+ del NoDefaultType
+
+if hasattr(typing, "NoExtraItems"):
+ NoExtraItems = typing.NoExtraItems
+else:
+ class NoExtraItemsType(metaclass=SingletonMeta):
+ """The type of the NoExtraItems singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoExtraItems") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoExtraItems"
+
+ def __reduce__(self):
+ return "NoExtraItems"
+
+ NoExtraItems = NoExtraItemsType()
+ del NoExtraItemsType
+
+if _NEEDS_SINGLETONMETA:
+ del SingletonMeta
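The singleton plumbing above exists so that NoDefault and NoExtraItems keep their identity through copying and pickling. A minimal sketch of what the __new__/__reduce__ pair guarantees (on 3.13+ the re-exported typing.NoDefault behaves the same way, apart from its repr):

    import copy
    import pickle
    from typing_extensions import NoDefault

    assert pickle.loads(pickle.dumps(NoDefault)) is NoDefault  # __reduce__ -> bare name
    assert copy.copy(NoDefault) is NoDefault                   # __new__ returns the global
    print(repr(NoDefault))  # 'typing_extensions.NoDefault' (or 'typing.NoDefault' on 3.13+)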
+
+
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
# The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
# keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
# The standard library TypedDict below Python 3.11 does not store runtime
@@ -979,6 +1011,8 @@ def inner(func):
# Aaaand on 3.12 we add __orig_bases__ to TypedDict
# to enable better runtime introspection.
# On 3.13 we deprecate some odd ways of creating TypedDicts.
+ # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+ # PEP 728 (still pending) makes more changes.
TypedDict = typing.TypedDict
_TypedDictMeta = typing._TypedDictMeta
is_typeddict = typing.is_typeddict
@@ -986,13 +1020,31 @@ def inner(func):
# 3.10.0 and later
_TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
- if sys.version_info >= (3, 8):
- _fake_name = "Protocol"
- else:
- _fake_name = "_Protocol"
+ def _get_typeddict_qualifiers(annotation_type):
+ while True:
+ annotation_origin = get_origin(annotation_type)
+ if annotation_origin is Annotated:
+ annotation_args = get_args(annotation_type)
+ if annotation_args:
+ annotation_type = annotation_args[0]
+ else:
+ break
+ elif annotation_origin is Required:
+ yield Required
+ annotation_type, = get_args(annotation_type)
+ elif annotation_origin is NotRequired:
+ yield NotRequired
+ annotation_type, = get_args(annotation_type)
+ elif annotation_origin is ReadOnly:
+ yield ReadOnly
+ annotation_type, = get_args(annotation_type)
+ else:
+ break
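The generator above peels Annotated wrappers and TypedDict qualifiers off a field annotation one layer at a time, yielding each qualifier it finds on the way to the bare type. A sketch that imports the private helper purely for illustration:

    from typing_extensions import NotRequired, ReadOnly
    from typing_extensions import _get_typeddict_qualifiers  # private helper, shown for illustration only

    assert list(_get_typeddict_qualifiers(ReadOnly[NotRequired[int]])) == [ReadOnly, NotRequired]
    assert list(_get_typeddict_qualifiers(int)) == []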
class _TypedDictMeta(type):
- def __new__(cls, name, bases, ns, total=True):
+
+ def __new__(cls, name, bases, ns, *, total=True, closed=None,
+ extra_items=NoExtraItems):
"""Create new typed dict class object.
This method is called when TypedDict is subclassed,
@@ -1004,66 +1056,148 @@ def __new__(cls, name, bases, ns, total=True):
if type(base) is not _TypedDictMeta and base is not typing.Generic:
raise TypeError('cannot inherit from both a TypedDict type '
'and a non-TypedDict base class')
+ if closed is not None and extra_items is not NoExtraItems:
+ raise TypeError(f"Cannot combine closed={closed!r} and extra_items")
if any(issubclass(b, typing.Generic) for b in bases):
generic_base = (typing.Generic,)
else:
generic_base = ()
+ ns_annotations = ns.pop('__annotations__', None)
+
# typing.py generally doesn't let you inherit from plain Generic, unless
- # the name of the class happens to be "Protocol" (or "_Protocol" on 3.7).
- tp_dict = type.__new__(_TypedDictMeta, _fake_name, (*generic_base, dict), ns)
+ # the name of the class happens to be "Protocol"
+ tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
tp_dict.__name__ = name
- if tp_dict.__qualname__ == _fake_name:
+ if tp_dict.__qualname__ == "Protocol":
tp_dict.__qualname__ = name
if not hasattr(tp_dict, '__orig_bases__'):
tp_dict.__orig_bases__ = bases
annotations = {}
- own_annotations = ns.get('__annotations__', {})
+ own_annotate = None
+ if ns_annotations is not None:
+ own_annotations = ns_annotations
+ elif sys.version_info >= (3, 14):
+ if hasattr(annotationlib, "get_annotate_from_class_namespace"):
+ own_annotate = annotationlib.get_annotate_from_class_namespace(ns)
+ else:
+ # 3.14.0a7 and earlier
+ own_annotate = ns.get("__annotate__")
+ if own_annotate is not None:
+ own_annotations = annotationlib.call_annotate_function(
+ own_annotate, Format.FORWARDREF, owner=tp_dict
+ )
+ else:
+ own_annotations = {}
+ else:
+ own_annotations = {}
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
if _TAKES_MODULE:
- own_annotations = {
+ own_checked_annotations = {
n: typing._type_check(tp, msg, module=tp_dict.__module__)
for n, tp in own_annotations.items()
}
else:
- own_annotations = {
+ own_checked_annotations = {
n: typing._type_check(tp, msg)
for n, tp in own_annotations.items()
}
required_keys = set()
optional_keys = set()
+ readonly_keys = set()
+ mutable_keys = set()
+ extra_items_type = extra_items
for base in bases:
- annotations.update(base.__dict__.get('__annotations__', {}))
- required_keys.update(base.__dict__.get('__required_keys__', ()))
- optional_keys.update(base.__dict__.get('__optional_keys__', ()))
-
- annotations.update(own_annotations)
- for annotation_key, annotation_type in own_annotations.items():
- annotation_origin = get_origin(annotation_type)
- if annotation_origin is Annotated:
- annotation_args = get_args(annotation_type)
- if annotation_args:
- annotation_type = annotation_args[0]
- annotation_origin = get_origin(annotation_type)
-
- if annotation_origin is Required:
+ base_dict = base.__dict__
+
+ if sys.version_info <= (3, 14):
+ annotations.update(base_dict.get('__annotations__', {}))
+ required_keys.update(base_dict.get('__required_keys__', ()))
+ optional_keys.update(base_dict.get('__optional_keys__', ()))
+ readonly_keys.update(base_dict.get('__readonly_keys__', ()))
+ mutable_keys.update(base_dict.get('__mutable_keys__', ()))
+
+ # This was specified in an earlier version of PEP 728. Support
+ # is retained for backwards compatibility, but only for Python
+ # 3.13 and lower.
+ if (closed and sys.version_info < (3, 14)
+ and "__extra_items__" in own_checked_annotations):
+ annotation_type = own_checked_annotations.pop("__extra_items__")
+ qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+ if Required in qualifiers:
+ raise TypeError(
+ "Special key __extra_items__ does not support "
+ "Required"
+ )
+ if NotRequired in qualifiers:
+ raise TypeError(
+ "Special key __extra_items__ does not support "
+ "NotRequired"
+ )
+ extra_items_type = annotation_type
+
+ annotations.update(own_checked_annotations)
+ for annotation_key, annotation_type in own_checked_annotations.items():
+ qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+
+ if Required in qualifiers:
required_keys.add(annotation_key)
- elif annotation_origin is NotRequired:
+ elif NotRequired in qualifiers:
optional_keys.add(annotation_key)
elif total:
required_keys.add(annotation_key)
else:
optional_keys.add(annotation_key)
+ if ReadOnly in qualifiers:
+ mutable_keys.discard(annotation_key)
+ readonly_keys.add(annotation_key)
+ else:
+ mutable_keys.add(annotation_key)
+ readonly_keys.discard(annotation_key)
+
+ if sys.version_info >= (3, 14):
+ def __annotate__(format):
+ annos = {}
+ for base in bases:
+ if base is Generic:
+ continue
+ base_annotate = base.__annotate__
+ if base_annotate is None:
+ continue
+ base_annos = annotationlib.call_annotate_function(
+ base_annotate, format, owner=base)
+ annos.update(base_annos)
+ if own_annotate is not None:
+ own = annotationlib.call_annotate_function(
+ own_annotate, format, owner=tp_dict)
+ if format != Format.STRING:
+ own = {
+ n: typing._type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own.items()
+ }
+ elif format == Format.STRING:
+ own = annotationlib.annotations_to_string(own_annotations)
+ elif format in (Format.FORWARDREF, Format.VALUE):
+ own = own_checked_annotations
+ else:
+ raise NotImplementedError(format)
+ annos.update(own)
+ return annos
- tp_dict.__annotations__ = annotations
+ tp_dict.__annotate__ = __annotate__
+ else:
+ tp_dict.__annotations__ = annotations
tp_dict.__required_keys__ = frozenset(required_keys)
tp_dict.__optional_keys__ = frozenset(optional_keys)
- if not hasattr(tp_dict, '__total__'):
- tp_dict.__total__ = total
+ tp_dict.__readonly_keys__ = frozenset(readonly_keys)
+ tp_dict.__mutable_keys__ = frozenset(mutable_keys)
+ tp_dict.__total__ = total
+ tp_dict.__closed__ = closed
+ tp_dict.__extra_items__ = extra_items_type
return tp_dict
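The classification loop above is what populates the four key sets. A short sketch of the resulting introspection data (Movie is a hypothetical class, not part of the test suite):

    from typing_extensions import NotRequired, ReadOnly, TypedDict

    class Movie(TypedDict):
        title: ReadOnly[str]
        year: NotRequired[int]

    assert Movie.__required_keys__ == frozenset({"title"})
    assert Movie.__optional_keys__ == frozenset({"year"})
    assert Movie.__readonly_keys__ == frozenset({"title"})
    assert Movie.__mutable_keys__ == frozenset({"year"})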
__call__ = dict # static method
@@ -1076,8 +1210,94 @@ def __subclasscheck__(cls, other):
_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
- @_ensure_subclassable(lambda bases: (_TypedDict,))
- def TypedDict(__typename, __fields=_marker, *, total=True, **kwargs):
+ def _create_typeddict(
+ typename,
+ fields,
+ /,
+ *,
+ typing_is_inline,
+ total,
+ closed,
+ extra_items,
+ **kwargs,
+ ):
+ if fields is _marker or fields is None:
+ if fields is _marker:
+ deprecated_thing = (
+ "Failing to pass a value for the 'fields' parameter"
+ )
+ else:
+ deprecated_thing = "Passing `None` as the 'fields' parameter"
+
+ example = f"`{typename} = TypedDict({typename!r}, {{}})`"
+ deprecation_msg = (
+ f"{deprecated_thing} is deprecated and will be disallowed in "
+ "Python 3.15. To create a TypedDict class with 0 fields "
+ "using the functional syntax, pass an empty dictionary, e.g. "
+ ) + example + "."
+ warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
+ # Support a field called "closed"
+ if closed is not False and closed is not True and closed is not None:
+ kwargs["closed"] = closed
+ closed = None
+ # Or "extra_items"
+ if extra_items is not NoExtraItems:
+ kwargs["extra_items"] = extra_items
+ extra_items = NoExtraItems
+ fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+ if kwargs:
+ if sys.version_info >= (3, 13):
+ raise TypeError("TypedDict takes no keyword arguments")
+ warnings.warn(
+ "The kwargs-based syntax for TypedDict definitions is deprecated "
+ "in Python 3.11, will be removed in Python 3.13, and may not be "
+ "understood by third-party type checkers.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ ns = {'__annotations__': dict(fields)}
+ module = _caller(depth=4 if typing_is_inline else 2)
+ if module is not None:
+ # Setting correct module is necessary to make typed dict classes
+ # pickleable.
+ ns['__module__'] = module
+
+ td = _TypedDictMeta(typename, (), ns, total=total, closed=closed,
+ extra_items=extra_items)
+ td.__orig_bases__ = (TypedDict,)
+ return td
+
+ class _TypedDictSpecialForm(_SpecialForm, _root=True):
+ def __call__(
+ self,
+ typename,
+ fields=_marker,
+ /,
+ *,
+ total=True,
+ closed=None,
+ extra_items=NoExtraItems,
+ **kwargs
+ ):
+ return _create_typeddict(
+ typename,
+ fields,
+ typing_is_inline=False,
+ total=total,
+ closed=closed,
+ extra_items=extra_items,
+ **kwargs,
+ )
+
+ def __mro_entries__(self, bases):
+ return (_TypedDict,)
+
+ @_TypedDictSpecialForm
+ def TypedDict(self, args):
"""A simple typed namespace. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type such that a type checker will expect all
@@ -1124,46 +1344,22 @@ class Point2D(TypedDict):
See PEP 655 for more details on Required and NotRequired.
"""
- if __fields is _marker or __fields is None:
- if __fields is _marker:
- deprecated_thing = "Failing to pass a value for the 'fields' parameter"
- else:
- deprecated_thing = "Passing `None` as the 'fields' parameter"
-
- example = f"`{__typename} = TypedDict({__typename!r}, {{}})`"
- deprecation_msg = (
- f"{deprecated_thing} is deprecated and will be disallowed in "
- "Python 3.15. To create a TypedDict class with 0 fields "
- "using the functional syntax, pass an empty dictionary, e.g. "
- ) + example + "."
- warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
- __fields = kwargs
- elif kwargs:
- raise TypeError("TypedDict takes either a dict or keyword arguments,"
- " but not both")
- if kwargs:
- warnings.warn(
- "The kwargs-based syntax for TypedDict definitions is deprecated "
- "in Python 3.11, will be removed in Python 3.13, and may not be "
- "understood by third-party type checkers.",
- DeprecationWarning,
- stacklevel=2,
+ # This runs when creating inline TypedDicts:
+ if not isinstance(args, dict):
+ raise TypeError(
+ "TypedDict[...] should be used with a single dict argument"
)
- ns = {'__annotations__': dict(__fields)}
- module = _caller()
- if module is not None:
- # Setting correct module is necessary to make typed dict classes pickleable.
- ns['__module__'] = module
-
- td = _TypedDictMeta(__typename, (), ns, total=total)
- td.__orig_bases__ = (TypedDict,)
- return td
+ return _create_typeddict(
+ "",
+ args,
+ typing_is_inline=True,
+ total=True,
+ closed=True,
+ extra_items=NoExtraItems,
+ )
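For orientation, the two creation paths that funnel into _create_typeddict look roughly like this (Options and Point are illustrative names; the behaviour sketched assumes the backport above):

    from typing_extensions import TypedDict

    # Functional syntax: total/closed/extra_items are keyword-only and consumed here.
    Options = TypedDict("Options", {"verbose": bool}, total=False, closed=True)
    assert Options.__closed__ is True
    assert Options.__optional_keys__ == frozenset({"verbose"})

    # Inline syntax (draft PEP 764): a single dict argument, always total and closed.
    Point = TypedDict[{"x": int, "y": int}]
    assert Point.__required_keys__ == frozenset({"x", "y"})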
- if hasattr(typing, "_TypedDictMeta"):
- _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
- else:
- _TYPEDDICT_TYPES = (_TypedDictMeta,)
+ _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
def is_typeddict(tp):
"""Check if an annotation is a TypedDict class
@@ -1176,9 +1372,6 @@ class Film(TypedDict):
is_typeddict(Film) # => True
is_typeddict(Union[list, str]) # => False
"""
- # On 3.8, this would otherwise return True
- if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
- return False
return isinstance(tp, _TYPEDDICT_TYPES)
@@ -1186,7 +1379,7 @@ class Film(TypedDict):
assert_type = typing.assert_type
else:
- def assert_type(__val, __typ):
+ def assert_type(val, typ, /):
"""Assert (to the type checker) that the value is of the given type.
When the type checker encounters a call to assert_type(), it
@@ -1199,18 +1392,18 @@ def greet(name: str) -> None:
At runtime this returns the first argument unchanged and otherwise
does nothing.
"""
- return __val
+ return val
-if hasattr(typing, "Required"):
+if hasattr(typing, "ReadOnly"): # 3.13+
get_type_hints = typing.get_type_hints
-else:
+else: # <=3.13
# replaces _strip_annotations()
def _strip_extras(t):
"""Strips Annotated, Required and NotRequired from a given type."""
- if isinstance(t, _AnnotatedAlias):
+ if isinstance(t, typing._AnnotatedAlias):
return _strip_extras(t.__origin__)
- if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
+ if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
return _strip_extras(t.__args__[0])
if isinstance(t, typing._GenericAlias):
stripped_args = tuple(_strip_extras(a) for a in t.__args__)
@@ -1262,141 +1455,86 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
- If two dict arguments are passed, they specify globals and
locals, respectively.
"""
- if hasattr(typing, "Annotated"):
- hint = typing.get_type_hints(
- obj, globalns=globalns, localns=localns, include_extras=True
- )
- else:
- hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+ hint = typing.get_type_hints(
+ obj, globalns=globalns, localns=localns, include_extras=True
+ )
+ if sys.version_info < (3, 11):
+ _clean_optional(obj, hint, globalns, localns)
if include_extras:
return hint
return {k: _strip_extras(t) for k, t in hint.items()}
+ _NoneType = type(None)
-# Python 3.9+ has PEP 593 (Annotated)
-if hasattr(typing, 'Annotated'):
- Annotated = typing.Annotated
- # Not exported and not a public API, but needed for get_origin() and get_args()
- # to work.
- _AnnotatedAlias = typing._AnnotatedAlias
-# 3.7-3.8
-else:
- class _AnnotatedAlias(typing._GenericAlias, _root=True):
- """Runtime representation of an annotated type.
-
- At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
- with extra annotations. The alias behaves like a normal typing alias,
- instantiating is the same as instantiating the underlying type, binding
- it to types is also the same.
- """
- def __init__(self, origin, metadata):
- if isinstance(origin, _AnnotatedAlias):
- metadata = origin.__metadata__ + metadata
- origin = origin.__origin__
- super().__init__(origin, origin)
- self.__metadata__ = metadata
-
- def copy_with(self, params):
- assert len(params) == 1
- new_type = params[0]
- return _AnnotatedAlias(new_type, self.__metadata__)
-
- def __repr__(self):
- return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
- f"{', '.join(repr(a) for a in self.__metadata__)}]")
-
- def __reduce__(self):
- return operator.getitem, (
- Annotated, (self.__origin__,) + self.__metadata__
- )
-
- def __eq__(self, other):
- if not isinstance(other, _AnnotatedAlias):
- return NotImplemented
- if self.__origin__ != other.__origin__:
- return False
- return self.__metadata__ == other.__metadata__
-
- def __hash__(self):
- return hash((self.__origin__, self.__metadata__))
-
- class Annotated:
- """Add context specific metadata to a type.
-
- Example: Annotated[int, runtime_check.Unsigned] indicates to the
- hypothetical runtime_check module that this type is an unsigned int.
- Every other consumer of this type can ignore this metadata and treat
- this type as int.
-
- The first argument to Annotated must be a valid type (and will be in
- the __origin__ field), the remaining arguments are kept as a tuple in
- the __extra__ field.
-
- Details:
-
- - It's an error to call `Annotated` with less than two arguments.
- - Nested Annotated are flattened::
-
- Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
- - Instantiating an annotated type is equivalent to instantiating the
- underlying type::
-
- Annotated[C, Ann1](5) == C(5)
-
- - Annotated can be used as a generic type alias::
-
- Optimized = Annotated[T, runtime.Optimize()]
- Optimized[int] == Annotated[int, runtime.Optimize()]
-
- OptimizedList = Annotated[List[T], runtime.Optimize()]
- OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwargs):
- raise TypeError("Type Annotated cannot be instantiated.")
-
- @typing._tp_cache
- def __class_getitem__(cls, params):
- if not isinstance(params, tuple) or len(params) < 2:
- raise TypeError("Annotated[...] should be used "
- "with at least two arguments (a type and an "
- "annotation).")
- allowed_special_forms = (ClassVar, Final)
- if get_origin(params[0]) in allowed_special_forms:
- origin = params[0]
- else:
- msg = "Annotated[t, ...]: t must be a type."
- origin = typing._type_check(params[0], msg)
- metadata = tuple(params[1:])
- return _AnnotatedAlias(origin, metadata)
+ def _could_be_inserted_optional(t):
+ """detects Union[..., None] pattern"""
+ if not isinstance(t, typing._UnionGenericAlias):
+ return False
+ # Assume that if the last argument is not None, the Union was user-defined
+ if t.__args__[-1] is not _NoneType:
+ return False
+ return True
- def __init_subclass__(cls, *args, **kwargs):
- raise TypeError(
- f"Cannot subclass {cls.__module__}.Annotated"
- )
+ # < 3.11
+ def _clean_optional(obj, hints, globalns=None, localns=None):
+ # reverts injected Union[..., None] cases from typing.get_type_hints
+ # when a None default value is used.
+ # see https://github.com/python/typing_extensions/issues/310
+ if not hints or isinstance(obj, type):
+ return
+ defaults = typing._get_defaults(obj) # avoid accessing __annotations___
+ if not defaults:
+ return
+ original_hints = obj.__annotations__
+ for name, value in hints.items():
+ # Not a Union[..., None], or replacement conditions not fulfilled
+ if (not _could_be_inserted_optional(value)
+ or name not in defaults
+ or defaults[name] is not None
+ ):
+ continue
+ original_value = original_hints[name]
+ # value=NoneType should have caused a skip above but check for safety
+ if original_value is None:
+ original_value = _NoneType
+ # Forward reference
+ if isinstance(original_value, str):
+ if globalns is None:
+ if isinstance(obj, _types.ModuleType):
+ globalns = obj.__dict__
+ else:
+ nsobj = obj
+ # Find globalns for the unwrapped object.
+ while hasattr(nsobj, '__wrapped__'):
+ nsobj = nsobj.__wrapped__
+ globalns = getattr(nsobj, '__globals__', {})
+ if localns is None:
+ localns = globalns
+ elif localns is None:
+ localns = globalns
+
+ original_value = ForwardRef(
+ original_value,
+ is_argument=not isinstance(obj, _types.ModuleType)
+ )
+ original_evaluated = typing._eval_type(original_value, globalns, localns)
+ # Compare whether the values differ. Note that even if they are equal,
+ # `value` may have been cached by typing._tp_cache, unlike original_evaluated
+ if original_evaluated != value or (
+ # 3.10: ForwardRefs of UnionType might be turned into _UnionGenericAlias
+ hasattr(_types, "UnionType")
+ and isinstance(original_evaluated, _types.UnionType)
+ and not isinstance(value, _types.UnionType)
+ ):
+ hints[name] = original_evaluated
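The effect of _clean_optional is easiest to see on a function whose parameter has a None default but a non-Optional annotation (a sketch; the behavioural difference only exists below 3.11, where typing.get_type_hints still injects Optional):

    import typing_extensions

    def connect(host: str = None):  # annotation deliberately left as plain `str`
        ...

    # On Python < 3.11, typing.get_type_hints() would report Optional[str] here;
    # the wrapper above reverts that implicit Optional.
    assert typing_extensions.get_type_hints(connect)["host"] is str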
-# Python 3.8 has get_origin() and get_args() but those implementations aren't
-# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# Python 3.9 has get_origin() and get_args() but those implementations don't support
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
if sys.version_info[:2] >= (3, 10):
get_origin = typing.get_origin
get_args = typing.get_args
-# 3.7-3.9
+# 3.9
else:
- try:
- # 3.9+
- from typing import _BaseGenericAlias
- except ImportError:
- _BaseGenericAlias = typing._GenericAlias
- try:
- # 3.9+
- from typing import GenericAlias as _typing_GenericAlias
- except ImportError:
- _typing_GenericAlias = typing._GenericAlias
-
def get_origin(tp):
"""Get the unsubscripted version of a type.
@@ -1412,9 +1550,9 @@ def get_origin(tp):
get_origin(List[Tuple[T, T]][int]) == list
get_origin(P.args) is P
"""
- if isinstance(tp, _AnnotatedAlias):
+ if isinstance(tp, typing._AnnotatedAlias):
return Annotated
- if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
+ if isinstance(tp, (typing._BaseGenericAlias, _types.GenericAlias,
ParamSpecArgs, ParamSpecKwargs)):
return tp.__origin__
if tp is typing.Generic:
@@ -1432,11 +1570,9 @@ def get_args(tp):
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
- if isinstance(tp, _AnnotatedAlias):
- return (tp.__origin__,) + tp.__metadata__
- if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
- if getattr(tp, "_special", False):
- return ()
+ if isinstance(tp, typing._AnnotatedAlias):
+ return (tp.__origin__, *tp.__metadata__)
+ if isinstance(tp, (typing._GenericAlias, _types.GenericAlias)):
res = tp.__args__
if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
@@ -1448,7 +1584,7 @@ def get_args(tp):
if hasattr(typing, 'TypeAlias'):
TypeAlias = typing.TypeAlias
# 3.9
-elif sys.version_info[:2] >= (3, 9):
+else:
@_ExtensionsSpecialForm
def TypeAlias(self, parameters):
"""Special marker indicating that an assignment should
@@ -1462,36 +1598,16 @@ def TypeAlias(self, parameters):
It's invalid when used anywhere except as in the example above.
"""
raise TypeError(f"{self} is not subscriptable")
-# 3.7-3.8
-else:
- TypeAlias = _ExtensionsSpecialForm(
- 'TypeAlias',
- doc="""Special marker indicating that an assignment should
- be recognized as a proper type alias definition by type
- checkers.
-
- For example::
-
- Predicate: TypeAlias = Callable[..., bool]
-
- It's invalid when used anywhere except as in the example
- above."""
- )
def _set_default(type_param, default):
- if isinstance(default, (tuple, list)):
- type_param.__default__ = tuple((typing._type_check(d, "Default must be a type")
- for d in default))
- elif default != _marker:
- type_param.__default__ = typing._type_check(default, "Default must be a type")
- else:
- type_param.__default__ = None
+ type_param.has_default = lambda: default is not NoDefault
+ type_param.__default__ = default
def _set_module(typevarlike):
# for pickling:
- def_mod = _caller(depth=3)
+ def_mod = _caller(depth=2)
if def_mod != 'typing_extensions':
typevarlike.__module__ = def_mod
@@ -1509,39 +1625,53 @@ def __instancecheck__(cls, __instance: Any) -> bool:
return isinstance(__instance, cls._backported_typevarlike)
-# Add default and infer_variance parameters from PEP 696 and 695
-class TypeVar(metaclass=_TypeVarLikeMeta):
- """Type variable."""
+if _PEP_696_IMPLEMENTED:
+ from typing import TypeVar
+else:
+ # Add default and infer_variance parameters from PEP 696 and 695
+ class TypeVar(metaclass=_TypeVarLikeMeta):
+ """Type variable."""
- _backported_typevarlike = typing.TypeVar
+ _backported_typevarlike = typing.TypeVar
- def __new__(cls, name, *constraints, bound=None,
- covariant=False, contravariant=False,
- default=_marker, infer_variance=False):
- if hasattr(typing, "TypeAliasType"):
- # PEP 695 implemented, can pass infer_variance to typing.TypeVar
- typevar = typing.TypeVar(name, *constraints, bound=bound,
- covariant=covariant, contravariant=contravariant,
- infer_variance=infer_variance)
- else:
- typevar = typing.TypeVar(name, *constraints, bound=bound,
- covariant=covariant, contravariant=contravariant)
- if infer_variance and (covariant or contravariant):
- raise ValueError("Variance cannot be specified with infer_variance.")
- typevar.__infer_variance__ = infer_variance
- _set_default(typevar, default)
- _set_module(typevar)
- return typevar
+ def __new__(cls, name, *constraints, bound=None,
+ covariant=False, contravariant=False,
+ default=NoDefault, infer_variance=False):
+ if hasattr(typing, "TypeAliasType"):
+ # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant,
+ infer_variance=infer_variance)
+ else:
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant)
+ if infer_variance and (covariant or contravariant):
+ raise ValueError("Variance cannot be specified with infer_variance.")
+ typevar.__infer_variance__ = infer_variance
+
+ _set_default(typevar, default)
+ _set_module(typevar)
+
+ def _tvar_prepare_subst(alias, args):
+ if (
+ typevar.has_default()
+ and alias.__parameters__.index(typevar) == len(args)
+ ):
+ args += (typevar.__default__,)
+ return args
+
+ typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+ return typevar
- def __init_subclass__(cls) -> None:
- raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+ def __init_subclass__(cls) -> None:
+ raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
# Python 3.10+ has PEP 612
if hasattr(typing, 'ParamSpecArgs'):
ParamSpecArgs = typing.ParamSpecArgs
ParamSpecKwargs = typing.ParamSpecKwargs
-# 3.7-3.9
+# 3.9
else:
class _Immutable:
"""Mixin to indicate that object should not be copied."""
@@ -1599,8 +1729,12 @@ def __eq__(self, other):
return NotImplemented
return self.__origin__ == other.__origin__
+
+if _PEP_696_IMPLEMENTED:
+ from typing import ParamSpec
+
# 3.10+
-if hasattr(typing, 'ParamSpec'):
+elif hasattr(typing, 'ParamSpec'):
# Add default parameter - PEP 696
class ParamSpec(metaclass=_TypeVarLikeMeta):
@@ -1610,7 +1744,7 @@ class ParamSpec(metaclass=_TypeVarLikeMeta):
def __new__(cls, name, *, bound=None,
covariant=False, contravariant=False,
- infer_variance=False, default=_marker):
+ infer_variance=False, default=NoDefault):
if hasattr(typing, "TypeAliasType"):
# PEP 695 implemented, can pass infer_variance to typing.TypeVar
paramspec = typing.ParamSpec(name, bound=bound,
@@ -1625,12 +1759,30 @@ def __new__(cls, name, *, bound=None,
_set_default(paramspec, default)
_set_module(paramspec)
+
+ def _paramspec_prepare_subst(alias, args):
+ params = alias.__parameters__
+ i = params.index(paramspec)
+ if i == len(args) and paramspec.has_default():
+ args = [*args, paramspec.__default__]
+ if i >= len(args):
+ raise TypeError(f"Too few arguments for {alias}")
+ # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+ if len(params) == 1 and not typing._is_param_expr(args[0]):
+ assert i == 0
+ args = (args,)
+ # Convert lists to tuples to help other libraries cache the results.
+ elif isinstance(args[i], list):
+ args = (*args[:i], tuple(args[i]), *args[i + 1:])
+ return args
+
+ paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
return paramspec
def __init_subclass__(cls) -> None:
raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
-# 3.7-3.9
+# 3.9
else:
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
@@ -1693,8 +1845,8 @@ def kwargs(self):
return ParamSpecKwargs(self)
def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
- infer_variance=False, default=_marker):
- super().__init__([self])
+ infer_variance=False, default=NoDefault):
+ list.__init__(self, [self])
self.__name__ = name
self.__covariant__ = bool(covariant)
self.__contravariant__ = bool(contravariant)
@@ -1735,17 +1887,31 @@ def __call__(self, *args, **kwargs):
pass
-# 3.7-3.9
+# 3.9
if not hasattr(typing, 'Concatenate'):
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+
+ # 3.9.0-1
+ if not hasattr(typing, '_type_convert'):
+ def _type_convert(arg, module=None, *, allow_special_forms=False):
+ """For converting None to type(None), and strings to ForwardRef."""
+ if arg is None:
+ return type(None)
+ if isinstance(arg, str):
+ if sys.version_info <= (3, 9, 6):
+ return ForwardRef(arg)
+ if sys.version_info <= (3, 9, 7):
+ return ForwardRef(arg, module=module)
+ return ForwardRef(arg, module=module, is_class=allow_special_forms)
+ return arg
+ else:
+ _type_convert = typing._type_convert
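+
+ # e.g. _type_convert(None) returns type(None), _type_convert("X") returns a
+ # ForwardRef for "X", and anything else is passed through unchanged.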
+
class _ConcatenateGenericAlias(list):
# Trick Generic into looking into this for __parameters__.
__class__ = typing._GenericAlias
- # Flag in 3.8.
- _special = False
-
def __init__(self, origin, args):
super().__init__(args)
self.__origin__ = origin
@@ -1769,28 +1935,171 @@ def __parameters__(self):
tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
)
+ # 3.9; used by __getitem__ below
+ def copy_with(self, params):
+ if isinstance(params[-1], _ConcatenateGenericAlias):
+ params = (*params[:-1], *params[-1].__args__)
+ elif isinstance(params[-1], (list, tuple)):
+ return (*params[:-1], *params[-1])
+ elif (not (params[-1] is ... or isinstance(params[-1], ParamSpec))):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable or ellipsis.")
+ return self.__class__(self.__origin__, params)
+
+ # 3.9; accessed during GenericAlias.__getitem__ when substituting
+ def __getitem__(self, args):
+ if self.__origin__ in (Generic, Protocol):
+ # Can't subscript Generic[...] or Protocol[...].
+ raise TypeError(f"Cannot subscript already-subscripted {self}")
+ if not self.__parameters__:
+ raise TypeError(f"{self} is not a generic class")
+
+ if not isinstance(args, tuple):
+ args = (args,)
+ args = _unpack_args(*(_type_convert(p) for p in args))
+ params = self.__parameters__
+ for param in params:
+ prepare = getattr(param, "__typing_prepare_subst__", None)
+ if prepare is not None:
+ args = prepare(self, args)
+ # 3.9 & typing.ParamSpec
+ elif isinstance(param, ParamSpec):
+ i = params.index(param)
+ if (
+ i == len(args)
+ and getattr(param, '__default__', NoDefault) is not NoDefault
+ ):
+ args = [*args, param.__default__]
+ if i >= len(args):
+ raise TypeError(f"Too few arguments for {self}")
+ # Special case for Z[[int, str, bool]] == Z[int, str, bool]
+ if len(params) == 1 and not _is_param_expr(args[0]):
+ assert i == 0
+ args = (args,)
+ elif (
+ isinstance(args[i], list)
+ # 3.9
+ # This class inherits from list; do not convert it
+ and not isinstance(args[i], _ConcatenateGenericAlias)
+ ):
+ args = (*args[:i], tuple(args[i]), *args[i + 1:])
-# 3.7-3.9
+ alen = len(args)
+ plen = len(params)
+ if alen != plen:
+ raise TypeError(
+ f"Too {'many' if alen > plen else 'few'} arguments for {self};"
+ f" actual {alen}, expected {plen}"
+ )
+
+ subst = dict(zip(self.__parameters__, args))
+ # determine new args
+ new_args = []
+ for arg in self.__args__:
+ if isinstance(arg, type):
+ new_args.append(arg)
+ continue
+ if isinstance(arg, TypeVar):
+ arg = subst[arg]
+ if (
+ (isinstance(arg, typing._GenericAlias) and _is_unpack(arg))
+ or (
+ hasattr(_types, "GenericAlias")
+ and isinstance(arg, _types.GenericAlias)
+ and getattr(arg, "__unpacked__", False)
+ )
+ ):
+ raise TypeError(f"{arg} is not valid as type argument")
+
+ elif isinstance(arg,
+ typing._GenericAlias
+ if not hasattr(_types, "GenericAlias") else
+ (typing._GenericAlias, _types.GenericAlias)
+ ):
+ subparams = arg.__parameters__
+ if subparams:
+ subargs = tuple(subst[x] for x in subparams)
+ arg = arg[subargs]
+ new_args.append(arg)
+ return self.copy_with(tuple(new_args))
+
+# 3.10+
+else:
+ _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
+
+ # 3.10
+ if sys.version_info < (3, 11):
+
+ class _ConcatenateGenericAlias(typing._ConcatenateGenericAlias, _root=True):
+ # needed for checks in collections.abc.Callable to accept this class
+ __module__ = "typing"
+
+ def copy_with(self, params):
+ if isinstance(params[-1], (list, tuple)):
+ return (*params[:-1], *params[-1])
+ if isinstance(params[-1], typing._ConcatenateGenericAlias):
+ params = (*params[:-1], *params[-1].__args__)
+ elif not (params[-1] is ... or isinstance(params[-1], ParamSpec)):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable or ellipsis.")
+ return super(typing._ConcatenateGenericAlias, self).copy_with(params)
+
+ def __getitem__(self, args):
+ value = super().__getitem__(args)
+ if isinstance(value, tuple) and any(_is_unpack(t) for t in value):
+ return tuple(_unpack_args(*(n for n in value)))
+ return value
+
+
+# 3.9.2
+class _EllipsisDummy: ...
+
+
+# <=3.10
+def _create_concatenate_alias(origin, parameters):
+ if parameters[-1] is ... and sys.version_info < (3, 9, 2):
+ # Hack: arguments must be types; temporarily replace the ellipsis with one.
+ parameters = (*parameters[:-1], _EllipsisDummy)
+ if sys.version_info >= (3, 10, 3):
+ concatenate = _ConcatenateGenericAlias(origin, parameters,
+ _typevar_types=(TypeVar, ParamSpec),
+ _paramspec_tvars=True)
+ else:
+ concatenate = _ConcatenateGenericAlias(origin, parameters)
+ if parameters[-1] is not _EllipsisDummy:
+ return concatenate
+ # Remove dummy again
+ concatenate.__args__ = tuple(p if p is not _EllipsisDummy else ...
+ for p in concatenate.__args__)
+ if sys.version_info < (3, 10):
+ # backport needs __args__ adjustment only
+ return concatenate
+ concatenate.__parameters__ = tuple(p for p in concatenate.__parameters__
+ if p is not _EllipsisDummy)
+ return concatenate
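+
+ # e.g. on 3.9.0/3.9.1, Concatenate[int, ...] is first constructed with
+ # _EllipsisDummy standing in for the ellipsis, and the dummy is then swapped
+ # back to ... in __args__.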
+
+
+# <=3.10
@typing._tp_cache
def _concatenate_getitem(self, parameters):
if parameters == ():
raise TypeError("Cannot take a Concatenate of no types.")
if not isinstance(parameters, tuple):
parameters = (parameters,)
- if not isinstance(parameters[-1], ParamSpec):
+ if not (parameters[-1] is ... or isinstance(parameters[-1], ParamSpec)):
raise TypeError("The last parameter to Concatenate should be a "
- "ParamSpec variable.")
+ "ParamSpec variable or ellipsis.")
msg = "Concatenate[arg, ...]: each arg must be a type."
- parameters = tuple(typing._type_check(p, msg) for p in parameters)
- return _ConcatenateGenericAlias(self, parameters)
+ parameters = (*(typing._type_check(p, msg) for p in parameters[:-1]),
+ parameters[-1])
+ return _create_concatenate_alias(self, parameters)
-# 3.10+
-if hasattr(typing, 'Concatenate'):
+# 3.11+; Concatenate does not accept ellipsis in 3.10
+if sys.version_info >= (3, 11):
Concatenate = typing.Concatenate
- _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811
-# 3.9
-elif sys.version_info[:2] >= (3, 9):
+# <=3.10
+else:
@_ExtensionsSpecialForm
def Concatenate(self, parameters):
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
@@ -1804,30 +2113,13 @@ def Concatenate(self, parameters):
See PEP 612 for detailed information.
"""
return _concatenate_getitem(self, parameters)
-# 3.7-8
-else:
- class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
- def __getitem__(self, parameters):
- return _concatenate_getitem(self, parameters)
-
- Concatenate = _ConcatenateForm(
- 'Concatenate',
- doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
- higher order function which adds, removes or transforms parameters of a
- callable.
-
- For example::
- Callable[Concatenate[int, P], int]
-
- See PEP 612 for detailed information.
- """)
# 3.10+
if hasattr(typing, 'TypeGuard'):
TypeGuard = typing.TypeGuard
# 3.9
-elif sys.version_info[:2] >= (3, 9):
+else:
@_ExtensionsSpecialForm
def TypeGuard(self, parameters):
"""Special typing form used to annotate the return type of a user-defined
@@ -1874,105 +2166,92 @@ def is_str(val: Union[str, float]):
"""
item = typing._type_check(parameters, f'{self} accepts only a single type.')
return typing._GenericAlias(self, (item,))
-# 3.7-3.8
-else:
- class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
- def __getitem__(self, parameters):
- item = typing._type_check(parameters,
- f'{self._name} accepts only a single type')
- return typing._GenericAlias(self, (item,))
- TypeGuard = _TypeGuardForm(
- 'TypeGuard',
- doc="""Special typing form used to annotate the return type of a user-defined
- type guard function. ``TypeGuard`` only accepts a single type argument.
+
+# 3.13+
+if hasattr(typing, 'TypeIs'):
+ TypeIs = typing.TypeIs
+# <=3.12
+else:
+ @_ExtensionsSpecialForm
+ def TypeIs(self, parameters):
+ """Special typing form used to annotate the return type of a user-defined
+ type narrower function. ``TypeIs`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
- ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
conditional expression here is sometimes referred to as a "type guard".
Sometimes it would be convenient to use a user-defined boolean function
- as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ as a type guard. Such a function should use ``TypeIs[...]`` as its
return type to alert static type checkers to this intention.
- Using ``-> TypeGuard`` tells the static type checker that for a given
+ Using ``-> TypeIs`` tells the static type checker that for a given
function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
- is the type inside ``TypeGuard``.
+ is the intersection of the type inside ``TypeIs`` and the argument's
+ previously known type.
For example::
- def is_str(val: Union[str, float]):
- # "isinstance" type guard
- if isinstance(val, str):
- # Type of ``val`` is narrowed to ``str``
- ...
- else:
- # Else, type of ``val`` is narrowed to ``float``.
- ...
-
- Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
- form of ``TypeA`` (it can even be a wider form) and this may lead to
- type-unsafe results. The main reason is to allow for things like
- narrowing ``List[object]`` to ``List[str]`` even though the latter is not
- a subtype of the former, since ``List`` is invariant. The responsibility of
- writing type-safe type guards is left to the user.
-
- ``TypeGuard`` also works with type variables. For more information, see
- PEP 647 (User-Defined Type Guards).
- """)
+ def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+ return hasattr(val, '__await__')
+ def f(val: Union[int, Awaitable[int]]) -> int:
+ if is_awaitable(val):
+ assert_type(val, Awaitable[int])
+ else:
+ assert_type(val, int)
-# Vendored from cpython typing._SpecialFrom
-class _SpecialForm(typing._Final, _root=True):
- __slots__ = ('_name', '__doc__', '_getitem')
-
- def __init__(self, getitem):
- self._getitem = getitem
- self._name = getitem.__name__
- self.__doc__ = getitem.__doc__
-
- def __getattr__(self, item):
- if item in {'__name__', '__qualname__'}:
- return self._name
+ ``TypeIs`` also works with type variables. For more information, see
+ PEP 742 (Narrowing types with TypeIs).
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
- raise AttributeError(item)
- def __mro_entries__(self, bases):
- raise TypeError(f"Cannot subclass {self!r}")
+# 3.14+?
+if hasattr(typing, 'TypeForm'):
+ TypeForm = typing.TypeForm
+# <=3.13
+else:
+ class _TypeFormForm(_ExtensionsSpecialForm, _root=True):
+ # TypeForm(X) is equivalent to X but indicates to the type checker
+ # that the object is a TypeForm.
+ def __call__(self, obj, /):
+ return obj
- def __repr__(self):
- return f'typing_extensions.{self._name}'
+ @_TypeFormForm
+ def TypeForm(self, parameters):
+ """A special form representing the value that results from the evaluation
+ of a type expression. This value encodes the information supplied in the
+ type expression, and it represents the type described by that type expression.
- def __reduce__(self):
- return self._name
+ When used in a type expression, TypeForm describes a set of type form objects.
+ It accepts a single type argument, which must be a valid type expression.
+ ``TypeForm[T]`` describes the set of all type form objects that represent
+ the type T or types that are assignable to T.
- def __call__(self, *args, **kwds):
- raise TypeError(f"Cannot instantiate {self!r}")
+ Usage:
- def __or__(self, other):
- return typing.Union[self, other]
+ def cast[T](typ: TypeForm[T], value: Any) -> T: ...
- def __ror__(self, other):
- return typing.Union[other, self]
+ reveal_type(cast(int, "x")) # int
- def __instancecheck__(self, obj):
- raise TypeError(f"{self} cannot be used with isinstance()")
+ See PEP 747 for more information.
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
- def __subclasscheck__(self, cls):
- raise TypeError(f"{self} cannot be used with issubclass()")
- @typing._tp_cache
- def __getitem__(self, parameters):
- return self._getitem(self, parameters)
-if hasattr(typing, "LiteralString"):
+if hasattr(typing, "LiteralString"): # 3.11+
LiteralString = typing.LiteralString
else:
@_SpecialForm
@@ -1995,7 +2274,7 @@ def query(sql: LiteralString) -> ...:
raise TypeError(f"{self} is not subscriptable")
-if hasattr(typing, "Self"):
+if hasattr(typing, "Self"): # 3.11+
Self = typing.Self
else:
@_SpecialForm
@@ -2016,7 +2295,7 @@ def parse(self, data: bytes) -> Self:
raise TypeError(f"{self} is not subscriptable")
-if hasattr(typing, "Never"):
+if hasattr(typing, "Never"): # 3.11+
Never = typing.Never
else:
@_SpecialForm
@@ -2046,10 +2325,10 @@ def int_or_str(arg: int | str) -> None:
raise TypeError(f"{self} is not subscriptable")
-if hasattr(typing, 'Required'):
+if hasattr(typing, 'Required'): # 3.11+
Required = typing.Required
NotRequired = typing.NotRequired
-elif sys.version_info[:2] >= (3, 9):
+else: # <=3.10
@_ExtensionsSpecialForm
def Required(self, parameters):
"""A special typing construct to mark a key of a total=False TypedDict
@@ -2087,44 +2366,28 @@ class Movie(TypedDict):
item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
return typing._GenericAlias(self, (item,))
-else:
- class _RequiredForm(_ExtensionsSpecialForm, _root=True):
- def __getitem__(self, parameters):
- item = typing._type_check(parameters,
- f'{self._name} accepts only a single type.')
- return typing._GenericAlias(self, (item,))
-
- Required = _RequiredForm(
- 'Required',
- doc="""A special typing construct to mark a key of a total=False TypedDict
- as required. For example:
-
- class Movie(TypedDict, total=False):
- title: Required[str]
- year: int
- m = Movie(
- title='The Matrix', # typechecker error if key is omitted
- year=1999,
- )
+if hasattr(typing, 'ReadOnly'):
+ ReadOnly = typing.ReadOnly
+else: # <=3.12
+ @_ExtensionsSpecialForm
+ def ReadOnly(self, parameters):
+ """A special typing construct to mark an item of a TypedDict as read-only.
- There is no runtime checking that a required key is actually provided
- when instantiating a related TypedDict.
- """)
- NotRequired = _RequiredForm(
- 'NotRequired',
- doc="""A special typing construct to mark a key of a TypedDict as
- potentially missing. For example:
+ For example:
class Movie(TypedDict):
- title: str
- year: NotRequired[int]
+ title: ReadOnly[str]
+ year: int
- m = Movie(
- title='The Matrix', # typechecker error if key is omitted
- year=1999,
- )
- """)
+ def mutate_movie(m: Movie) -> None:
+ m["year"] = 1992 # allowed
+ m["title"] = "The Matrix" # typechecker error
+
+ There is no runtime checking for this property.
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
_UNPACK_DOC = """\
@@ -2175,14 +2438,38 @@ def foo(**kwargs: Unpack[Movie]): ...
def _is_unpack(obj):
return get_origin(obj) is Unpack
-elif sys.version_info[:2] >= (3, 9):
+else: # <=3.11
class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
def __init__(self, getitem):
super().__init__(getitem)
self.__doc__ = _UNPACK_DOC
class _UnpackAlias(typing._GenericAlias, _root=True):
- __class__ = typing.TypeVar
+ if sys.version_info < (3, 11):
+ # needed for compatibility with Generic[Unpack[Ts]]
+ __class__ = typing.TypeVar
+
+ @property
+ def __typing_unpacked_tuple_args__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ arg, = self.__args__
+ if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+ if arg.__origin__ is not tuple:
+ raise TypeError("Unpack[...] must be used with a tuple type")
+ return arg.__args__
+ return None
+
+ @property
+ def __typing_is_unpacked_typevartuple__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ return isinstance(self.__args__[0], TypeVarTuple)
+
+ def __getitem__(self, args):
+ if self.__typing_is_unpacked_typevartuple__:
+ return args
+ return super().__getitem__(args)
@_UnpackSpecialForm
def Unpack(self, parameters):
@@ -2192,23 +2479,22 @@ def Unpack(self, parameters):
def _is_unpack(obj):
return isinstance(obj, _UnpackAlias)
-else:
- class _UnpackAlias(typing._GenericAlias, _root=True):
- __class__ = typing.TypeVar
-
- class _UnpackForm(_ExtensionsSpecialForm, _root=True):
- def __getitem__(self, parameters):
- item = typing._type_check(parameters,
- f'{self._name} accepts only a single type.')
- return _UnpackAlias(self, (item,))
- Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
+def _unpack_args(*args):
+ newargs = []
+ for arg in args:
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs is not None and (not (subargs and subargs[-1] is ...)):
+ newargs.extend(subargs)
+ else:
+ newargs.append(arg)
+ return newargs
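+
+ # e.g. _unpack_args(int, Unpack[Tuple[str, float]]) flattens to
+ # [int, str, float], while an unbounded Unpack[Tuple[int, ...]] (and any
+ # plain argument) is kept as-is.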
- def _is_unpack(obj):
- return isinstance(obj, _UnpackAlias)
+if _PEP_696_IMPLEMENTED:
+ from typing import TypeVarTuple
-if hasattr(typing, "TypeVarTuple"): # 3.11+
+elif hasattr(typing, "TypeVarTuple"): # 3.11+
# Add default parameter - PEP 696
class TypeVarTuple(metaclass=_TypeVarLikeMeta):
@@ -2216,16 +2502,63 @@ class TypeVarTuple(metaclass=_TypeVarLikeMeta):
_backported_typevarlike = typing.TypeVarTuple
- def __new__(cls, name, *, default=_marker):
+ def __new__(cls, name, *, default=NoDefault):
tvt = typing.TypeVarTuple(name)
_set_default(tvt, default)
_set_module(tvt)
+
+ def _typevartuple_prepare_subst(alias, args):
+ params = alias.__parameters__
+ typevartuple_index = params.index(tvt)
+ for param in params[typevartuple_index + 1:]:
+ if isinstance(param, TypeVarTuple):
+ raise TypeError(
+ f"More than one TypeVarTuple parameter in {alias}"
+ )
+
+ alen = len(args)
+ plen = len(params)
+ left = typevartuple_index
+ right = plen - typevartuple_index - 1
+ var_tuple_index = None
+ fillarg = None
+ for k, arg in enumerate(args):
+ if not isinstance(arg, type):
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs and len(subargs) == 2 and subargs[-1] is ...:
+ if var_tuple_index is not None:
+ raise TypeError(
+ "More than one unpacked "
+ "arbitrary-length tuple argument"
+ )
+ var_tuple_index = k
+ fillarg = subargs[0]
+ if var_tuple_index is not None:
+ left = min(left, var_tuple_index)
+ right = min(right, alen - var_tuple_index - 1)
+ elif left + right > alen:
+ raise TypeError(f"Too few arguments for {alias};"
+ f" actual {alen}, expected at least {plen - 1}")
+ if left == alen - right and tvt.has_default():
+ replacement = _unpack_args(tvt.__default__)
+ else:
+ replacement = args[left: alen - right]
+
+ return (
+ *args[:left],
+ *([fillarg] * (typevartuple_index - left)),
+ replacement,
+ *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+ *args[alen - right:],
+ )
+
+ tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
return tvt
def __init_subclass__(self, *args, **kwds):
raise TypeError("Cannot subclass special typing classes")
-else:
+else: # <=3.10
class TypeVarTuple(_DefaultMixin):
"""Type variable tuple.
@@ -2276,7 +2609,7 @@ def get_shape(self) -> Tuple[*Ts]:
def __iter__(self):
yield self.__unpacked__
- def __init__(self, name, *, default=_marker):
+ def __init__(self, name, *, default=NoDefault):
self.__name__ = name
_DefaultMixin.__init__(self, default)
@@ -2304,10 +2637,10 @@ def __init_subclass__(self, *args, **kwds):
raise TypeError("Cannot subclass special typing classes")
-if hasattr(typing, "reveal_type"):
+if hasattr(typing, "reveal_type"): # 3.11+
reveal_type = typing.reveal_type
-else:
- def reveal_type(__obj: T) -> T:
+else: # <=3.10
+ def reveal_type(obj: T, /) -> T:
"""Reveal the inferred type of a variable.
When a static type checker encounters a call to ``reveal_type()``,
@@ -2323,14 +2656,20 @@ def reveal_type(__obj: T) -> T:
argument and returns it unchanged.
"""
- print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr)
- return __obj
+ print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+ return obj
+
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+
+ _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else: # <=3.10
+ _ASSERT_NEVER_REPR_MAX_LENGTH = 100
-if hasattr(typing, "assert_never"):
+
+if hasattr(typing, "assert_never"): # 3.11+
assert_never = typing.assert_never
-else:
- def assert_never(__arg: Never) -> Never:
+else: # <=3.10
+ def assert_never(arg: Never, /) -> Never:
"""Assert to the type checker that a line of code is unreachable.
Example::
@@ -2350,13 +2689,16 @@ def int_or_str(arg: int | str) -> None:
At runtime, this throws an exception when called.
"""
- raise AssertionError("Expected code to be unreachable")
+ value = repr(arg)
+ if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+ value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+ raise AssertionError(f"Expected code to be unreachable, but got: {value}")
-if sys.version_info >= (3, 12):
+if sys.version_info >= (3, 12): # 3.12+
# dataclass_transform exists in 3.11 but lacks the frozen_default parameter
dataclass_transform = typing.dataclass_transform
-else:
+else: # <=3.11
def dataclass_transform(
*,
eq_default: bool = True,
@@ -2443,18 +2785,18 @@ def decorator(cls_or_fn):
return decorator
-if hasattr(typing, "override"):
+if hasattr(typing, "override"): # 3.12+
override = typing.override
-else:
+else: # <=3.11
_F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
- def override(__arg: _F) -> _F:
+ def override(arg: _F, /) -> _F:
"""Indicate that a method is intended to override a method in a base class.
Usage:
class Base:
- def method(self) -> None: ...
+ def method(self) -> None:
pass
class Child(Base):
@@ -2475,28 +2817,27 @@ def method(self) -> None:
"""
try:
- __arg.__override__ = True
+ arg.__override__ = True
except (AttributeError, TypeError):
# Skip the attribute silently if it is not writable.
# AttributeError happens if the object has __slots__ or a
# read-only property, TypeError if it's a builtin class.
pass
- return __arg
+ return arg
-if hasattr(typing, "deprecated"):
- deprecated = typing.deprecated
+# Python 3.13.3+ contains a fix for the wrapped __new__
+if sys.version_info >= (3, 13, 3):
+ deprecated = warnings.deprecated
else:
_T = typing.TypeVar("_T")
- def deprecated(
- __msg: str,
- *,
- category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
- stacklevel: int = 1,
- ) -> typing.Callable[[_T], _T]:
+ class deprecated:
"""Indicate that a class, function or overload is deprecated.
+ When this decorator is applied to an object, the type checker
+ will generate a diagnostic on usage of the deprecated object.
+
Usage:
@deprecated("Use B instead")
@@ -2513,63 +2854,138 @@ def g(x: int) -> int: ...
@overload
def g(x: str) -> int: ...
- When this decorator is applied to an object, the type checker
- will generate a diagnostic on usage of the deprecated object.
-
- The warning specified by ``category`` will be emitted on use
- of deprecated objects. For functions, that happens on calls;
- for classes, on instantiation. If the ``category`` is ``None``,
- no warning is emitted. The ``stacklevel`` determines where the
+ The warning specified by *category* will be emitted at runtime
+ on use of deprecated objects. For functions, that happens on calls;
+ for classes, on instantiation and on creation of subclasses.
+ If the *category* is ``None``, no warning is emitted at runtime.
+ The *stacklevel* determines where the
warning is emitted. If it is ``1`` (the default), the warning
is emitted at the direct caller of the deprecated object; if it
is higher, it is emitted further up the stack.
+ Static type checker behavior is not affected by the *category*
+ and *stacklevel* arguments.
- The decorator sets the ``__deprecated__``
- attribute on the decorated object to the deprecation message
- passed to the decorator. If applied to an overload, the decorator
+ The deprecation message passed to the decorator is saved in the
+ ``__deprecated__`` attribute on the decorated object.
+ If applied to an overload, the decorator
must be after the ``@overload`` decorator for the attribute to
exist on the overload as returned by ``get_overloads()``.
See PEP 702 for details.
"""
- def decorator(__arg: _T) -> _T:
+ def __init__(
+ self,
+ message: str,
+ /,
+ *,
+ category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+ stacklevel: int = 1,
+ ) -> None:
+ if not isinstance(message, str):
+ raise TypeError(
+ "Expected an object of type str for 'message', not "
+ f"{type(message).__name__!r}"
+ )
+ self.message = message
+ self.category = category
+ self.stacklevel = stacklevel
+
+ def __call__(self, arg: _T, /) -> _T:
+ # Make sure the inner functions created below don't
+ # retain a reference to self.
+ msg = self.message
+ category = self.category
+ stacklevel = self.stacklevel
if category is None:
- __arg.__deprecated__ = __msg
- return __arg
- elif isinstance(__arg, type):
- original_new = __arg.__new__
- has_init = __arg.__init__ is not object.__init__
+ arg.__deprecated__ = msg
+ return arg
+ elif isinstance(arg, type):
+ import functools
+ from types import MethodType
+
+ original_new = arg.__new__
@functools.wraps(original_new)
- def __new__(cls, *args, **kwargs):
- warnings.warn(__msg, category=category, stacklevel=stacklevel + 1)
+ def __new__(cls, /, *args, **kwargs):
+ if cls is arg:
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
if original_new is not object.__new__:
return original_new(cls, *args, **kwargs)
# Mirrors a similar check in object.__new__.
- elif not has_init and (args or kwargs):
+ elif cls.__init__ is object.__init__ and (args or kwargs):
raise TypeError(f"{cls.__name__}() takes no arguments")
else:
return original_new(cls)
- __arg.__new__ = staticmethod(__new__)
- __arg.__deprecated__ = __new__.__deprecated__ = __msg
- return __arg
- elif callable(__arg):
- @functools.wraps(__arg)
+ arg.__new__ = staticmethod(__new__)
+
+ original_init_subclass = arg.__init_subclass__
+ # We need slightly different behavior if __init_subclass__
+ # is a bound method (likely if it was implemented in Python)
+ if isinstance(original_init_subclass, MethodType):
+ original_init_subclass = original_init_subclass.__func__
+
+ @functools.wraps(original_init_subclass)
+ def __init_subclass__(*args, **kwargs):
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ return original_init_subclass(*args, **kwargs)
+
+ arg.__init_subclass__ = classmethod(__init_subclass__)
+ # Otherwise, __init_subclass__ is likely a builtin, such as
+ # object's implementation of __init_subclass__.
+ else:
+ @functools.wraps(original_init_subclass)
+ def __init_subclass__(*args, **kwargs):
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ return original_init_subclass(*args, **kwargs)
+
+ arg.__init_subclass__ = __init_subclass__
+
+ arg.__deprecated__ = __new__.__deprecated__ = msg
+ __init_subclass__.__deprecated__ = msg
+ return arg
+ elif callable(arg):
+ import asyncio.coroutines
+ import functools
+ import inspect
+
+ @functools.wraps(arg)
def wrapper(*args, **kwargs):
- warnings.warn(__msg, category=category, stacklevel=stacklevel + 1)
- return __arg(*args, **kwargs)
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ return arg(*args, **kwargs)
- __arg.__deprecated__ = wrapper.__deprecated__ = __msg
+ if asyncio.coroutines.iscoroutinefunction(arg):
+ if sys.version_info >= (3, 12):
+ wrapper = inspect.markcoroutinefunction(wrapper)
+ else:
+ wrapper._is_coroutine = asyncio.coroutines._is_coroutine
+
+ arg.__deprecated__ = wrapper.__deprecated__ = msg
return wrapper
else:
raise TypeError(
"@deprecated decorator with non-None category must be applied to "
- f"a class or callable, not {__arg!r}"
+ f"a class or callable, not {arg!r}"
)
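+
+ # A small runtime sketch (Ham and Bacon are hypothetical example names):
+ #
+ #     @deprecated("Use Spam instead")
+ #     class Ham: ...
+ #
+ #     Ham()                   # emits DeprecationWarning on instantiation
+ #     class Bacon(Ham): ...   # emits DeprecationWarning on subclassing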
- return decorator
+if sys.version_info < (3, 10):
+ def _is_param_expr(arg):
+ return arg is ... or isinstance(
+ arg, (tuple, list, ParamSpec, _ConcatenateGenericAlias)
+ )
+else:
+ def _is_param_expr(arg):
+ return arg is ... or isinstance(
+ arg,
+ (
+ tuple,
+ list,
+ ParamSpec,
+ _ConcatenateGenericAlias,
+ typing._ConcatenateGenericAlias,
+ ),
+ )
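+
+ # e.g. _is_param_expr(...), _is_param_expr([int, str]) and
+ # _is_param_expr(P) (for a ParamSpec P) are all True, while
+ # _is_param_expr(int) is False.
+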
# We have to do some monkey patching to deal with the dual nature of
@@ -2580,11 +2996,244 @@ def wrapper(*args, **kwargs):
# counting generic parameters, so that when we subscript a generic,
# the runtime doesn't try to substitute the Unpack with the subscripted type.
if not hasattr(typing, "TypeVarTuple"):
- typing._collect_type_vars = _collect_type_vars
+ def _check_generic(cls, parameters, elen=_marker):
+ """Check correct count for parameters of a generic cls (internal helper).
+
+ This gives a nice error message in case of count mismatch.
+ """
+ # If substituting a single ParamSpec with multiple arguments
+ # we do not check the count
+ if (inspect.isclass(cls) and issubclass(cls, typing.Generic)
+ and len(cls.__parameters__) == 1
+ and isinstance(cls.__parameters__[0], ParamSpec)
+ and parameters
+ and not _is_param_expr(parameters[0])
+ ):
+ # Generic modifies the parameters variable, but we cannot do that here
+ return
+
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ if elen is _marker:
+ if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+ raise TypeError(f"{cls} is not a generic class")
+ elen = len(cls.__parameters__)
+ alen = len(parameters)
+ if alen != elen:
+ expect_val = elen
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+ num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+ if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+ return
+
+ # deal with TypeVarLike defaults
+ # required TypeVarLikes cannot appear after a defaulted one.
+ if alen < elen:
+ # since we validate TypeVarLike default in _collect_type_vars
+ # or _collect_parameters, we can safely check parameters[alen]
+ if (
+ getattr(parameters[alen], '__default__', NoDefault)
+ is not NoDefault
+ ):
+ return
+
+ num_default_tv = sum(getattr(p, '__default__', NoDefault)
+ is not NoDefault for p in parameters)
+
+ elen -= num_default_tv
+
+ expect_val = f"at least {elen}"
+
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
+ f" for {cls}; actual {alen}, expected {expect_val}")
+else:
+ # Python 3.11+
+
+ def _check_generic(cls, parameters, elen):
+ """Check correct count for parameters of a generic cls (internal helper).
+
+ This gives a nice error message in case of count mismatch.
+ """
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ alen = len(parameters)
+ if alen != elen:
+ expect_val = elen
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+
+ # deal with TypeVarLike defaults
+ # required TypeVarLikes cannot appear after a defaulted one.
+ if alen < elen:
+ # since we validate TypeVarLike default in _collect_type_vars
+ # or _collect_parameters, we can safely check parameters[alen]
+ if (
+ getattr(parameters[alen], '__default__', NoDefault)
+ is not NoDefault
+ ):
+ return
+
+ num_default_tv = sum(getattr(p, '__default__', NoDefault)
+ is not NoDefault for p in parameters)
+
+ elen -= num_default_tv
+
+ expect_val = f"at least {elen}"
+
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
+ f" for {cls}; actual {alen}, expected {expect_val}")
+
+if not _PEP_696_IMPLEMENTED:
typing._check_generic = _check_generic
-# Backport typing.NamedTuple as it exists in Python 3.12.
+def _has_generic_or_protocol_as_origin() -> bool:
+ try:
+ frame = sys._getframe(2)
+ # - Catch AttributeError: not all Python implementations have sys._getframe()
+ # - Catch ValueError: maybe we're called from an unexpected module
+ # and the call stack isn't deep enough
+ except (AttributeError, ValueError):
+ return False # err on the side of leniency
+ else:
+ # If we somehow get invoked from outside typing.py,
+ # also err on the side of leniency
+ if frame.f_globals.get("__name__") != "typing":
+ return False
+ origin = frame.f_locals.get("origin")
+ # Cannot use "in" because origin may be an object with a buggy __eq__ that
+ # throws an error.
+ return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+ if get_origin(x) is not Unpack:
+ return False
+ args = get_args(x)
+ return (
+ bool(args)
+ and len(args) == 1
+ and type(args[0]) in _TYPEVARTUPLE_TYPES
+ )
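+
+ # e.g. with Ts = TypeVarTuple("Ts") (a hypothetical example name),
+ # _is_unpacked_typevartuple(Unpack[Ts]) is True, whereas
+ # _is_unpacked_typevartuple(Unpack[Tuple[int]]) is False.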
+
+
+# In Python 3.11+, _collect_type_vars was renamed to _collect_parameters
+if hasattr(typing, '_collect_type_vars'):
+ def _collect_type_vars(types, typevar_types=None):
+ """Collect all type variable contained in types in order of
+ first appearance (lexicographic order). For example::
+
+ _collect_type_vars((T, List[S, T])) == (T, S)
+ """
+ if typevar_types is None:
+ typevar_types = typing.TypeVar
+ tvars = []
+
+ # A required TypeVarLike cannot appear after a TypeVarLike with a default
+ # if it was a direct call to `Generic[]` or `Protocol[]`
+ enforce_default_ordering = _has_generic_or_protocol_as_origin()
+ default_encountered = False
+
+ # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+ type_var_tuple_encountered = False
+
+ for t in types:
+ if _is_unpacked_typevartuple(t):
+ type_var_tuple_encountered = True
+ elif (
+ isinstance(t, typevar_types) and not isinstance(t, _UnpackAlias)
+ and t not in tvars
+ ):
+ if enforce_default_ordering:
+ has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+ if has_default:
+ if type_var_tuple_encountered:
+ raise TypeError('Type parameter with a default'
+ ' follows TypeVarTuple')
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
+
+ tvars.append(t)
+ if _should_collect_from_parameters(t):
+ tvars.extend([t for t in t.__parameters__ if t not in tvars])
+ elif isinstance(t, tuple):
+ # Collect nested type_vars
+ # tuple wrapped by _prepare_paramspec_params(cls, params)
+ for x in t:
+ for collected in _collect_type_vars([x]):
+ if collected not in tvars:
+ tvars.append(collected)
+ return tuple(tvars)
+
+ typing._collect_type_vars = _collect_type_vars
+else:
+ def _collect_parameters(args):
+ """Collect all type variables and parameter specifications in args
+ in order of first appearance (lexicographic order).
+
+ For example::
+
+ assert _collect_parameters((T, Callable[P, T])) == (T, P)
+ """
+ parameters = []
+
+ # A required TypeVarLike cannot appear after a TypeVarLike with default
+ # if it was a direct call to `Generic[]` or `Protocol[]`
+ enforce_default_ordering = _has_generic_or_protocol_as_origin()
+ default_encountered = False
+
+ # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+ type_var_tuple_encountered = False
+
+ for t in args:
+ if isinstance(t, type):
+ # We don't want __parameters__ descriptor of a bare Python class.
+ pass
+ elif isinstance(t, tuple):
+ # `t` might be a tuple, when `ParamSpec` is substituted with
+ # `[T, int]`, or `[int, *Ts]`, etc.
+ for x in t:
+ for collected in _collect_parameters([x]):
+ if collected not in parameters:
+ parameters.append(collected)
+ elif hasattr(t, '__typing_subst__'):
+ if t not in parameters:
+ if enforce_default_ordering:
+ has_default = (
+ getattr(t, '__default__', NoDefault) is not NoDefault
+ )
+
+ if type_var_tuple_encountered and has_default:
+ raise TypeError('Type parameter with a default'
+ ' follows TypeVarTuple')
+
+ if has_default:
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
+
+ parameters.append(t)
+ else:
+ if _is_unpacked_typevartuple(t):
+ type_var_tuple_encountered = True
+ for x in getattr(t, '__parameters__', ()):
+ if x not in parameters:
+ parameters.append(x)
+
+ return tuple(parameters)
+
+ if not _PEP_696_IMPLEMENTED:
+ typing._collect_parameters = _collect_parameters
+
+# Backport typing.NamedTuple as it exists in Python 3.13.
# In 3.11, the ability to define generic `NamedTuple`s was supported.
# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
# On 3.12, we added __orig_bases__ to call-based NamedTuples
@@ -2599,10 +3248,6 @@ def _make_nmtuple(name, types, module, defaults=()):
nm_tpl = collections.namedtuple(name, fields,
defaults=defaults, module=module)
nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
- # The `_field_types` attribute was removed in 3.9;
- # in earlier versions, it is the same as the `__annotations__` attribute
- if sys.version_info < (3, 9):
- nm_tpl._field_types = annotations
return nm_tpl
_prohibited_namedtuple_fields = typing._prohibited
@@ -2616,7 +3261,13 @@ def __new__(cls, typename, bases, ns):
raise TypeError(
'can only inherit from a NamedTuple type and Generic')
bases = tuple(tuple if base is _NamedTuple else base for base in bases)
- types = ns.get('__annotations__', {})
+ if "__annotations__" in ns:
+ types = ns["__annotations__"]
+ elif "__annotate__" in ns:
+ # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+ types = ns["__annotate__"](1)
+ else:
+ types = {}
default_names = []
for field_name in types:
if field_name in ns:
@@ -2639,11 +3290,35 @@ def __new__(cls, typename, bases, ns):
class_getitem = typing.Generic.__class_getitem__.__func__
nm_tpl.__class_getitem__ = classmethod(class_getitem)
# update from user namespace without overriding special namedtuple attributes
- for key in ns:
+ for key, val in ns.items():
if key in _prohibited_namedtuple_fields:
raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
- elif key not in _special_namedtuple_fields and key not in nm_tpl._fields:
- setattr(nm_tpl, key, ns[key])
+ elif key not in _special_namedtuple_fields:
+ if key not in nm_tpl._fields:
+ setattr(nm_tpl, key, ns[key])
+ try:
+ set_name = type(val).__set_name__
+ except AttributeError:
+ pass
+ else:
+ try:
+ set_name(val, nm_tpl, key)
+ except BaseException as e:
+ msg = (
+ f"Error calling __set_name__ on {type(val).__name__!r} "
+ f"instance {key!r} in {typename!r}"
+ )
+ # BaseException.add_note() existed on py311,
+ # but the __set_name__ machinery didn't start
+ # using add_note() until py312.
+ # Making sure exceptions are raised in the same way
+ # as in "normal" classes seems most important here.
+ if sys.version_info >= (3, 12):
+ e.add_note(msg)
+ raise
+ else:
+ raise RuntimeError(msg) from e
+
if typing.Generic in bases:
nm_tpl.__init_subclass__()
return nm_tpl
@@ -2654,8 +3329,7 @@ def _namedtuple_mro_entries(bases):
assert NamedTuple in bases
return (_NamedTuple,)
- @_ensure_subclassable(_namedtuple_mro_entries)
- def NamedTuple(__typename, __fields=_marker, **kwargs):
+ def NamedTuple(typename, fields=_marker, /, **kwargs):
"""Typed version of namedtuple.
Usage::
@@ -2675,7 +3349,7 @@ class Employee(NamedTuple):
Employee = NamedTuple('Employee', [('name', str), ('id', int)])
"""
- if __fields is _marker:
+ if fields is _marker:
if kwargs:
deprecated_thing = "Creating NamedTuple classes using keyword arguments"
deprecation_msg = (
@@ -2684,14 +3358,14 @@ class Employee(NamedTuple):
)
else:
deprecated_thing = "Failing to pass a value for the 'fields' parameter"
- example = f"`{__typename} = NamedTuple({__typename!r}, [])`"
+ example = f"`{typename} = NamedTuple({typename!r}, [])`"
deprecation_msg = (
"{name} is deprecated and will be disallowed in Python {remove}. "
"To create a NamedTuple class with 0 fields "
"using the functional syntax, "
"pass an empty list, e.g. "
) + example + "."
- elif __fields is None:
+ elif fields is None:
if kwargs:
raise TypeError(
"Cannot pass `None` as the 'fields' parameter "
@@ -2699,7 +3373,7 @@ class Employee(NamedTuple):
)
else:
deprecated_thing = "Passing `None` as the 'fields' parameter"
- example = f"`{__typename} = NamedTuple({__typename!r}, [])`"
+ example = f"`{typename} = NamedTuple({typename!r}, [])`"
deprecation_msg = (
"{name} is deprecated and will be disallowed in Python {remove}. "
"To create a NamedTuple class with 0 fields "
@@ -2709,32 +3383,24 @@ class Employee(NamedTuple):
elif kwargs:
raise TypeError("Either list of fields or keywords"
" can be provided to NamedTuple, not both")
- if __fields is _marker or __fields is None:
+ if fields is _marker or fields is None:
warnings.warn(
deprecation_msg.format(name=deprecated_thing, remove="3.15"),
DeprecationWarning,
stacklevel=2,
)
- __fields = kwargs.items()
- nt = _make_nmtuple(__typename, __fields, module=_caller())
+ fields = kwargs.items()
+ nt = _make_nmtuple(typename, fields, module=_caller())
nt.__orig_bases__ = (NamedTuple,)
return nt
- # On 3.8+, alter the signature so that it matches typing.NamedTuple.
- # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7,
- # so just leave the signature as it is on 3.7.
- if sys.version_info >= (3, 8):
- _new_signature = '(typename, fields=None, /, **kwargs)'
- if isinstance(NamedTuple, _types.FunctionType):
- NamedTuple.__text_signature__ = _new_signature
- else:
- NamedTuple.__call__.__text_signature__ = _new_signature
+ NamedTuple.__mro_entries__ = _namedtuple_mro_entries
if hasattr(collections.abc, "Buffer"):
Buffer = collections.abc.Buffer
else:
- class Buffer(abc.ABC):
+ class Buffer(abc.ABC): # noqa: B024
"""Base class for classes that implement the buffer protocol.
The buffer protocol allows Python objects to expose a low-level
@@ -2764,7 +3430,7 @@ class Buffer(abc.ABC):
if hasattr(_types, "get_original_bases"):
get_original_bases = _types.get_original_bases
else:
- def get_original_bases(__cls):
+ def get_original_bases(cls, /):
"""Return the class's "original" bases prior to modification by `__mro_entries__`.
Examples::
@@ -2786,14 +3452,11 @@ class Baz(list[str]): ...
assert get_original_bases(int) == (object,)
"""
try:
- return __cls.__orig_bases__
+ return cls.__dict__.get("__orig_bases__", cls.__bases__)
except AttributeError:
- try:
- return __cls.__bases__
- except AttributeError:
- raise TypeError(
- f'Expected an instance of type, not {type(__cls).__name__!r}'
- ) from None
+ raise TypeError(
+ f'Expected an instance of type, not {type(cls).__name__!r}'
+ ) from None
# NewType is a class on Python 3.10+, making it pickleable
@@ -2815,7 +3478,7 @@ def name_by_id(user_id: UserId) -> str:
num = UserId(5) + 1 # type: int
"""
- def __call__(self, obj):
+ def __call__(self, obj, /):
return obj
def __init__(self, name, tp):
@@ -2861,17 +3524,57 @@ def __ror__(self, other):
return typing.Union[other, self]
-if hasattr(typing, "TypeAliasType"):
+if sys.version_info >= (3, 14):
TypeAliasType = typing.TypeAliasType
+# <=3.13
else:
- def _is_unionable(obj):
- """Corresponds to is_unionable() in unionobject.c in CPython."""
- return obj is None or isinstance(obj, (
- type,
- _types.GenericAlias,
- _types.UnionType,
- TypeAliasType,
- ))
+ if sys.version_info >= (3, 12):
+ # 3.12-3.13
+ def _is_unionable(obj):
+ """Corresponds to is_unionable() in unionobject.c in CPython."""
+ return obj is None or isinstance(obj, (
+ type,
+ _types.GenericAlias,
+ _types.UnionType,
+ typing.TypeAliasType,
+ TypeAliasType,
+ ))
+ else:
+ # <=3.11
+ def _is_unionable(obj):
+ """Corresponds to is_unionable() in unionobject.c in CPython."""
+ return obj is None or isinstance(obj, (
+ type,
+ _types.GenericAlias,
+ _types.UnionType,
+ TypeAliasType,
+ ))
+
+ if sys.version_info < (3, 10):
+ # Copied and pasted from https://github.com/python/cpython/blob/986a4e1b6fcae7fe7a1d0a26aea446107dd58dd2/Objects/genericaliasobject.c#L568-L582,
+ # so that we emulate the behaviour of `types.GenericAlias`
+ # on the latest versions of CPython
+ _ATTRIBUTE_DELEGATION_EXCLUSIONS = frozenset({
+ "__class__",
+ "__bases__",
+ "__origin__",
+ "__args__",
+ "__unpacked__",
+ "__parameters__",
+ "__typing_unpacked_tuple_args__",
+ "__mro_entries__",
+ "__reduce_ex__",
+ "__reduce__",
+ "__copy__",
+ "__deepcopy__",
+ })
+
+ class _TypeAliasGenericAlias(typing._GenericAlias, _root=True):
+ def __getattr__(self, attr):
+ if attr in _ATTRIBUTE_DELEGATION_EXCLUSIONS:
+ return object.__getattr__(self, attr)
+ return getattr(self.__origin__, attr)
+
class TypeAliasType:
"""Create named, parameterized type aliases.
@@ -2904,11 +3607,29 @@ class TypeAliasType:
def __init__(self, name: str, value, *, type_params=()):
if not isinstance(name, str):
raise TypeError("TypeAliasType name must be a string")
+ if not isinstance(type_params, tuple):
+ raise TypeError("type_params must be a tuple")
self.__value__ = value
self.__type_params__ = type_params
+ default_value_encountered = False
parameters = []
for type_param in type_params:
+ if (
+ not isinstance(type_param, (TypeVar, TypeVarTuple, ParamSpec))
+ # <=3.11
+ # the Unpack backport passes the isinstance(type_param, TypeVar) check
+ or _is_unpack(type_param)
+ ):
+ raise TypeError(f"Expected a type param, got {type_param!r}")
+ has_default = (
+ getattr(type_param, '__default__', NoDefault) is not NoDefault
+ )
+ if default_value_encountered and not has_default:
+ raise TypeError(f"non-default type parameter '{type_param!r}'"
+ " follows default type parameter")
+ if has_default:
+ default_value_encountered = True
if isinstance(type_param, TypeVarTuple):
parameters.extend(type_param)
else:
@@ -2920,13 +3641,13 @@ def __init__(self, name: str, value, *, type_params=()):
# Setting this attribute closes the TypeAliasType from further modification
self.__name__ = name
- def __setattr__(self, __name: str, __value: object) -> None:
+ def __setattr__(self, name: str, value: object, /) -> None:
if hasattr(self, "__name__"):
- self._raise_attribute_error(__name)
- super().__setattr__(__name, __value)
+ self._raise_attribute_error(name)
+ super().__setattr__(name, value)
- def __delattr__(self, __name: str) -> Never:
- self._raise_attribute_error(__name)
+ def __delattr__(self, name: str, /) -> Never:
+ self._raise_attribute_error(name)
def _raise_attribute_error(self, name: str) -> Never:
# Match the Python 3.12 error messages exactly
@@ -2945,16 +3666,49 @@ def _raise_attribute_error(self, name: str) -> Never:
def __repr__(self) -> str:
return self.__name__
+ if sys.version_info < (3, 11):
+ def _check_single_param(self, param, recursion=0):
+ # Allow [], [int], [int, str], [int, ...], [int, T]
+ if param is ...:
+ return ...
+ if param is None:
+ return None
+ # Note: in <=3.9, _ConcatenateGenericAlias inherits from list
+ if isinstance(param, list) and recursion == 0:
+ return [self._check_single_param(arg, recursion+1)
+ for arg in param]
+ return typing._type_check(
+ param, f'Subscripting {self.__name__} requires a type.'
+ )
+
+ def _check_parameters(self, parameters):
+ if sys.version_info < (3, 11):
+ return tuple(
+ self._check_single_param(item)
+ for item in parameters
+ )
+ return tuple(typing._type_check(
+ item, f'Subscripting {self.__name__} requires a type.'
+ )
+ for item in parameters
+ )
+
def __getitem__(self, parameters):
+ if not self.__type_params__:
+ raise TypeError("Only generic type aliases are subscriptable")
if not isinstance(parameters, tuple):
parameters = (parameters,)
- parameters = [
- typing._type_check(
- item, f'Subscripting {self.__name__} requires a type.'
- )
- for item in parameters
- ]
- return typing._GenericAlias(self, tuple(parameters))
+ # Using types.GenericAlias on 3.9 would create problems with Concatenate
+ if sys.version_info >= (3, 10):
+ return _types.GenericAlias(self, parameters)
+ type_vars = _collect_type_vars(parameters)
+ parameters = self._check_parameters(parameters)
+ alias = _TypeAliasGenericAlias(self, parameters)
+ # alias.__parameters__ is not complete if Concatenate is present
+ # as it is converted to a list from which no parameters are extracted.
+ if alias.__parameters__ != type_vars:
+ alias.__parameters__ = type_vars
+ return alias
def __reduce__(self):
return self.__name__
@@ -2987,7 +3741,7 @@ def __ror__(self, left):
is_protocol = typing.is_protocol
get_protocol_members = typing.get_protocol_members
else:
- def is_protocol(__tp: type) -> bool:
+ def is_protocol(tp: type, /) -> bool:
"""Return True if the given type is a Protocol.
Example::
@@ -3002,13 +3756,13 @@ def is_protocol(__tp: type) -> bool:
False
"""
return (
- isinstance(__tp, type)
- and getattr(__tp, '_is_protocol', False)
- and __tp is not Protocol
- and __tp is not getattr(typing, "Protocol", object())
+ isinstance(tp, type)
+ and getattr(tp, '_is_protocol', False)
+ and tp is not Protocol
+ and tp is not typing.Protocol
)
- def get_protocol_members(__tp: type) -> typing.FrozenSet[str]:
+ def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
"""Return the set of members defined in a Protocol.
Example::
@@ -3022,51 +3776,469 @@ def get_protocol_members(__tp: type) -> typing.FrozenSet[str]:
Raise a TypeError for arguments that are not Protocols.
"""
- if not is_protocol(__tp):
- raise TypeError(f'{__tp!r} is not a Protocol')
- if hasattr(__tp, '__protocol_attrs__'):
- return frozenset(__tp.__protocol_attrs__)
- return frozenset(_get_protocol_attrs(__tp))
-
-
-# Aliases for items that have always been in typing.
-# Explicitly assign these (rather than using `from typing import *` at the top),
-# so that we get a CI error if one of these is deleted from typing.py
-# in a future version of Python
-AbstractSet = typing.AbstractSet
-AnyStr = typing.AnyStr
-BinaryIO = typing.BinaryIO
-Callable = typing.Callable
-Collection = typing.Collection
-Container = typing.Container
-Dict = typing.Dict
-ForwardRef = typing.ForwardRef
-FrozenSet = typing.FrozenSet
-Generator = typing.Generator
+ if not is_protocol(tp):
+ raise TypeError(f'{tp!r} is not a Protocol')
+ if hasattr(tp, '__protocol_attrs__'):
+ return frozenset(tp.__protocol_attrs__)
+ return frozenset(_get_protocol_attrs(tp))
+
+
+if hasattr(typing, "Doc"):
+ Doc = typing.Doc
+else:
+ class Doc:
+ """Define the documentation of a type annotation using ``Annotated``, to be
+ used in class attributes, function and method parameters, return values,
+ and variables.
+
+ The value should be a positional-only string literal to allow static tools
+ like editors and documentation generators to use it.
+
+ This complements docstrings.
+
+ The string value passed is available in the attribute ``documentation``.
+
+ Example::
+
+ >>> from typing_extensions import Annotated, Doc
+ >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+ """
+ def __init__(self, documentation: str, /) -> None:
+ self.documentation = documentation
+
+ def __repr__(self) -> str:
+ return f"Doc({self.documentation!r})"
+
+ def __hash__(self) -> int:
+ return hash(self.documentation)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Doc):
+ return NotImplemented
+ return self.documentation == other.documentation
+
+
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+ try:
+ import _socket
+ except ImportError:
+ pass
+ else:
+ _CAPI = getattr(_socket, "CAPI", None)
+ if _CAPI is not None:
+ _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+ CapsuleType = _CapsuleType
+ __all__.append("CapsuleType")
+
+
+if sys.version_info >= (3, 14):
+ from annotationlib import Format, get_annotations
+else:
+ class Format(enum.IntEnum):
+ VALUE = 1
+ VALUE_WITH_FAKE_GLOBALS = 2
+ FORWARDREF = 3
+ STRING = 4
+
+ def get_annotations(obj, *, globals=None, locals=None, eval_str=False,
+ format=Format.VALUE):
+ """Compute the annotations dict for an object.
+
+ obj may be a callable, class, or module.
+ Passing in an object of any other type raises TypeError.
+
+ Returns a dict. get_annotations() returns a new dict every time
+ it's called; calling it twice on the same object will return two
+ different but equivalent dicts.
+
+ This is a backport of `inspect.get_annotations`, which has been
+ in the standard library since Python 3.10. See the standard library
+ documentation for more:
+
+ https://docs.python.org/3/library/inspect.html#inspect.get_annotations
+
+ This backport adds the *format* argument introduced by PEP 649. The
+ three formats supported are:
+ * VALUE: the annotations are returned as-is. This is the default and
+ it is compatible with the behavior on previous Python versions.
+ * FORWARDREF: return annotations as-is if possible, but replace any
+ undefined names with ForwardRef objects. The implementation proposed by
+ PEP 649 relies on language changes that cannot be backported; the
+ typing-extensions implementation simply returns the same result as VALUE.
+ * STRING: return annotations as strings, in a format close to the original
+ source. Again, this behavior cannot be replicated directly in a backport.
+ As an approximation, typing-extensions retrieves the annotations under
+ VALUE semantics and then stringifies them.
+
+ The purpose of this backport is to allow users who would like to use
+ FORWARDREF or STRING semantics once PEP 649 is implemented, but who also
+ want to support earlier Python versions, to simply write:
+
+ typing_extensions.get_annotations(obj, format=Format.FORWARDREF)
+
+ """
+ format = Format(format)
+ if format is Format.VALUE_WITH_FAKE_GLOBALS:
+ raise ValueError(
+ "The VALUE_WITH_FAKE_GLOBALS format is for internal use only"
+ )
+
+ if eval_str and format is not Format.VALUE:
+ raise ValueError("eval_str=True is only supported with format=Format.VALUE")
+
+ if isinstance(obj, type):
+ # class
+ obj_dict = getattr(obj, '__dict__', None)
+ if obj_dict and hasattr(obj_dict, 'get'):
+ ann = obj_dict.get('__annotations__', None)
+ if isinstance(ann, _types.GetSetDescriptorType):
+ ann = None
+ else:
+ ann = None
+
+ obj_globals = None
+ module_name = getattr(obj, '__module__', None)
+ if module_name:
+ module = sys.modules.get(module_name, None)
+ if module:
+ obj_globals = getattr(module, '__dict__', None)
+ obj_locals = dict(vars(obj))
+ unwrap = obj
+ elif isinstance(obj, _types.ModuleType):
+ # module
+ ann = getattr(obj, '__annotations__', None)
+ obj_globals = obj.__dict__
+ obj_locals = None
+ unwrap = None
+ elif callable(obj):
+ # this includes types.FunctionType, types.BuiltinFunctionType,
+ # types.BuiltinMethodType, functools.partial, functools.singledispatch,
+ # "class funclike" from Lib/test/test_inspect... on and on it goes.
+ ann = getattr(obj, '__annotations__', None)
+ obj_globals = getattr(obj, '__globals__', None)
+ obj_locals = None
+ unwrap = obj
+ elif hasattr(obj, '__annotations__'):
+ ann = obj.__annotations__
+ obj_globals = obj_locals = unwrap = None
+ else:
+ raise TypeError(f"{obj!r} is not a module, class, or callable.")
+
+ if ann is None:
+ return {}
+
+ if not isinstance(ann, dict):
+ raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
+
+ if not ann:
+ return {}
+
+ if not eval_str:
+ if format is Format.STRING:
+ return {
+ key: value if isinstance(value, str) else typing._type_repr(value)
+ for key, value in ann.items()
+ }
+ return dict(ann)
+
+ if unwrap is not None:
+ while True:
+ if hasattr(unwrap, '__wrapped__'):
+ unwrap = unwrap.__wrapped__
+ continue
+ if isinstance(unwrap, functools.partial):
+ unwrap = unwrap.func
+ continue
+ break
+ if hasattr(unwrap, "__globals__"):
+ obj_globals = unwrap.__globals__
+
+ if globals is None:
+ globals = obj_globals
+ if locals is None:
+ locals = obj_locals or {}
+
+ # "Inject" type parameters into the local namespace
+ # (unless they are shadowed by assignments *in* the local namespace),
+ # as a way of emulating annotation scopes when calling `eval()`
+ if type_params := getattr(obj, "__type_params__", ()):
+ locals = {param.__name__: param for param in type_params} | locals
+
+ return_value = {key:
+ value if not isinstance(value, str) else eval(value, globals, locals)
+ for key, value in ann.items() }
+ return return_value
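+
+ # A small usage sketch (f is a hypothetical example function):
+ #
+ #     def f(x: "SomeClass") -> None: ...
+ #
+ #     get_annotations(f)                        # {'x': 'SomeClass', 'return': None}
+ #     get_annotations(f, format=Format.STRING)  # {'x': 'SomeClass', 'return': 'None'}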
+
+
+if hasattr(typing, "evaluate_forward_ref"):
+ evaluate_forward_ref = typing.evaluate_forward_ref
+else:
+ # Implements annotationlib.ForwardRef.evaluate
+ def _eval_with_owner(
+ forward_ref, *, owner=None, globals=None, locals=None, type_params=None
+ ):
+ if forward_ref.__forward_evaluated__:
+ return forward_ref.__forward_value__
+ if getattr(forward_ref, "__cell__", None) is not None:
+ try:
+ value = forward_ref.__cell__.cell_contents
+ except ValueError:
+ pass
+ else:
+ forward_ref.__forward_evaluated__ = True
+ forward_ref.__forward_value__ = value
+ return value
+ if owner is None:
+ owner = getattr(forward_ref, "__owner__", None)
+
+ if (
+ globals is None
+ and getattr(forward_ref, "__forward_module__", None) is not None
+ ):
+ globals = getattr(
+ sys.modules.get(forward_ref.__forward_module__, None), "__dict__", None
+ )
+ if globals is None:
+ globals = getattr(forward_ref, "__globals__", None)
+ if globals is None:
+ if isinstance(owner, type):
+ module_name = getattr(owner, "__module__", None)
+ if module_name:
+ module = sys.modules.get(module_name, None)
+ if module:
+ globals = getattr(module, "__dict__", None)
+ elif isinstance(owner, _types.ModuleType):
+ globals = getattr(owner, "__dict__", None)
+ elif callable(owner):
+ globals = getattr(owner, "__globals__", None)
+
+ # If we pass None to eval() below, the globals of this module are used.
+ if globals is None:
+ globals = {}
+
+ if locals is None:
+ locals = {}
+ if isinstance(owner, type):
+ locals.update(vars(owner))
+
+ if type_params is None and owner is not None:
+ # "Inject" type parameters into the local namespace
+ # (unless they are shadowed by assignments *in* the local namespace),
+ # as a way of emulating annotation scopes when calling `eval()`
+ type_params = getattr(owner, "__type_params__", None)
+
+ # type parameters require some special handling,
+ # as they exist in their own scope
+ # but `eval()` does not have a dedicated parameter for that scope.
+ # For classes, names in type parameter scopes should override
+ # names in the global scope (which here are called `localns`!),
+ # but should in turn be overridden by names in the class scope
+ # (which here are called `globalns`!)
+ if type_params is not None:
+ globals = dict(globals)
+ locals = dict(locals)
+ for param in type_params:
+ param_name = param.__name__
+ if (
+ _FORWARD_REF_HAS_CLASS and not forward_ref.__forward_is_class__
+ ) or param_name not in globals:
+ globals[param_name] = param
+ locals.pop(param_name, None)
+
+ arg = forward_ref.__forward_arg__
+ if arg.isidentifier() and not keyword.iskeyword(arg):
+ if arg in locals:
+ value = locals[arg]
+ elif arg in globals:
+ value = globals[arg]
+ elif hasattr(builtins, arg):
+ return getattr(builtins, arg)
+ else:
+ raise NameError(arg)
+ else:
+ code = forward_ref.__forward_code__
+ value = eval(code, globals, locals)
+ forward_ref.__forward_evaluated__ = True
+ forward_ref.__forward_value__ = value
+ return value
+
+ def evaluate_forward_ref(
+ forward_ref,
+ *,
+ owner=None,
+ globals=None,
+ locals=None,
+ type_params=None,
+ format=None,
+ _recursive_guard=frozenset(),
+ ):
+ """Evaluate a forward reference as a type hint.
+
+ This is similar to calling the ForwardRef.evaluate() method,
+ but unlike that method, evaluate_forward_ref() also:
+
+ * Recursively evaluates forward references nested within the type hint.
+ * Rejects certain objects that are not valid type hints.
+ * Replaces type hints that evaluate to None with types.NoneType.
+ * Supports the *FORWARDREF* and *STRING* formats.
+
+ *forward_ref* must be an instance of ForwardRef. *owner*, if given,
+ should be the object that holds the annotations that the forward reference
+        was derived from, such as a module, class object, or function. It is used to
+ infer the namespaces to use for looking up names. *globals* and *locals*
+ can also be explicitly given to provide the global and local namespaces.
+ *type_params* is a tuple of type parameters that are in scope when
+ evaluating the forward reference. This parameter must be provided (though
+ it may be an empty tuple) if *owner* is not given and the forward reference
+ does not already have an owner set. *format* specifies the format of the
+ annotation and is a member of the annotationlib.Format enum.
+
+ """
+ if format == Format.STRING:
+ return forward_ref.__forward_arg__
+ if forward_ref.__forward_arg__ in _recursive_guard:
+ return forward_ref
+
+ # Evaluate the forward reference
+ try:
+ value = _eval_with_owner(
+ forward_ref,
+ owner=owner,
+ globals=globals,
+ locals=locals,
+ type_params=type_params,
+ )
+ except NameError:
+ if format == Format.FORWARDREF:
+ return forward_ref
+ else:
+ raise
+
+ if isinstance(value, str):
+ value = ForwardRef(value)
+
+ # Recursively evaluate the type
+ if isinstance(value, ForwardRef):
+ if getattr(value, "__forward_module__", True) is not None:
+ globals = None
+ return evaluate_forward_ref(
+ value,
+ globals=globals,
+ locals=locals,
+ type_params=type_params, owner=owner,
+ _recursive_guard=_recursive_guard, format=format
+ )
+ if sys.version_info < (3, 12, 5) and type_params:
+ # Make use of type_params
+ locals = dict(locals) if locals else {}
+            for tvar in type_params:
+                if tvar.__name__ not in locals:  # don't overwrite an existing binding
+                    locals[tvar.__name__] = tvar
+ if sys.version_info < (3, 12, 5):
+ return typing._eval_type(
+ value,
+ globals,
+ locals,
+ recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+ )
+ else:
+ return typing._eval_type(
+ value,
+ globals,
+ locals,
+ type_params,
+ recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+ )
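+
+# Illustrative usage of the evaluate_forward_ref() backport (editorial sketch;
+# `Owner` stands for a hypothetical module-level class):
+#
+#     evaluate_forward_ref(ForwardRef("int"))                  # returns int
+#     evaluate_forward_ref(ForwardRef("Owner"), owner=Owner)   # returns Owner
+#
+#     # With format=Format.FORWARDREF, an unresolvable name is returned as a
+#     # ForwardRef instead of raising NameError:
+#     evaluate_forward_ref(ForwardRef("Missing"), format=Format.FORWARDREF)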
+
+
+class Sentinel:
+ """Create a unique sentinel object.
+
+ *name* should be the name of the variable to which the return value shall be assigned.
+
+ *repr*, if supplied, will be used for the repr of the sentinel object.
+    If not provided, "<name>" will be used.
+ """
+
+ def __init__(
+ self,
+ name: str,
+ repr: typing.Optional[str] = None,
+ ):
+ self._name = name
+ self._repr = repr if repr is not None else f'<{name}>'
+
+ def __repr__(self):
+ return self._repr
+
+ if sys.version_info < (3, 11):
+ # The presence of this method convinces typing._type_check
+ # that Sentinels are types.
+ def __call__(self, *args, **kwargs):
+ raise TypeError(f"{type(self).__name__!r} object is not callable")
+
+ if sys.version_info >= (3, 10):
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+ def __getstate__(self):
+ raise TypeError(f"Cannot pickle {type(self).__name__!r} object")
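+
+# Illustrative usage of Sentinel (editorial sketch; `MISSING` and `func` are
+# hypothetical names):
+#
+#     MISSING = Sentinel("MISSING")
+#
+#     def func(flag=MISSING):
+#         if flag is MISSING:
+#             ...  # the caller did not pass a value
+#
+#     repr(MISSING)   # '<MISSING>'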
+
+
+# Aliases for items that are in typing in all supported versions.
+# We use hasattr() checks so this library will continue to import on
+# future versions of Python that may remove these names.
+_typing_names = [
+ "AbstractSet",
+ "AnyStr",
+ "BinaryIO",
+ "Callable",
+ "Collection",
+ "Container",
+ "Dict",
+ "FrozenSet",
+ "Hashable",
+ "IO",
+ "ItemsView",
+ "Iterable",
+ "Iterator",
+ "KeysView",
+ "List",
+ "Mapping",
+ "MappingView",
+ "Match",
+ "MutableMapping",
+ "MutableSequence",
+ "MutableSet",
+ "Optional",
+ "Pattern",
+ "Reversible",
+ "Sequence",
+ "Set",
+ "Sized",
+ "TextIO",
+ "Tuple",
+ "Union",
+ "ValuesView",
+ "cast",
+ "no_type_check",
+ "no_type_check_decorator",
+ # This is private, but it was defined by typing_extensions for a long time
+ # and some users rely on it.
+ "_AnnotatedAlias",
+]
+globals().update(
+ {name: getattr(typing, name) for name in _typing_names if hasattr(typing, name)}
+)
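+# As a result, e.g. `typing_extensions.Mapping is typing.Mapping` on every
+# supported version; if a future Python release drops one of these names, it is
+# simply not re-exported here instead of breaking the import of this module.
+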
+# These are defined unconditionally because they are used in
+# typing-extensions itself.
Generic = typing.Generic
-Hashable = typing.Hashable
-IO = typing.IO
-ItemsView = typing.ItemsView
-Iterable = typing.Iterable
-Iterator = typing.Iterator
-KeysView = typing.KeysView
-List = typing.List
-Mapping = typing.Mapping
-MappingView = typing.MappingView
-Match = typing.Match
-MutableMapping = typing.MutableMapping
-MutableSequence = typing.MutableSequence
-MutableSet = typing.MutableSet
-Optional = typing.Optional
-Pattern = typing.Pattern
-Reversible = typing.Reversible
-Sequence = typing.Sequence
-Set = typing.Set
-Sized = typing.Sized
-TextIO = typing.TextIO
-Tuple = typing.Tuple
-Union = typing.Union
-ValuesView = typing.ValuesView
-cast = typing.cast
-no_type_check = typing.no_type_check
-no_type_check_decorator = typing.no_type_check_decorator
+ForwardRef = typing.ForwardRef
+Annotated = typing.Annotated
diff --git a/test-requirements.txt b/test-requirements.txt
index 675b2c5d..4b0fc81e 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,2 +1 @@
-flake8
-flake8-bugbear
+ruff==0.9.6
diff --git a/tox.ini b/tox.ini
index 3d583efc..1f2877ff 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
isolated_build = True
-envlist = py37, py38, py39, py310, py311, py312
+envlist = py39, py310, py311, py312, py313, py314
[testenv]
changedir = src